From 502781c1d782e3c270585bc75056e4028ffb96b6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tobias=20Nie=C3=9Fen?= Date: Sat, 6 Jan 2018 19:34:27 +0100 Subject: [PATCH 001/227] test: fix spelling in test case comments PR-URL: https://github.com/nodejs/node/pull/18018 Reviewed-By: Jon Moss Reviewed-By: Anatoli Papirovski Reviewed-By: Weijia Wang Reviewed-By: Luigi Pinca Reviewed-By: James M Snell Reviewed-By: Colin Ihrig Reviewed-By: Yuta Hiroto --- test/abort/test-http-parser-consume.js | 4 ++-- test/fixtures/net-fd-passing-receiver.js | 2 +- test/parallel/test-buffer-fill.js | 4 ++-- test/parallel/test-buffer-includes.js | 2 +- test/parallel/test-buffer-indexof.js | 2 +- test/parallel/test-buffer-read.js | 4 ++-- test/parallel/test-child-process-internal.js | 2 +- test/parallel/test-crypto-deprecated.js | 2 +- test/parallel/test-crypto-fips.js | 2 +- ...hrow-error-then-throw-from-uncaught-exception-handler.js | 2 +- test/parallel/test-event-emitter-remove-listeners.js | 4 ++-- test/parallel/test-fs-access.js | 2 +- test/parallel/test-http-agent-keepalive.js | 4 ++-- test/parallel/test-http-extra-response.js | 1 - test/parallel/test-https-agent-secure-protocol.js | 2 +- test/parallel/test-https-socket-options.js | 2 +- test/parallel/test-net-listen-error.js | 2 +- test/parallel/test-net-pipe-connect-errors.js | 2 +- test/parallel/test-os.js | 2 +- test/parallel/test-promises-unhandled-rejections.js | 2 +- test/parallel/test-readline-interface.js | 2 +- test/parallel/test-repl.js | 2 +- test/parallel/test-stream2-transform.js | 6 +++--- test/parallel/test-stream3-cork-uncork.js | 2 +- test/parallel/test-stringbytes-external.js | 2 +- test/parallel/test-tls-cnnic-whitelist.js | 2 +- test/parallel/test-tls-server-verify.js | 4 ++-- 27 files changed, 34 insertions(+), 35 deletions(-) diff --git a/test/abort/test-http-parser-consume.js b/test/abort/test-http-parser-consume.js index 9115aba70dbf17..673e04cfa3a573 100644 --- a/test/abort/test-http-parser-consume.js +++ b/test/abort/test-http-parser-consume.js @@ -11,12 +11,12 @@ if (process.argv[2] === 'child') { const rr = get({ port: server.address().port }, common.mustCall(() => { // This bad input (0) should abort the parser and the process rr.parser.consume(0); - // This line should be unreachanble. + // This line should be unreachable. 
assert.fail('this should be unreachable'); })); })); } else { - // super-proces + // super-process const child = spawn(process.execPath, [__filename, 'child']); child.stdout.on('data', common.mustNotCall()); diff --git a/test/fixtures/net-fd-passing-receiver.js b/test/fixtures/net-fd-passing-receiver.js index fb4faee1264464..e6d2de43862b12 100644 --- a/test/fixtures/net-fd-passing-receiver.js +++ b/test/fixtures/net-fd-passing-receiver.js @@ -24,7 +24,7 @@ receiver = net.createServer(function(socket) { }); }); -/* To signal the test runne we're up and listening */ +/* To signal the test runner we're up and listening */ receiver.on('listening', function() { console.log('ready'); }); diff --git a/test/parallel/test-buffer-fill.js b/test/parallel/test-buffer-fill.js index 8de05bc5dcd9ac..b4c7e2f139cb83 100644 --- a/test/parallel/test-buffer-fill.js +++ b/test/parallel/test-buffer-fill.js @@ -334,7 +334,7 @@ Buffer.alloc(8, ''); return 0; } else { elseWasLast = true; - // Once buffer.js calls the C++ implemenation of fill, return -1 + // Once buffer.js calls the C++ implementation of fill, return -1 return -1; } } @@ -367,7 +367,7 @@ assert.throws(() => { return 1; } else { elseWasLast = true; - // Once buffer.js calls the C++ implemenation of fill, return -1 + // Once buffer.js calls the C++ implementation of fill, return -1 return -1; } } diff --git a/test/parallel/test-buffer-includes.js b/test/parallel/test-buffer-includes.js index e610bc7e59fa33..eadeb8dd4e4b9b 100644 --- a/test/parallel/test-buffer-includes.js +++ b/test/parallel/test-buffer-includes.js @@ -137,7 +137,7 @@ assert.strictEqual( ); -// test usc2 encoding +// test ucs2 encoding let twoByteString = Buffer.from('\u039a\u0391\u03a3\u03a3\u0395', 'ucs2'); assert(twoByteString.includes('\u0395', 4, 'ucs2')); diff --git a/test/parallel/test-buffer-indexof.js b/test/parallel/test-buffer-indexof.js index decb90c6db31d5..3f23683fc00179 100644 --- a/test/parallel/test-buffer-indexof.js +++ b/test/parallel/test-buffer-indexof.js @@ -553,7 +553,7 @@ assert.strictEqual(511, longBufferString.lastIndexOf(pattern, 1534)); // "yolo swag swag yolo swag yolo yolo swag" ..., goes on for about 5MB. // This is hard to search because it all looks similar, but never repeats. -// countBits returns the number of bits in the binary reprsentation of n. +// countBits returns the number of bits in the binary representation of n. 
function countBits(n) { let count; for (count = 0; n > 0; count++) { diff --git a/test/parallel/test-buffer-read.js b/test/parallel/test-buffer-read.js index 5eac575ff5ab81..e8f3bad383fd69 100644 --- a/test/parallel/test-buffer-read.js +++ b/test/parallel/test-buffer-read.js @@ -20,11 +20,11 @@ function read(buff, funx, args, expected) { } -// testing basic functionality of readDoubleBE() and readDOubleLE() +// testing basic functionality of readDoubleBE() and readDoubleLE() read(buf, 'readDoubleBE', [1], -3.1827727774563287e+295); read(buf, 'readDoubleLE', [1], -6.966010051009108e+144); -// testing basic functionality of readFLoatBE() and readFloatLE() +// testing basic functionality of readFloatBE() and readFloatLE() read(buf, 'readFloatBE', [1], -1.6691549692541768e+37); read(buf, 'readFloatLE', [1], -7861303808); diff --git a/test/parallel/test-child-process-internal.js b/test/parallel/test-child-process-internal.js index bf5554bc2d45f9..b5c5d2cb77b431 100644 --- a/test/parallel/test-child-process-internal.js +++ b/test/parallel/test-child-process-internal.js @@ -11,7 +11,7 @@ if (process.argv[2] === 'child') { //send non-internal message containing PREFIX at a non prefix position process.send(normal); - //send inernal message + //send internal message process.send(internal); process.exit(0); diff --git a/test/parallel/test-crypto-deprecated.js b/test/parallel/test-crypto-deprecated.js index 84f25316d49b61..acdd71301fbed0 100644 --- a/test/parallel/test-crypto-deprecated.js +++ b/test/parallel/test-crypto-deprecated.js @@ -14,7 +14,7 @@ common.expectWarning('DeprecationWarning', [ // Accessing the deprecated function is enough to trigger the warning event. // It does not need to be called. So the assert serves the purpose of both -// triggering the warning event and confirming that the deprected function is +// triggering the warning event and confirming that the deprecated function is // mapped to the correct non-deprecated function. assert.strictEqual(crypto.Credentials, tls.SecureContext); assert.strictEqual(crypto.createCredentials, tls.createSecureContext); diff --git a/test/parallel/test-crypto-fips.js b/test/parallel/test-crypto-fips.js index ffab5d19ba2869..54f85188034e84 100644 --- a/test/parallel/test-crypto-fips.js +++ b/test/parallel/test-crypto-fips.js @@ -91,7 +91,7 @@ testHelper( // to try to call the fips setter, to try to detect this situation, as // that would throw an error: // ("Error: Cannot set FIPS mode in a non-FIPS build."). -// Due to this uncertanty the following tests are skipped when configured +// Due to this uncertainty the following tests are skipped when configured // with --shared-openssl. if (!sharedOpenSSL()) { // OpenSSL config file should be able to turn on FIPS mode diff --git a/test/parallel/test-domain-throw-error-then-throw-from-uncaught-exception-handler.js b/test/parallel/test-domain-throw-error-then-throw-from-uncaught-exception-handler.js index 089300bc481c10..a2afebd838f410 100644 --- a/test/parallel/test-domain-throw-error-then-throw-from-uncaught-exception-handler.js +++ b/test/parallel/test-domain-throw-error-then-throw-from-uncaught-exception-handler.js @@ -25,7 +25,7 @@ if (process.argv[2] === 'child') { // is not properly flushed in V8's Isolate::Throw right before the // process aborts due to an uncaught exception, and thus the error // message representing the error that was thrown cannot be read by the - // parent process. So instead of parsing the child process' stdandard + // parent process. 
So instead of parsing the child process' standard // error, the parent process will check that in the case // --abort-on-uncaught-exception was passed, the process did not exit // with exit code RAN_UNCAUGHT_EXCEPTION_HANDLER_EXIT_CODE. diff --git a/test/parallel/test-event-emitter-remove-listeners.js b/test/parallel/test-event-emitter-remove-listeners.js index ed28bc7308ea9c..f365cbba3f0fff 100644 --- a/test/parallel/test-event-emitter-remove-listeners.js +++ b/test/parallel/test-event-emitter-remove-listeners.js @@ -98,10 +98,10 @@ function listener2() {} // listener4 will still be called although it is removed by listener 3. ee.emit('hello'); - // This is so because the interal listener array at time of emit + // This is so because the internal listener array at time of emit // was [listener3,listener4] - // Interal listener array [listener3] + // Internal listener array [listener3] ee.emit('hello'); } diff --git a/test/parallel/test-fs-access.js b/test/parallel/test-fs-access.js index 1579b71ae6e370..67187672017ca2 100644 --- a/test/parallel/test-fs-access.js +++ b/test/parallel/test-fs-access.js @@ -37,7 +37,7 @@ createFileWithPerms(readWriteFile, 0o666); * The change of user id is done after creating the fixtures files for the same * reason: the test may be run as the superuser within a directory in which * only the superuser can create files, and thus it may need superuser - * priviledges to create them. + * privileges to create them. * * There's not really any point in resetting the process' user id to 0 after * changing it to 'nobody', since in the case that the test runs without diff --git a/test/parallel/test-http-agent-keepalive.js b/test/parallel/test-http-agent-keepalive.js index 7f9626d4669ef9..7215ab0b438a08 100644 --- a/test/parallel/test-http-agent-keepalive.js +++ b/test/parallel/test-http-agent-keepalive.js @@ -68,7 +68,7 @@ function remoteClose() { process.nextTick(common.mustCall(() => { assert.strictEqual(agent.sockets[name], undefined); assert.strictEqual(agent.freeSockets[name].length, 1); - // waitting remote server close the socket + // waiting remote server close the socket setTimeout(common.mustCall(() => { assert.strictEqual(agent.sockets[name], undefined); assert.strictEqual(agent.freeSockets[name], undefined, @@ -81,7 +81,7 @@ function remoteClose() { } function remoteError() { - // remove server will destroy ths socket + // remote server will destroy the socket const req = get('/error', common.mustNotCall()); req.on('error', common.mustCall((err) => { assert(err); diff --git a/test/parallel/test-http-extra-response.js b/test/parallel/test-http-extra-response.js index 3cdc96a2d6d56b..c092f17f2a6385 100644 --- a/test/parallel/test-http-extra-response.js +++ b/test/parallel/test-http-extra-response.js @@ -30,7 +30,6 @@ const server = net.createServer(function(socket) { if (postBody.includes('\r\n')) { socket.write(fullResponse); - // omg, I wrote the response twice, what a terrible HTTP server I am. 
socket.end(fullResponse); } }); diff --git a/test/parallel/test-https-agent-secure-protocol.js b/test/parallel/test-https-agent-secure-protocol.js index 737bd6692f26aa..0d6b8c340dcac2 100644 --- a/test/parallel/test-https-agent-secure-protocol.js +++ b/test/parallel/test-https-agent-secure-protocol.js @@ -45,7 +45,7 @@ server.listen(0, common.mustCall(function() { }, common.mustCall(function(res) { res.resume(); globalAgent.once('free', common.mustCall(function() { - // Verify that two keep-alived connections are created + // Verify that two keep-alive connections are created // due to the different secureProtocol settings: const keys = Object.keys(globalAgent.freeSockets); assert.strictEqual(keys.length, 2); diff --git a/test/parallel/test-https-socket-options.js b/test/parallel/test-https-socket-options.js index f43ad6c3726ab3..af31677c4be92f 100644 --- a/test/parallel/test-https-socket-options.js +++ b/test/parallel/test-https-socket-options.js @@ -40,7 +40,7 @@ server_http.listen(0, function() { }); // Then try https server (requires functions to be -// mirroed in tls.js's CryptoStream) +// mirrored in tls.js's CryptoStream) const server_https = https.createServer(options, function(req, res) { console.log('got HTTPS request'); diff --git a/test/parallel/test-net-listen-error.js b/test/parallel/test-net-listen-error.js index d672226de7b37f..8d6e9d10b22e5c 100644 --- a/test/parallel/test-net-listen-error.js +++ b/test/parallel/test-net-listen-error.js @@ -4,5 +4,5 @@ const net = require('net'); const server = net.createServer(function(socket) { }); -server.listen(1, '1.1.1.1', common.mustNotCall()); // EACCESS or EADDRNOTAVAIL +server.listen(1, '1.1.1.1', common.mustNotCall()); // EACCES or EADDRNOTAVAIL server.on('error', common.mustCall(function(error) {})); diff --git a/test/parallel/test-net-pipe-connect-errors.js b/test/parallel/test-net-pipe-connect-errors.js index 1bb56c62c93ee9..1dfbe092073f29 100644 --- a/test/parallel/test-net-pipe-connect-errors.js +++ b/test/parallel/test-net-pipe-connect-errors.js @@ -16,7 +16,7 @@ if (common.isWindows) { emptyTxt = fixtures.path('empty.txt'); } else { common.refreshTmpDir(); - // Keep the file name very short so tht we don't exceed the 108 char limit + // Keep the file name very short so that we don't exceed the 108 char limit // on CI for a POSIX socket. Even though this isn't actually a socket file, // the error will be different from the one we are expecting if we exceed the // limit. 
diff --git a/test/parallel/test-os.js b/test/parallel/test-os.js index 1bd7e01eeed4bf..fb2d440eed1d6c 100644 --- a/test/parallel/test-os.js +++ b/test/parallel/test-os.js @@ -92,7 +92,7 @@ is.string(arch); assert.ok(arch.length > 0); if (!common.isSunOS) { - // not implemeneted yet + // not implemented yet assert.ok(os.loadavg().length > 0); assert.ok(os.freemem() > 0); assert.ok(os.totalmem() > 0); diff --git a/test/parallel/test-promises-unhandled-rejections.js b/test/parallel/test-promises-unhandled-rejections.js index 934073aecccdda..d5323db19aafd7 100644 --- a/test/parallel/test-promises-unhandled-rejections.js +++ b/test/parallel/test-promises-unhandled-rejections.js @@ -293,7 +293,7 @@ asyncTest('While inside setImmediate, catching a rejected promise derived ' + }); }); -// State adapation tests +// State adaptation tests asyncTest('catching a promise which is asynchronously rejected (via ' + 'resolution to an asynchronously-rejected promise) prevents' + ' unhandledRejection', function(done) { diff --git a/test/parallel/test-readline-interface.js b/test/parallel/test-readline-interface.js index 8da5982cff9ada..7840794e9739a6 100644 --- a/test/parallel/test-readline-interface.js +++ b/test/parallel/test-readline-interface.js @@ -765,7 +765,7 @@ function isWarned(emitter) { assert.strictEqual(isWarned(process.stdout._events), false); } - // can create a new readline Interface with a null output arugument + // can create a new readline Interface with a null output argument { const fi = new FakeInput(); const rli = new readline.Interface( diff --git a/test/parallel/test-repl.js b/test/parallel/test-repl.js index 44cd7f3383caeb..31ecbe2f2c790f 100644 --- a/test/parallel/test-repl.js +++ b/test/parallel/test-repl.js @@ -281,7 +281,7 @@ function error_test() { { client: client_unix, send: '/(.)(.)(.)(.)(.)(.)(.)(.)(.)/.test(\'123456789\')\n', expect: `true\n${prompt_unix}` }, - // the following test's result depends on the RegEx's match from the above + // the following test's result depends on the RegExp's match from the above { client: client_unix, send: 'RegExp.$1\nRegExp.$2\nRegExp.$3\nRegExp.$4\nRegExp.$5\n' + 'RegExp.$6\nRegExp.$7\nRegExp.$8\nRegExp.$9\n', diff --git a/test/parallel/test-stream2-transform.js b/test/parallel/test-stream2-transform.js index d2ca48f0f46ad5..b2968758cfa1cc 100644 --- a/test/parallel/test-stream2-transform.js +++ b/test/parallel/test-stream2-transform.js @@ -154,7 +154,7 @@ const Transform = require('_stream_transform'); } { - // Verify assymetric transform (expand) + // Verify asymmetric transform (expand) const pt = new Transform(); // emit each chunk 2 times. @@ -186,7 +186,7 @@ const Transform = require('_stream_transform'); } { - // Verify assymetric trasform (compress) + // Verify asymmetric transform (compress) const pt = new Transform(); // each output is the first char of 3 consecutive chunks, @@ -241,7 +241,7 @@ const Transform = require('_stream_transform'); // this tests for a stall when data is written to a full stream // that has empty transforms. 
{ - // Verify compex transform behavior + // Verify complex transform behavior let count = 0; let saved = null; const pt = new Transform({highWaterMark: 3}); diff --git a/test/parallel/test-stream3-cork-uncork.js b/test/parallel/test-stream3-cork-uncork.js index 2e8e86be1ef058..f8b411c84eded6 100644 --- a/test/parallel/test-stream3-cork-uncork.js +++ b/test/parallel/test-stream3-cork-uncork.js @@ -65,7 +65,7 @@ writeChunks(inputChunks, () => { // trigger writing out the buffer w.uncork(); - // buffered bytes shoud be seen in current tick + // buffered bytes should be seen in current tick assert.strictEqual(seenChunks.length, 4); // did the chunks match diff --git a/test/parallel/test-stringbytes-external.js b/test/parallel/test-stringbytes-external.js index 3775ecc13ec2dd..a2dae7786bbfab 100644 --- a/test/parallel/test-stringbytes-external.js +++ b/test/parallel/test-stringbytes-external.js @@ -61,7 +61,7 @@ assert.strictEqual(c_bin.toString('latin1'), ucs2_control); assert.strictEqual(c_ucs.toString('latin1'), ucs2_control); -// now let's test BASE64 and HEX ecoding/decoding +// now let's test BASE64 and HEX encoding/decoding const RADIOS = 2; const PRE_HALF_APEX = Math.ceil(EXTERN_APEX / 2) - RADIOS; const PRE_3OF4_APEX = Math.ceil((EXTERN_APEX / 4) * 3) - RADIOS; diff --git a/test/parallel/test-tls-cnnic-whitelist.js b/test/parallel/test-tls-cnnic-whitelist.js index 4887526435fa2d..80f188f36670a1 100644 --- a/test/parallel/test-tls-cnnic-whitelist.js +++ b/test/parallel/test-tls-cnnic-whitelist.js @@ -31,7 +31,7 @@ const testCases = [ errorCode: 'UNABLE_TO_VERIFY_LEAF_SIGNATURE' }, // Test 1: for the fix of node#2061 - // agent6-cert.pem is signed by intermidate cert of ca3. + // agent6-cert.pem is signed by intermediate cert of ca3. // The server has a cert chain of agent6->ca3->ca1(root) but // tls.connect should be failed with an error of // UNABLE_TO_GET_ISSUER_CERT_LOCALLY since the root CA of ca1 is not diff --git a/test/parallel/test-tls-server-verify.js b/test/parallel/test-tls-server-verify.js index 6f045235217ee8..51dfc06c565800 100644 --- a/test/parallel/test-tls-server-verify.js +++ b/test/parallel/test-tls-server-verify.js @@ -265,8 +265,8 @@ function runTest(port, testIndex) { let renegotiated = false; const server = tls.Server(serverOptions, function handleConnection(c) { c.on('error', function(e) { - // child.kill() leads ECONNRESET errro in the TLS connection of - // openssl s_client via spawn(). A Test result is already + // child.kill() leads ECONNRESET error in the TLS connection of + // openssl s_client via spawn(). A test result is already // checked by the data of client.stdout before child.kill() so // these tls errors can be ignored. }); From f81a69aefec99470b9527edf00da2c29e6db3bb3 Mon Sep 17 00:00:00 2001 From: Anna Henningsen Date: Tue, 13 Mar 2018 20:42:33 +0100 Subject: [PATCH 002/227] =?UTF-8?q?fs:=20fix=20`createReadStream(=E2=80=A6?= =?UTF-8?q?,=20{end:=20n})`=20for=20non-seekable=20fds?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit 82bdf8fba2d3f fixed an issue by silently modifying the `start` option for the case when only `end` is passed, in order to perform reads from a specified range in the file. However, that approach does not work for non-seekable files, since a numeric `start` option means that positioned reads will be used to read data from the file. This patch fixes that, and instead ends reading after a specified size by adjusting the read buffer size. 
This way we avoid re-introducing the bug that 82bdf8fba2d3f fixed, and align behaviour with the native file stream mechanism introduced in https://github.com/nodejs/node/pull/18936 as well. Backport-PR-URL: https://github.com/nodejs/node/pull/19411 PR-URL: https://github.com/nodejs/node/pull/19329 Fixes: https://github.com/nodejs/node/issues/19240 Refs: https://github.com/nodejs/node/pull/18121 Reviewed-By: James M Snell Reviewed-By: Matteo Collina Reviewed-By: Chen Gang --- lib/fs.js | 11 ++++++-- test/parallel/test-fs-read-stream.js | 39 ++++++++++++++++++++++++---- 2 files changed, 43 insertions(+), 7 deletions(-) diff --git a/lib/fs.js b/lib/fs.js index 5efccceb544d20..b0d117673d6bfc 100644 --- a/lib/fs.js +++ b/lib/fs.js @@ -1919,8 +1919,7 @@ function ReadStream(path, options) { this.flags = options.flags === undefined ? 'r' : options.flags; this.mode = options.mode === undefined ? 0o666 : options.mode; - this.start = typeof this.fd !== 'number' && options.start === undefined ? - 0 : options.start; + this.start = options.start; this.end = options.end; this.autoClose = options.autoClose === undefined ? true : options.autoClose; this.pos = undefined; @@ -1943,6 +1942,12 @@ function ReadStream(path, options) { this.pos = this.start; } + // Backwards compatibility: Make sure `end` is a number regardless of `start`. + // TODO(addaleax): Make the above typecheck not depend on `start` instead. + // (That is a semver-major change). + if (typeof this.end !== 'number') + this.end = Infinity; + if (typeof this.fd !== 'number') this.open(); @@ -1996,6 +2001,8 @@ ReadStream.prototype._read = function(n) { if (this.pos !== undefined) toRead = Math.min(this.end - this.pos + 1, toRead); + else + toRead = Math.min(this.end - this.bytesRead + 1, toRead); // already read everything we were supposed to read! // treat as EOF. diff --git a/test/parallel/test-fs-read-stream.js b/test/parallel/test-fs-read-stream.js index 95d5fbeaef9973..5161fd068b14d6 100644 --- a/test/parallel/test-fs-read-stream.js +++ b/test/parallel/test-fs-read-stream.js @@ -1,5 +1,7 @@ 'use strict'; const common = require('../common'); + +const child_process = require('child_process'); const assert = require('assert'); const fixtures = require('../common/fixtures'); @@ -146,11 +148,6 @@ stream.on('end', function() { })); } -// pause and then resume immediately. -const pauseRes = fs.createReadStream(rangeFile); -pauseRes.pause(); -pauseRes.resume(); - let file7 = fs.createReadStream(rangeFile, {autoClose: false }); file7.on('data', () => {}); file7.on('end', function() { @@ -173,6 +170,38 @@ function file7Next() { }); } +if (!common.isWindows) { + // Verify that end works when start is not specified, and we do not try to + // use positioned reads. This makes sure that this keeps working for + // non-seekable file descriptors. + common.refreshTmpDir(); + const filename = `${common.tmpDir}/foo.pipe`; + const mkfifoResult = child_process.spawnSync('mkfifo', [filename]); + if (!mkfifoResult.error) { + child_process.exec(`echo "xyz foobar" > '${filename}'`); + const stream = new fs.createReadStream(filename, { end: 1 }); + stream.data = ''; + + stream.on('data', function(chunk) { + stream.data += chunk; + }); + + stream.on('end', common.mustCall(function() { + assert.strictEqual('xy', stream.data); + fs.unlinkSync(filename); + })); + } else { + common.printSkipMessage('mkfifo not available'); + } +} + +{ + // pause and then resume immediately. 
+ const pauseRes = fs.createReadStream(rangeFile); + pauseRes.pause(); + pauseRes.resume(); +} + // Just to make sure autoClose won't close the stream because of error. const file8 = fs.createReadStream(null, {fd: 13337, autoClose: false }); file8.on('data', () => {}); From 4d2efa241535c7af32f1533139d73deacb3c86f2 Mon Sep 17 00:00:00 2001 From: Beth Griggs Date: Thu, 1 Mar 2018 12:23:31 +0000 Subject: [PATCH 003/227] test: remove mark flaky for moved test Remove 'flaky' in parallel.status for test-debug-signal-cluster as the test was moved to sequential. Refs: https://github.com/nodejs/node/pull/13592 PR-URL: https://github.com/nodejs/node/pull/19069 Reviewed-By: Gireesh Punathil Reviewed-By: Benjamin Gruenbaum Reviewed-By: Colin Ihrig Reviewed-By: James M Snell Reviewed-By: Rich Trott Reviewed-By: Richard Lau Reviewed-By: Gibson Fahnestock --- test/parallel/parallel.status | 4 ---- 1 file changed, 4 deletions(-) diff --git a/test/parallel/parallel.status b/test/parallel/parallel.status index 65da4e2c75ae20..b74b4a2c29c318 100644 --- a/test/parallel/parallel.status +++ b/test/parallel/parallel.status @@ -17,11 +17,7 @@ test-fs-read-buffer-tostring-fail : PASS,FLAKY test-npm-install: PASS,FLAKY [$system==solaris] # Also applies to SmartOS -test-debug-signal-cluster : PASS,FLAKY [$system==freebsd] [$system==aix] -#covered by https://github.com/nodejs/node/issues/3796 -# but more frequent on AIX ? -test-debug-signal-cluster : PASS, FLAKY \ No newline at end of file From 969398d08e062ec6dfe77de5cc6bc336f1a89dcc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tobias=20Nie=C3=9Fen?= Date: Mon, 18 Dec 2017 13:23:46 +0100 Subject: [PATCH 004/227] crypto: reuse variable instead of reevaluation Backport-PR-URL: https://github.com/nodejs/node/pull/19115 PR-URL: https://github.com/nodejs/node/pull/17735 Reviewed-By: Colin Ihrig Reviewed-By: Daniel Bevenius Reviewed-By: Jon Moss Reviewed-By: James M Snell Reviewed-By: Luigi Pinca --- src/node_crypto.cc | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/node_crypto.cc b/src/node_crypto.cc index fddb125a726696..3a9ebe7e97ad9f 100644 --- a/src/node_crypto.cc +++ b/src/node_crypto.cc @@ -3256,7 +3256,7 @@ void CipherBase::Init(const char* cipher_type, nullptr, reinterpret_cast(key), reinterpret_cast(iv), - kind_ == kCipher); + encrypt); initialised_ = true; } @@ -3324,7 +3324,7 @@ void CipherBase::InitIv(const char* cipher_type, nullptr, reinterpret_cast(key), reinterpret_cast(iv), - kind_ == kCipher); + encrypt); initialised_ = true; } From 46aed5800f7989f47bc134c6cd06124b11600087 Mon Sep 17 00:00:00 2001 From: Lance Ball Date: Wed, 22 Nov 2017 13:49:50 -0500 Subject: [PATCH 005/227] test: make common.mustNotCall show file:linenumber MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit When a test fails via `common.mustNotCall` it is sometimes hard to determine exactly what was called. This modification stores the caller's file and line number by using the V8 Error API to capture a stack at the time `common.mustNotCall()` is called. In the event of failure, this information is printed. This change also exposes a new function in test/common, `getCallSite()` which accepts a `function` and returns a `String` with the file name and line number for the function. 
Backport-PR-URL: https://github.com/nodejs/node/pull/19355 PR-URL: https://github.com/nodejs/node/pull/17257 Reviewed-By: James M Snell Reviewed-By: Michaël Zasso Reviewed-By: Gibson Fahnestock Reviewed-By: Tobias Nießen Reviewed-By: Joyee Cheung Reviewed-By: Sakthipriyan Vairamani Reviewed-By: Khaidi Chu --- test/common/README.md | 6 +++ test/common/index.js | 44 +++++++++++++++++++++- test/parallel/test-common-must-not-call.js | 26 +++++++++++++ 3 files changed, 75 insertions(+), 1 deletion(-) create mode 100644 test/parallel/test-common-must-not-call.js diff --git a/test/common/README.md b/test/common/README.md index 4e7e955487bd3f..8a6ce5f60456c5 100644 --- a/test/common/README.md +++ b/test/common/README.md @@ -94,6 +94,12 @@ Path to the 'fixtures' directory. Returns an instance of all possible `ArrayBufferView`s of the provided Buffer. +### getCallSite(func) +* `func` [<Function>] +* return [<String>] + +Returns the file name and line number for the provided Function. + ### globalCheck * [<Boolean>](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Data_structures#Boolean_type) diff --git a/test/common/index.js b/test/common/index.js index 7747cb57cf3ae7..d762f4e0aa8498 100644 --- a/test/common/index.js +++ b/test/common/index.js @@ -480,9 +480,51 @@ exports.fileExists = function(pathname) { } }; +exports.canCreateSymLink = function() { + // On Windows, creating symlinks requires admin privileges. + // We'll only try to run symlink test if we have enough privileges. + // On other platforms, creating symlinks shouldn't need admin privileges + if (exports.isWindows) { + // whoami.exe needs to be the one from System32 + // If unix tools are in the path, they can shadow the one we want, + // so use the full path while executing whoami + const whoamiPath = path.join(process.env['SystemRoot'], + 'System32', 'whoami.exe'); + + let err = false; + let output = ''; + + try { + output = execSync(`${whoamiPath} /priv`, { timout: 1000 }); + } catch (e) { + err = true; + } finally { + if (err || !output.includes('SeCreateSymbolicLinkPrivilege')) { + return false; + } + } + } + + return true; +}; + +exports.getCallSite = function getCallSite(top) { + const originalStackFormatter = Error.prepareStackTrace; + Error.prepareStackTrace = (err, stack) => + `${stack[0].getFileName()}:${stack[0].getLineNumber()}`; + const err = new Error(); + Error.captureStackTrace(err, top); + // with the V8 Error API, the stack is not formatted until it is accessed + err.stack; + Error.prepareStackTrace = originalStackFormatter; + return err.stack; +}; + exports.mustNotCall = function(msg) { + const callSite = exports.getCallSite(exports.mustNotCall); return function mustNotCall() { - assert.fail(msg || 'function should not have been called'); + assert.fail( + `${msg || 'function should not have been called'} at ${callSite}`); }; }; diff --git a/test/parallel/test-common-must-not-call.js b/test/parallel/test-common-must-not-call.js new file mode 100644 index 00000000000000..d70daabf0a4bd0 --- /dev/null +++ b/test/parallel/test-common-must-not-call.js @@ -0,0 +1,26 @@ +'use strict'; + +const common = require('../common'); +const assert = require('assert'); +const path = require('path'); + +const message = 'message'; +const testFunction = common.mustNotCall(message); + +const validateError = common.mustCall((e) => { + const prefix = `${message} at `; + assert.ok(e.message.startsWith(prefix)); + if (process.platform === 'win32') { + e.message = e.message.substring(2); // remove 'C:' + 
} + const [ fileName, lineNumber ] = e.message + .substring(prefix.length).split(':'); + assert.strictEqual(path.basename(fileName), 'test-common-must-not-call.js'); + assert.strictEqual(lineNumber, '8'); +}); + +try { + testFunction(); +} catch (e) { + validateError(e); +} From 5ca8dee8cbff435b62a574027ec5c2daa76eba52 Mon Sep 17 00:00:00 2001 From: Gabriel Schulhof Date: Thu, 15 Mar 2018 12:14:54 -0400 Subject: [PATCH 006/227] test: remove n-api intermediate files PR-URL: https://github.com/nodejs/node/pull/19375 Reviewed-By: Myles Borins --- test/addons-napi/1_hello_world/build/Makefile | 342 ---------- .../1_hello_world/build/binding.Makefile | 6 - .../1_hello_world/build/binding.target.mk | 178 ----- .../1_hello_world/build/config.gypi | 70 -- .../1_hello_world/build/gyp-mac-tool | 611 ------------------ .../2_function_arguments/build/Makefile | 342 ---------- .../build/binding.Makefile | 6 - .../build/binding.target.mk | 178 ----- .../2_function_arguments/build/config.gypi | 70 -- .../2_function_arguments/build/gyp-mac-tool | 611 ------------------ test/addons-napi/3_callbacks/build/Makefile | 342 ---------- .../3_callbacks/build/binding.Makefile | 6 - .../3_callbacks/build/binding.target.mk | 178 ----- .../addons-napi/3_callbacks/build/config.gypi | 70 -- .../3_callbacks/build/gyp-mac-tool | 611 ------------------ .../4_object_factory/build/Makefile | 342 ---------- .../4_object_factory/build/binding.Makefile | 6 - .../4_object_factory/build/binding.target.mk | 178 ----- .../4_object_factory/build/config.gypi | 70 -- .../4_object_factory/build/gyp-mac-tool | 611 ------------------ .../5_function_factory/build/Makefile | 342 ---------- .../5_function_factory/build/binding.Makefile | 6 - .../build/binding.target.mk | 178 ----- .../5_function_factory/build/config.gypi | 70 -- .../5_function_factory/build/gyp-mac-tool | 611 ------------------ test/addons-napi/6_object_wrap/build/Makefile | 342 ---------- .../6_object_wrap/build/binding.Makefile | 6 - .../6_object_wrap/build/binding.target.mk | 179 ----- .../6_object_wrap/build/config.gypi | 70 -- .../6_object_wrap/build/gyp-mac-tool | 611 ------------------ .../addons-napi/7_factory_wrap/build/Makefile | 342 ---------- .../7_factory_wrap/build/binding.Makefile | 6 - .../7_factory_wrap/build/binding.target.mk | 179 ----- .../7_factory_wrap/build/config.gypi | 70 -- .../7_factory_wrap/build/gyp-mac-tool | 611 ------------------ .../8_passing_wrapped/build/Makefile | 342 ---------- .../8_passing_wrapped/build/binding.Makefile | 6 - .../8_passing_wrapped/build/binding.target.mk | 179 ----- .../8_passing_wrapped/build/config.gypi | 70 -- .../8_passing_wrapped/build/gyp-mac-tool | 611 ------------------ test/addons-napi/test_array/build/Makefile | 342 ---------- .../test_array/build/binding.Makefile | 6 - test/addons-napi/test_array/build/config.gypi | 70 -- .../addons-napi/test_array/build/gyp-mac-tool | 611 ------------------ .../test_array/build/test_array.target.mk | 178 ----- test/addons-napi/test_async/build/Makefile | 342 ---------- .../test_async/build/binding.Makefile | 6 - test/addons-napi/test_async/build/config.gypi | 70 -- .../addons-napi/test_async/build/gyp-mac-tool | 611 ------------------ .../test_async/build/test_async.target.mk | 178 ----- test/addons-napi/test_buffer/build/Makefile | 342 ---------- .../test_buffer/build/binding.Makefile | 6 - .../addons-napi/test_buffer/build/config.gypi | 70 -- .../test_buffer/build/gyp-mac-tool | 611 ------------------ .../test_buffer/build/test_buffer.target.mk | 178 
----- .../test_constructor/build/Makefile | 342 ---------- .../test_constructor/build/binding.Makefile | 6 - .../test_constructor/build/config.gypi | 70 -- .../test_constructor/build/gyp-mac-tool | 611 ------------------ .../build/test_constructor.target.mk | 178 ----- .../test_conversions/build/Makefile | 342 ---------- .../test_conversions/build/binding.Makefile | 6 - .../test_conversions/build/config.gypi | 70 -- .../test_conversions/build/gyp-mac-tool | 611 ------------------ .../build/test_conversions.target.mk | 178 ----- test/addons-napi/test_dataview/build/Makefile | 342 ---------- .../test_dataview/build/binding.Makefile | 6 - .../test_dataview/build/config.gypi | 70 -- .../test_dataview/build/gyp-mac-tool | 611 ------------------ .../build/test_dataview.target.mk | 178 ----- .../test_env_sharing/build/Makefile | 347 ---------- .../test_env_sharing/build/binding.Makefile | 6 - .../build/compare_env.target.mk | 178 ----- .../test_env_sharing/build/config.gypi | 70 -- .../test_env_sharing/build/gyp-mac-tool | 611 ------------------ .../build/store_env.target.mk | 178 ----- test/addons-napi/test_error/build/Makefile | 342 ---------- .../test_error/build/binding.Makefile | 6 - test/addons-napi/test_error/build/config.gypi | 70 -- .../addons-napi/test_error/build/gyp-mac-tool | 611 ------------------ .../test_error/build/test_error.target.mk | 178 ----- .../addons-napi/test_exception/build/Makefile | 342 ---------- .../test_exception/build/binding.Makefile | 6 - .../test_exception/build/config.gypi | 70 -- .../test_exception/build/gyp-mac-tool | 611 ------------------ .../build/test_exception.target.mk | 178 ----- test/addons-napi/test_fatal/build/Makefile | 342 ---------- .../test_fatal/build/binding.Makefile | 6 - test/addons-napi/test_fatal/build/config.gypi | 70 -- .../addons-napi/test_fatal/build/gyp-mac-tool | 611 ------------------ .../test_fatal/build/test_fatal.target.mk | 178 ----- test/addons-napi/test_function/build/Makefile | 342 ---------- .../test_function/build/binding.Makefile | 6 - .../test_function/build/config.gypi | 70 -- .../test_function/build/gyp-mac-tool | 611 ------------------ .../build/test_function.target.mk | 178 ----- test/addons-napi/test_general/build/Makefile | 342 ---------- .../test_general/build/binding.Makefile | 6 - .../test_general/build/config.gypi | 70 -- .../test_general/build/gyp-mac-tool | 611 ------------------ .../test_general/build/test_general.target.mk | 178 ----- .../test_handle_scope/build/Makefile | 342 ---------- .../test_handle_scope/build/binding.Makefile | 6 - .../test_handle_scope/build/config.gypi | 70 -- .../test_handle_scope/build/gyp-mac-tool | 611 ------------------ .../build/test_handle_scope.target.mk | 178 ----- .../test_make_callback/build/Makefile | 342 ---------- .../test_make_callback/build/binding.Makefile | 6 - .../build/binding.target.mk | 178 ----- .../test_make_callback/build/config.gypi | 70 -- .../test_make_callback/build/gyp-mac-tool | 611 ------------------ .../test_make_callback_recurse/build/Makefile | 342 ---------- .../build/binding.Makefile | 6 - .../build/binding.target.mk | 178 ----- .../build/config.gypi | 70 -- .../build/gyp-mac-tool | 611 ------------------ test/addons-napi/test_number/build/Makefile | 342 ---------- .../test_number/build/binding.Makefile | 6 - .../addons-napi/test_number/build/config.gypi | 70 -- .../test_number/build/gyp-mac-tool | 611 ------------------ .../test_number/build/test_number.target.mk | 178 ----- test/addons-napi/test_object/build/Makefile | 342 ---------- 
.../test_object/build/binding.Makefile | 6 - .../addons-napi/test_object/build/config.gypi | 70 -- .../test_object/build/gyp-mac-tool | 611 ------------------ .../test_object/build/test_object.target.mk | 178 ----- test/addons-napi/test_promise/build/Makefile | 342 ---------- .../test_promise/build/binding.Makefile | 6 - .../test_promise/build/config.gypi | 70 -- .../test_promise/build/gyp-mac-tool | 611 ------------------ .../test_promise/build/test_promise.target.mk | 178 ----- .../test_properties/build/Makefile | 342 ---------- .../test_properties/build/binding.Makefile | 6 - .../test_properties/build/config.gypi | 70 -- .../test_properties/build/gyp-mac-tool | 611 ------------------ .../build/test_properties.target.mk | 178 ----- .../addons-napi/test_reference/build/Makefile | 342 ---------- .../test_reference/build/binding.Makefile | 6 - .../test_reference/build/config.gypi | 70 -- .../test_reference/build/gyp-mac-tool | 611 ------------------ .../build/test_reference.target.mk | 178 ----- test/addons-napi/test_string/build/Makefile | 342 ---------- .../test_string/build/binding.Makefile | 6 - .../addons-napi/test_string/build/config.gypi | 70 -- .../test_string/build/gyp-mac-tool | 611 ------------------ .../test_string/build/test_string.target.mk | 178 ----- test/addons-napi/test_symbol/build/Makefile | 342 ---------- .../test_symbol/build/binding.Makefile | 6 - .../addons-napi/test_symbol/build/config.gypi | 70 -- .../test_symbol/build/gyp-mac-tool | 611 ------------------ .../test_symbol/build/test_symbol.target.mk | 178 ----- .../test_typedarray/build/Makefile | 342 ---------- .../test_typedarray/build/binding.Makefile | 6 - .../test_typedarray/build/config.gypi | 70 -- .../test_typedarray/build/gyp-mac-tool | 611 ------------------ .../build/test_typedarray.target.mk | 178 ----- 156 files changed, 37603 deletions(-) delete mode 100644 test/addons-napi/1_hello_world/build/Makefile delete mode 100644 test/addons-napi/1_hello_world/build/binding.Makefile delete mode 100644 test/addons-napi/1_hello_world/build/binding.target.mk delete mode 100644 test/addons-napi/1_hello_world/build/config.gypi delete mode 100755 test/addons-napi/1_hello_world/build/gyp-mac-tool delete mode 100644 test/addons-napi/2_function_arguments/build/Makefile delete mode 100644 test/addons-napi/2_function_arguments/build/binding.Makefile delete mode 100644 test/addons-napi/2_function_arguments/build/binding.target.mk delete mode 100644 test/addons-napi/2_function_arguments/build/config.gypi delete mode 100755 test/addons-napi/2_function_arguments/build/gyp-mac-tool delete mode 100644 test/addons-napi/3_callbacks/build/Makefile delete mode 100644 test/addons-napi/3_callbacks/build/binding.Makefile delete mode 100644 test/addons-napi/3_callbacks/build/binding.target.mk delete mode 100644 test/addons-napi/3_callbacks/build/config.gypi delete mode 100755 test/addons-napi/3_callbacks/build/gyp-mac-tool delete mode 100644 test/addons-napi/4_object_factory/build/Makefile delete mode 100644 test/addons-napi/4_object_factory/build/binding.Makefile delete mode 100644 test/addons-napi/4_object_factory/build/binding.target.mk delete mode 100644 test/addons-napi/4_object_factory/build/config.gypi delete mode 100755 test/addons-napi/4_object_factory/build/gyp-mac-tool delete mode 100644 test/addons-napi/5_function_factory/build/Makefile delete mode 100644 test/addons-napi/5_function_factory/build/binding.Makefile delete mode 100644 test/addons-napi/5_function_factory/build/binding.target.mk delete mode 100644 
test/addons-napi/5_function_factory/build/config.gypi delete mode 100755 test/addons-napi/5_function_factory/build/gyp-mac-tool delete mode 100644 test/addons-napi/6_object_wrap/build/Makefile delete mode 100644 test/addons-napi/6_object_wrap/build/binding.Makefile delete mode 100644 test/addons-napi/6_object_wrap/build/binding.target.mk delete mode 100644 test/addons-napi/6_object_wrap/build/config.gypi delete mode 100755 test/addons-napi/6_object_wrap/build/gyp-mac-tool delete mode 100644 test/addons-napi/7_factory_wrap/build/Makefile delete mode 100644 test/addons-napi/7_factory_wrap/build/binding.Makefile delete mode 100644 test/addons-napi/7_factory_wrap/build/binding.target.mk delete mode 100644 test/addons-napi/7_factory_wrap/build/config.gypi delete mode 100755 test/addons-napi/7_factory_wrap/build/gyp-mac-tool delete mode 100644 test/addons-napi/8_passing_wrapped/build/Makefile delete mode 100644 test/addons-napi/8_passing_wrapped/build/binding.Makefile delete mode 100644 test/addons-napi/8_passing_wrapped/build/binding.target.mk delete mode 100644 test/addons-napi/8_passing_wrapped/build/config.gypi delete mode 100755 test/addons-napi/8_passing_wrapped/build/gyp-mac-tool delete mode 100644 test/addons-napi/test_array/build/Makefile delete mode 100644 test/addons-napi/test_array/build/binding.Makefile delete mode 100644 test/addons-napi/test_array/build/config.gypi delete mode 100755 test/addons-napi/test_array/build/gyp-mac-tool delete mode 100644 test/addons-napi/test_array/build/test_array.target.mk delete mode 100644 test/addons-napi/test_async/build/Makefile delete mode 100644 test/addons-napi/test_async/build/binding.Makefile delete mode 100644 test/addons-napi/test_async/build/config.gypi delete mode 100755 test/addons-napi/test_async/build/gyp-mac-tool delete mode 100644 test/addons-napi/test_async/build/test_async.target.mk delete mode 100644 test/addons-napi/test_buffer/build/Makefile delete mode 100644 test/addons-napi/test_buffer/build/binding.Makefile delete mode 100644 test/addons-napi/test_buffer/build/config.gypi delete mode 100755 test/addons-napi/test_buffer/build/gyp-mac-tool delete mode 100644 test/addons-napi/test_buffer/build/test_buffer.target.mk delete mode 100644 test/addons-napi/test_constructor/build/Makefile delete mode 100644 test/addons-napi/test_constructor/build/binding.Makefile delete mode 100644 test/addons-napi/test_constructor/build/config.gypi delete mode 100755 test/addons-napi/test_constructor/build/gyp-mac-tool delete mode 100644 test/addons-napi/test_constructor/build/test_constructor.target.mk delete mode 100644 test/addons-napi/test_conversions/build/Makefile delete mode 100644 test/addons-napi/test_conversions/build/binding.Makefile delete mode 100644 test/addons-napi/test_conversions/build/config.gypi delete mode 100755 test/addons-napi/test_conversions/build/gyp-mac-tool delete mode 100644 test/addons-napi/test_conversions/build/test_conversions.target.mk delete mode 100644 test/addons-napi/test_dataview/build/Makefile delete mode 100644 test/addons-napi/test_dataview/build/binding.Makefile delete mode 100644 test/addons-napi/test_dataview/build/config.gypi delete mode 100755 test/addons-napi/test_dataview/build/gyp-mac-tool delete mode 100644 test/addons-napi/test_dataview/build/test_dataview.target.mk delete mode 100644 test/addons-napi/test_env_sharing/build/Makefile delete mode 100644 test/addons-napi/test_env_sharing/build/binding.Makefile delete mode 100644 test/addons-napi/test_env_sharing/build/compare_env.target.mk delete mode 
100644 test/addons-napi/test_env_sharing/build/config.gypi delete mode 100755 test/addons-napi/test_env_sharing/build/gyp-mac-tool delete mode 100644 test/addons-napi/test_env_sharing/build/store_env.target.mk delete mode 100644 test/addons-napi/test_error/build/Makefile delete mode 100644 test/addons-napi/test_error/build/binding.Makefile delete mode 100644 test/addons-napi/test_error/build/config.gypi delete mode 100755 test/addons-napi/test_error/build/gyp-mac-tool delete mode 100644 test/addons-napi/test_error/build/test_error.target.mk delete mode 100644 test/addons-napi/test_exception/build/Makefile delete mode 100644 test/addons-napi/test_exception/build/binding.Makefile delete mode 100644 test/addons-napi/test_exception/build/config.gypi delete mode 100755 test/addons-napi/test_exception/build/gyp-mac-tool delete mode 100644 test/addons-napi/test_exception/build/test_exception.target.mk delete mode 100644 test/addons-napi/test_fatal/build/Makefile delete mode 100644 test/addons-napi/test_fatal/build/binding.Makefile delete mode 100644 test/addons-napi/test_fatal/build/config.gypi delete mode 100755 test/addons-napi/test_fatal/build/gyp-mac-tool delete mode 100644 test/addons-napi/test_fatal/build/test_fatal.target.mk delete mode 100644 test/addons-napi/test_function/build/Makefile delete mode 100644 test/addons-napi/test_function/build/binding.Makefile delete mode 100644 test/addons-napi/test_function/build/config.gypi delete mode 100755 test/addons-napi/test_function/build/gyp-mac-tool delete mode 100644 test/addons-napi/test_function/build/test_function.target.mk delete mode 100644 test/addons-napi/test_general/build/Makefile delete mode 100644 test/addons-napi/test_general/build/binding.Makefile delete mode 100644 test/addons-napi/test_general/build/config.gypi delete mode 100755 test/addons-napi/test_general/build/gyp-mac-tool delete mode 100644 test/addons-napi/test_general/build/test_general.target.mk delete mode 100644 test/addons-napi/test_handle_scope/build/Makefile delete mode 100644 test/addons-napi/test_handle_scope/build/binding.Makefile delete mode 100644 test/addons-napi/test_handle_scope/build/config.gypi delete mode 100755 test/addons-napi/test_handle_scope/build/gyp-mac-tool delete mode 100644 test/addons-napi/test_handle_scope/build/test_handle_scope.target.mk delete mode 100644 test/addons-napi/test_make_callback/build/Makefile delete mode 100644 test/addons-napi/test_make_callback/build/binding.Makefile delete mode 100644 test/addons-napi/test_make_callback/build/binding.target.mk delete mode 100644 test/addons-napi/test_make_callback/build/config.gypi delete mode 100755 test/addons-napi/test_make_callback/build/gyp-mac-tool delete mode 100644 test/addons-napi/test_make_callback_recurse/build/Makefile delete mode 100644 test/addons-napi/test_make_callback_recurse/build/binding.Makefile delete mode 100644 test/addons-napi/test_make_callback_recurse/build/binding.target.mk delete mode 100644 test/addons-napi/test_make_callback_recurse/build/config.gypi delete mode 100755 test/addons-napi/test_make_callback_recurse/build/gyp-mac-tool delete mode 100644 test/addons-napi/test_number/build/Makefile delete mode 100644 test/addons-napi/test_number/build/binding.Makefile delete mode 100644 test/addons-napi/test_number/build/config.gypi delete mode 100755 test/addons-napi/test_number/build/gyp-mac-tool delete mode 100644 test/addons-napi/test_number/build/test_number.target.mk delete mode 100644 test/addons-napi/test_object/build/Makefile delete mode 100644 
test/addons-napi/test_object/build/binding.Makefile delete mode 100644 test/addons-napi/test_object/build/config.gypi delete mode 100755 test/addons-napi/test_object/build/gyp-mac-tool delete mode 100644 test/addons-napi/test_object/build/test_object.target.mk delete mode 100644 test/addons-napi/test_promise/build/Makefile delete mode 100644 test/addons-napi/test_promise/build/binding.Makefile delete mode 100644 test/addons-napi/test_promise/build/config.gypi delete mode 100755 test/addons-napi/test_promise/build/gyp-mac-tool delete mode 100644 test/addons-napi/test_promise/build/test_promise.target.mk delete mode 100644 test/addons-napi/test_properties/build/Makefile delete mode 100644 test/addons-napi/test_properties/build/binding.Makefile delete mode 100644 test/addons-napi/test_properties/build/config.gypi delete mode 100755 test/addons-napi/test_properties/build/gyp-mac-tool delete mode 100644 test/addons-napi/test_properties/build/test_properties.target.mk delete mode 100644 test/addons-napi/test_reference/build/Makefile delete mode 100644 test/addons-napi/test_reference/build/binding.Makefile delete mode 100644 test/addons-napi/test_reference/build/config.gypi delete mode 100755 test/addons-napi/test_reference/build/gyp-mac-tool delete mode 100644 test/addons-napi/test_reference/build/test_reference.target.mk delete mode 100644 test/addons-napi/test_string/build/Makefile delete mode 100644 test/addons-napi/test_string/build/binding.Makefile delete mode 100644 test/addons-napi/test_string/build/config.gypi delete mode 100755 test/addons-napi/test_string/build/gyp-mac-tool delete mode 100644 test/addons-napi/test_string/build/test_string.target.mk delete mode 100644 test/addons-napi/test_symbol/build/Makefile delete mode 100644 test/addons-napi/test_symbol/build/binding.Makefile delete mode 100644 test/addons-napi/test_symbol/build/config.gypi delete mode 100755 test/addons-napi/test_symbol/build/gyp-mac-tool delete mode 100644 test/addons-napi/test_symbol/build/test_symbol.target.mk delete mode 100644 test/addons-napi/test_typedarray/build/Makefile delete mode 100644 test/addons-napi/test_typedarray/build/binding.Makefile delete mode 100644 test/addons-napi/test_typedarray/build/config.gypi delete mode 100755 test/addons-napi/test_typedarray/build/gyp-mac-tool delete mode 100644 test/addons-napi/test_typedarray/build/test_typedarray.target.mk diff --git a/test/addons-napi/1_hello_world/build/Makefile b/test/addons-napi/1_hello_world/build/Makefile deleted file mode 100644 index cb2e7e62b06c84..00000000000000 --- a/test/addons-napi/1_hello_world/build/Makefile +++ /dev/null @@ -1,342 +0,0 @@ -# We borrow heavily from the kernel build setup, though we are simpler since -# we don't have Kconfig tweaking settings on us. - -# The implicit make rules have it looking for RCS files, among other things. -# We instead explicitly write all the rules we care about. -# It's even quicker (saves ~200ms) to pass -r on the command line. -MAKEFLAGS=-r - -# The source directory tree. -srcdir := .. -abs_srcdir := $(abspath $(srcdir)) - -# The name of the builddir. -builddir_name ?= . - -# The V=1 flag on command line makes us verbosely print command lines. -ifdef V - quiet= -else - quiet=quiet_ -endif - -# Specify BUILDTYPE=Release on the command line for a release build. -BUILDTYPE ?= Release - -# Directory all our build output goes into. -# Note that this must be two directories beneath src/ for unit tests to pass, -# as they reach into the src/ directory for data with relative paths. 
-builddir ?= $(builddir_name)/$(BUILDTYPE) -abs_builddir := $(abspath $(builddir)) -depsdir := $(builddir)/.deps - -# Object output directory. -obj := $(builddir)/obj -abs_obj := $(abspath $(obj)) - -# We build up a list of every single one of the targets so we can slurp in the -# generated dependency rule Makefiles in one pass. -all_deps := - - - -CC.target ?= $(CC) -CFLAGS.target ?= $(CPPFLAGS) $(CFLAGS) -CXX.target ?= $(CXX) -CXXFLAGS.target ?= $(CPPFLAGS) $(CXXFLAGS) -LINK.target ?= $(LINK) -LDFLAGS.target ?= $(LDFLAGS) -AR.target ?= $(AR) - -# C++ apps need to be linked with g++. -LINK ?= $(CXX.target) - -# TODO(evan): move all cross-compilation logic to gyp-time so we don't need -# to replicate this environment fallback in make as well. -CC.host ?= gcc -CFLAGS.host ?= $(CPPFLAGS_host) $(CFLAGS_host) -CXX.host ?= g++ -CXXFLAGS.host ?= $(CPPFLAGS_host) $(CXXFLAGS_host) -LINK.host ?= $(CXX.host) -LDFLAGS.host ?= -AR.host ?= ar - -# Define a dir function that can handle spaces. -# http://www.gnu.org/software/make/manual/make.html#Syntax-of-Functions -# "leading spaces cannot appear in the text of the first argument as written. -# These characters can be put into the argument value by variable substitution." -empty := -space := $(empty) $(empty) - -# http://stackoverflow.com/questions/1189781/using-make-dir-or-notdir-on-a-path-with-spaces -replace_spaces = $(subst $(space),?,$1) -unreplace_spaces = $(subst ?,$(space),$1) -dirx = $(call unreplace_spaces,$(dir $(call replace_spaces,$1))) - -# Flags to make gcc output dependency info. Note that you need to be -# careful here to use the flags that ccache and distcc can understand. -# We write to a dep file on the side first and then rename at the end -# so we can't end up with a broken dep file. -depfile = $(depsdir)/$(call replace_spaces,$@).d -DEPFLAGS = -MMD -MF $(depfile).raw - -# We have to fixup the deps output in a few ways. -# (1) the file output should mention the proper .o file. -# ccache or distcc lose the path to the target, so we convert a rule of -# the form: -# foobar.o: DEP1 DEP2 -# into -# path/to/foobar.o: DEP1 DEP2 -# (2) we want missing files not to cause us to fail to build. -# We want to rewrite -# foobar.o: DEP1 DEP2 \ -# DEP3 -# to -# DEP1: -# DEP2: -# DEP3: -# so if the files are missing, they're just considered phony rules. -# We have to do some pretty insane escaping to get those backslashes -# and dollar signs past make, the shell, and sed at the same time. -# Doesn't work with spaces, but that's fine: .d files have spaces in -# their names replaced with other characters. -define fixup_dep -# The depfile may not exist if the input file didn't have any #includes. -touch $(depfile).raw -# Fixup path as in (1). -sed -e "s|^$(notdir $@)|$@|" $(depfile).raw >> $(depfile) -# Add extra rules as in (2). -# We remove slashes and replace spaces with new lines; -# remove blank lines; -# delete the first line and append a colon to the remaining lines. -sed -e 's|\\||' -e 'y| |\n|' $(depfile).raw |\ - grep -v '^$$' |\ - sed -e 1d -e 's|$$|:|' \ - >> $(depfile) -rm $(depfile).raw -endef - -# Command definitions: -# - cmd_foo is the actual command to run; -# - quiet_cmd_foo is the brief-output summary of the command. 
- -quiet_cmd_cc = CC($(TOOLSET)) $@ -cmd_cc = $(CC.$(TOOLSET)) $(GYP_CFLAGS) $(DEPFLAGS) $(CFLAGS.$(TOOLSET)) -c -o $@ $< - -quiet_cmd_cxx = CXX($(TOOLSET)) $@ -cmd_cxx = $(CXX.$(TOOLSET)) $(GYP_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< - -quiet_cmd_objc = CXX($(TOOLSET)) $@ -cmd_objc = $(CC.$(TOOLSET)) $(GYP_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $< - -quiet_cmd_objcxx = CXX($(TOOLSET)) $@ -cmd_objcxx = $(CXX.$(TOOLSET)) $(GYP_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $< - -# Commands for precompiled header files. -quiet_cmd_pch_c = CXX($(TOOLSET)) $@ -cmd_pch_c = $(CC.$(TOOLSET)) $(GYP_PCH_CFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< -quiet_cmd_pch_cc = CXX($(TOOLSET)) $@ -cmd_pch_cc = $(CC.$(TOOLSET)) $(GYP_PCH_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< -quiet_cmd_pch_m = CXX($(TOOLSET)) $@ -cmd_pch_m = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $< -quiet_cmd_pch_mm = CXX($(TOOLSET)) $@ -cmd_pch_mm = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $< - -# gyp-mac-tool is written next to the root Makefile by gyp. -# Use $(4) for the command, since $(2) and $(3) are used as flag by do_cmd -# already. -quiet_cmd_mac_tool = MACTOOL $(4) $< -cmd_mac_tool = ./gyp-mac-tool $(4) $< "$@" - -quiet_cmd_mac_package_framework = PACKAGE FRAMEWORK $@ -cmd_mac_package_framework = ./gyp-mac-tool package-framework "$@" $(4) - -quiet_cmd_infoplist = INFOPLIST $@ -cmd_infoplist = $(CC.$(TOOLSET)) -E -P -Wno-trigraphs -x c $(INFOPLIST_DEFINES) "$<" -o "$@" - -quiet_cmd_touch = TOUCH $@ -cmd_touch = touch $@ - -quiet_cmd_copy = COPY $@ -# send stderr to /dev/null to ignore messages when linking directories. -cmd_copy = rm -rf "$@" && cp -af "$<" "$@" - -quiet_cmd_alink = LIBTOOL-STATIC $@ -cmd_alink = rm -f $@ && ./gyp-mac-tool filter-libtool libtool $(GYP_LIBTOOLFLAGS) -static -o $@ $(filter %.o,$^) - -quiet_cmd_link = LINK($(TOOLSET)) $@ -cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS) - -quiet_cmd_solink = SOLINK($(TOOLSET)) $@ -cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS) - -quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@ -cmd_solink_module = $(LINK.$(TOOLSET)) -bundle $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS) - - -# Define an escape_quotes function to escape single quotes. -# This allows us to handle quotes properly as long as we always use -# use single quotes and escape_quotes. -escape_quotes = $(subst ','\'',$(1)) -# This comment is here just to include a ' to unconfuse syntax highlighting. -# Define an escape_vars function to escape '$' variable syntax. -# This allows us to read/write command lines with shell variables (e.g. -# $LD_LIBRARY_PATH), without triggering make substitution. -escape_vars = $(subst $$,$$$$,$(1)) -# Helper that expands to a shell command to echo a string exactly as it is in -# make. This uses printf instead of echo because printf's behaviour with respect -# to escape sequences is more portable than echo's across different shells -# (e.g., dash, bash). -exact_echo = printf '%s\n' '$(call escape_quotes,$(1))' - -# Helper to compare the command we're about to run against the command -# we logged the last time we ran the command. Produces an empty -# string (false) when the commands match. -# Tricky point: Make has no string-equality test function. -# The kernel uses the following, but it seems like it would have false -# positives, where one string reordered its arguments. 
-# arg_check = $(strip $(filter-out $(cmd_$(1)), $(cmd_$@)) \ -# $(filter-out $(cmd_$@), $(cmd_$(1)))) -# We instead substitute each for the empty string into the other, and -# say they're equal if both substitutions produce the empty string. -# .d files contain ? instead of spaces, take that into account. -command_changed = $(or $(subst $(cmd_$(1)),,$(cmd_$(call replace_spaces,$@))),\ - $(subst $(cmd_$(call replace_spaces,$@)),,$(cmd_$(1)))) - -# Helper that is non-empty when a prerequisite changes. -# Normally make does this implicitly, but we force rules to always run -# so we can check their command lines. -# $? -- new prerequisites -# $| -- order-only dependencies -prereq_changed = $(filter-out FORCE_DO_CMD,$(filter-out $|,$?)) - -# Helper that executes all postbuilds until one fails. -define do_postbuilds - @E=0;\ - for p in $(POSTBUILDS); do\ - eval $$p;\ - E=$$?;\ - if [ $$E -ne 0 ]; then\ - break;\ - fi;\ - done;\ - if [ $$E -ne 0 ]; then\ - rm -rf "$@";\ - exit $$E;\ - fi -endef - -# do_cmd: run a command via the above cmd_foo names, if necessary. -# Should always run for a given target to handle command-line changes. -# Second argument, if non-zero, makes it do asm/C/C++ dependency munging. -# Third argument, if non-zero, makes it do POSTBUILDS processing. -# Note: We intentionally do NOT call dirx for depfile, since it contains ? for -# spaces already and dirx strips the ? characters. -define do_cmd -$(if $(or $(command_changed),$(prereq_changed)), - @$(call exact_echo, $($(quiet)cmd_$(1))) - @mkdir -p "$(call dirx,$@)" "$(dir $(depfile))" - $(if $(findstring flock,$(word 2,$(cmd_$1))), - @$(cmd_$(1)) - @echo " $(quiet_cmd_$(1)): Finished", - @$(cmd_$(1)) - ) - @$(call exact_echo,$(call escape_vars,cmd_$(call replace_spaces,$@) := $(cmd_$(1)))) > $(depfile) - @$(if $(2),$(fixup_dep)) - $(if $(and $(3), $(POSTBUILDS)), - $(call do_postbuilds) - ) -) -endef - -# Declare the "all" target first so it is the default, -# even though we don't have the deps yet. -.PHONY: all -all: - -# make looks for ways to re-generate included makefiles, but in our case, we -# don't have a direct way. Explicitly telling make that it has nothing to do -# for them makes it go faster. -%.d: ; - -# Use FORCE_DO_CMD to force a target to run. Should be coupled with -# do_cmd. -.PHONY: FORCE_DO_CMD -FORCE_DO_CMD: - -TOOLSET := target -# Suffix rules, putting all outputs into $(obj). -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.m FORCE_DO_CMD - @$(call do_cmd,objc,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.mm FORCE_DO_CMD - @$(call do_cmd,objcxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.S FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.s FORCE_DO_CMD - @$(call do_cmd,cc,1) - -# Try building from generated source, too. 
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.m FORCE_DO_CMD - @$(call do_cmd,objc,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.mm FORCE_DO_CMD - @$(call do_cmd,objcxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.S FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.s FORCE_DO_CMD - @$(call do_cmd,cc,1) - -$(obj).$(TOOLSET)/%.o: $(obj)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.m FORCE_DO_CMD - @$(call do_cmd,objc,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.mm FORCE_DO_CMD - @$(call do_cmd,objcxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.S FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.s FORCE_DO_CMD - @$(call do_cmd,cc,1) - - -ifeq ($(strip $(foreach prefix,$(NO_LOAD),\ - $(findstring $(join ^,$(prefix)),\ - $(join ^,binding.target.mk)))),) - include binding.target.mk -endif - -quiet_cmd_regen_makefile = ACTION Regenerating $@ -cmd_regen_makefile = cd $(srcdir); /Users/trott/io.js/deps/npm/node_modules/node-gyp/gyp/gyp_main.py -fmake --ignore-environment "--toplevel-dir=." -I/Users/trott/io.js/test/addons-napi/1_hello_world/build/config.gypi -I/Users/trott/io.js/deps/npm/node_modules/node-gyp/addon.gypi -I/Users/trott/io.js/common.gypi "--depth=." "-Goutput_dir=." "--generator-output=build" "-Dlibrary=shared_library" "-Dvisibility=default" "-Dnode_root_dir=/Users/trott/io.js" "-Dnode_gyp_dir=/Users/trott/io.js/deps/npm/node_modules/node-gyp" "-Dnode_lib_file=/Users/trott/io.js/$(Configuration)/node.lib" "-Dmodule_root_dir=/Users/trott/io.js/test/addons-napi/1_hello_world" "-Dnode_engine=v8" binding.gyp -Makefile: $(srcdir)/../../../deps/npm/node_modules/node-gyp/addon.gypi $(srcdir)/build/config.gypi $(srcdir)/binding.gyp $(srcdir)/../../../common.gypi - $(call do_cmd,regen_makefile) - -# "all" is a concatenation of the "all" targets from all the included -# sub-makefiles. This is just here to clarify. -all: - -# Add in dependency-tracking rules. $(all_deps) is the list of every single -# target in our tree. Only consider the ones with .d (dependency) info: -d_files := $(wildcard $(foreach f,$(all_deps),$(depsdir)/$(f).d)) -ifneq ($(d_files),) - include $(d_files) -endif diff --git a/test/addons-napi/1_hello_world/build/binding.Makefile b/test/addons-napi/1_hello_world/build/binding.Makefile deleted file mode 100644 index 0556c70f61905f..00000000000000 --- a/test/addons-napi/1_hello_world/build/binding.Makefile +++ /dev/null @@ -1,6 +0,0 @@ -# This file is generated by gyp; do not edit. - -export builddir_name ?= ./build/. -.PHONY: all -all: - $(MAKE) binding diff --git a/test/addons-napi/1_hello_world/build/binding.target.mk b/test/addons-napi/1_hello_world/build/binding.target.mk deleted file mode 100644 index a707bab99ab136..00000000000000 --- a/test/addons-napi/1_hello_world/build/binding.target.mk +++ /dev/null @@ -1,178 +0,0 @@ -# This file is generated by gyp; do not edit. 
- -TOOLSET := target -TARGET := binding -DEFS_Debug := \ - '-DNODE_GYP_MODULE_NAME=binding' \ - '-DUSING_UV_SHARED=1' \ - '-DUSING_V8_SHARED=1' \ - '-DV8_DEPRECATION_WARNINGS=1' \ - '-D_DARWIN_USE_64_BIT_INODE=1' \ - '-D_LARGEFILE_SOURCE' \ - '-D_FILE_OFFSET_BITS=64' \ - '-DBUILDING_NODE_EXTENSION' \ - '-DDEBUG' \ - '-D_DEBUG' \ - '-DV8_ENABLE_CHECKS' - -# Flags passed to all source files. -CFLAGS_Debug := \ - -O0 \ - -gdwarf-2 \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -Wall \ - -Wendif-labels \ - -W \ - -Wno-unused-parameter - -# Flags passed to only C files. -CFLAGS_C_Debug := \ - -fno-strict-aliasing - -# Flags passed to only C++ files. -CFLAGS_CC_Debug := \ - -std=gnu++0x \ - -stdlib=libc++ \ - -fno-rtti \ - -fno-exceptions \ - -fno-threadsafe-statics \ - -fno-strict-aliasing - -# Flags passed to only ObjC files. -CFLAGS_OBJC_Debug := - -# Flags passed to only ObjC++ files. -CFLAGS_OBJCC_Debug := - -INCS_Debug := \ - -I/Users/trott/io.js/include/node \ - -I/Users/trott/io.js/src \ - -I/Users/trott/io.js/deps/uv/include \ - -I/Users/trott/io.js/deps/v8/include - -DEFS_Release := \ - '-DNODE_GYP_MODULE_NAME=binding' \ - '-DUSING_UV_SHARED=1' \ - '-DUSING_V8_SHARED=1' \ - '-DV8_DEPRECATION_WARNINGS=1' \ - '-D_DARWIN_USE_64_BIT_INODE=1' \ - '-D_LARGEFILE_SOURCE' \ - '-D_FILE_OFFSET_BITS=64' \ - '-DBUILDING_NODE_EXTENSION' - -# Flags passed to all source files. -CFLAGS_Release := \ - -Os \ - -gdwarf-2 \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -Wall \ - -Wendif-labels \ - -W \ - -Wno-unused-parameter - -# Flags passed to only C files. -CFLAGS_C_Release := \ - -fno-strict-aliasing - -# Flags passed to only C++ files. -CFLAGS_CC_Release := \ - -std=gnu++0x \ - -stdlib=libc++ \ - -fno-rtti \ - -fno-exceptions \ - -fno-threadsafe-statics \ - -fno-strict-aliasing - -# Flags passed to only ObjC files. -CFLAGS_OBJC_Release := - -# Flags passed to only ObjC++ files. -CFLAGS_OBJCC_Release := - -INCS_Release := \ - -I/Users/trott/io.js/include/node \ - -I/Users/trott/io.js/src \ - -I/Users/trott/io.js/deps/uv/include \ - -I/Users/trott/io.js/deps/v8/include - -OBJS := \ - $(obj).target/$(TARGET)/binding.o - -# Add to the list of files we specially track dependencies for. -all_deps += $(OBJS) - -# CFLAGS et al overrides must be target-local. -# See "Target-specific Variable Values" in the GNU Make manual. -$(OBJS): TOOLSET := $(TOOLSET) -$(OBJS): GYP_CFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) -$(OBJS): GYP_CXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) -$(OBJS): GYP_OBJCFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) $(CFLAGS_OBJC_$(BUILDTYPE)) -$(OBJS): GYP_OBJCXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) $(CFLAGS_OBJCC_$(BUILDTYPE)) - -# Suffix rules, putting all outputs into $(obj). - -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) - -# Try building from generated source, too. - -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) - -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) - -# End of this set of suffix rules -### Rules for final target. 
-LDFLAGS_Debug := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -L$(builddir) \ - -stdlib=libc++ - -LIBTOOLFLAGS_Debug := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first - -LDFLAGS_Release := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -L$(builddir) \ - -stdlib=libc++ - -LIBTOOLFLAGS_Release := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first - -LIBS := - -$(builddir)/binding.node: GYP_LDFLAGS := $(LDFLAGS_$(BUILDTYPE)) -$(builddir)/binding.node: LIBS := $(LIBS) -$(builddir)/binding.node: GYP_LIBTOOLFLAGS := $(LIBTOOLFLAGS_$(BUILDTYPE)) -$(builddir)/binding.node: TOOLSET := $(TOOLSET) -$(builddir)/binding.node: $(OBJS) FORCE_DO_CMD - $(call do_cmd,solink_module) - -all_deps += $(builddir)/binding.node -# Add target alias -.PHONY: binding -binding: $(builddir)/binding.node - -# Short alias for building this executable. -.PHONY: binding.node -binding.node: $(builddir)/binding.node - -# Add executable to "all" target. -.PHONY: all -all: $(builddir)/binding.node - diff --git a/test/addons-napi/1_hello_world/build/config.gypi b/test/addons-napi/1_hello_world/build/config.gypi deleted file mode 100644 index 6a35cfccbb772b..00000000000000 --- a/test/addons-napi/1_hello_world/build/config.gypi +++ /dev/null @@ -1,70 +0,0 @@ -# Do not edit. File was generated by node-gyp's "configure" step -{ - "target_defaults": { - "cflags": [], - "default_configuration": "Release", - "defines": [], - "include_dirs": [], - "libraries": [] - }, - "variables": { - "asan": 0, - "coverage": "false", - "debug_devtools": "node", - "debug_http2": "false", - "debug_nghttp2": "false", - "force_dynamic_crt": 0, - "host_arch": "x64", - "icu_data_file": "icudt59l.dat", - "icu_data_in": "../../deps/icu-small/source/data/in/icudt59l.dat", - "icu_endianness": "l", - "icu_gyp_path": "tools/icu/icu-generic.gyp", - "icu_locales": "en,root", - "icu_path": "deps/icu-small", - "icu_small": "true", - "icu_ver_major": "59", - "llvm_version": 0, - "node_byteorder": "little", - "node_enable_d8": "false", - "node_enable_v8_vtunejit": "false", - "node_install_npm": "true", - "node_module_version": 58, - "node_no_browser_globals": "false", - "node_prefix": "/usr/local", - "node_release_urlbase": "", - "node_shared": "false", - "node_shared_cares": "false", - "node_shared_http_parser": "false", - "node_shared_libuv": "false", - "node_shared_openssl": "false", - "node_shared_zlib": "false", - "node_tag": "", - "node_use_bundled_v8": "true", - "node_use_dtrace": "true", - "node_use_etw": "false", - "node_use_lttng": "false", - "node_use_openssl": "true", - "node_use_perfctr": "false", - "node_use_v8_platform": "true", - "node_without_node_options": "false", - "openssl_fips": "", - "openssl_no_asm": 0, - "shlib_suffix": "58.dylib", - "target_arch": "x64", - "uv_parent_path": "/deps/uv/", - "uv_use_dtrace": "true", - "v8_enable_gdbjit": 0, - "v8_enable_i18n_support": 1, - "v8_enable_inspector": 1, - "v8_no_strict_aliasing": 1, - "v8_optimized_debug": 0, - "v8_promise_internal_field_count": 1, - "v8_random_seed": 0, - "v8_trace_maps": 0, - "v8_use_snapshot": "true", - "want_separate_host_toolset": 0, - "xcode_version": "8.0", - "nodedir": "/Users/trott/io.js", - "standalone_static_library": 1 - } -} diff --git a/test/addons-napi/1_hello_world/build/gyp-mac-tool b/test/addons-napi/1_hello_world/build/gyp-mac-tool deleted file mode 100755 
index 8ef02b0493a003..00000000000000 --- a/test/addons-napi/1_hello_world/build/gyp-mac-tool +++ /dev/null @@ -1,611 +0,0 @@ -#!/usr/bin/env python -# Generated by gyp. Do not edit. -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Utility functions to perform Xcode-style build steps. - -These functions are executed via gyp-mac-tool when using the Makefile generator. -""" - -import fcntl -import fnmatch -import glob -import json -import os -import plistlib -import re -import shutil -import string -import subprocess -import sys -import tempfile - - -def main(args): - executor = MacTool() - exit_code = executor.Dispatch(args) - if exit_code is not None: - sys.exit(exit_code) - - -class MacTool(object): - """This class performs all the Mac tooling steps. The methods can either be - executed directly, or dispatched from an argument list.""" - - def Dispatch(self, args): - """Dispatches a string command to a method.""" - if len(args) < 1: - raise Exception("Not enough arguments") - - method = "Exec%s" % self._CommandifyName(args[0]) - return getattr(self, method)(*args[1:]) - - def _CommandifyName(self, name_string): - """Transforms a tool name like copy-info-plist to CopyInfoPlist""" - return name_string.title().replace('-', '') - - def ExecCopyBundleResource(self, source, dest, convert_to_binary): - """Copies a resource file to the bundle/Resources directory, performing any - necessary compilation on each resource.""" - extension = os.path.splitext(source)[1].lower() - if os.path.isdir(source): - # Copy tree. - # TODO(thakis): This copies file attributes like mtime, while the - # single-file branch below doesn't. This should probably be changed to - # be consistent with the single-file branch. - if os.path.exists(dest): - shutil.rmtree(dest) - shutil.copytree(source, dest) - elif extension == '.xib': - return self._CopyXIBFile(source, dest) - elif extension == '.storyboard': - return self._CopyXIBFile(source, dest) - elif extension == '.strings': - self._CopyStringsFile(source, dest, convert_to_binary) - else: - shutil.copy(source, dest) - - def _CopyXIBFile(self, source, dest): - """Compiles a XIB file with ibtool into a binary plist in the bundle.""" - - # ibtool sometimes crashes with relative paths. See crbug.com/314728. 
- base = os.path.dirname(os.path.realpath(__file__)) - if os.path.relpath(source): - source = os.path.join(base, source) - if os.path.relpath(dest): - dest = os.path.join(base, dest) - - args = ['xcrun', 'ibtool', '--errors', '--warnings', '--notices', - '--output-format', 'human-readable-text', '--compile', dest, source] - ibtool_section_re = re.compile(r'/\*.*\*/') - ibtool_re = re.compile(r'.*note:.*is clipping its content') - ibtoolout = subprocess.Popen(args, stdout=subprocess.PIPE) - current_section_header = None - for line in ibtoolout.stdout: - if ibtool_section_re.match(line): - current_section_header = line - elif not ibtool_re.match(line): - if current_section_header: - sys.stdout.write(current_section_header) - current_section_header = None - sys.stdout.write(line) - return ibtoolout.returncode - - def _ConvertToBinary(self, dest): - subprocess.check_call([ - 'xcrun', 'plutil', '-convert', 'binary1', '-o', dest, dest]) - - def _CopyStringsFile(self, source, dest, convert_to_binary): - """Copies a .strings file using iconv to reconvert the input into UTF-16.""" - input_code = self._DetectInputEncoding(source) or "UTF-8" - - # Xcode's CpyCopyStringsFile / builtin-copyStrings seems to call - # CFPropertyListCreateFromXMLData() behind the scenes; at least it prints - # CFPropertyListCreateFromXMLData(): Old-style plist parser: missing - # semicolon in dictionary. - # on invalid files. Do the same kind of validation. - import CoreFoundation - s = open(source, 'rb').read() - d = CoreFoundation.CFDataCreate(None, s, len(s)) - _, error = CoreFoundation.CFPropertyListCreateFromXMLData(None, d, 0, None) - if error: - return - - fp = open(dest, 'wb') - fp.write(s.decode(input_code).encode('UTF-16')) - fp.close() - - if convert_to_binary == 'True': - self._ConvertToBinary(dest) - - def _DetectInputEncoding(self, file_name): - """Reads the first few bytes from file_name and tries to guess the text - encoding. Returns None as a guess if it can't detect it.""" - fp = open(file_name, 'rb') - try: - header = fp.read(3) - except e: - fp.close() - return None - fp.close() - if header.startswith("\xFE\xFF"): - return "UTF-16" - elif header.startswith("\xFF\xFE"): - return "UTF-16" - elif header.startswith("\xEF\xBB\xBF"): - return "UTF-8" - else: - return None - - def ExecCopyInfoPlist(self, source, dest, convert_to_binary, *keys): - """Copies the |source| Info.plist to the destination directory |dest|.""" - # Read the source Info.plist into memory. - fd = open(source, 'r') - lines = fd.read() - fd.close() - - # Insert synthesized key/value pairs (e.g. BuildMachineOSBuild). - plist = plistlib.readPlistFromString(lines) - if keys: - plist = dict(plist.items() + json.loads(keys[0]).items()) - lines = plistlib.writePlistToString(plist) - - # Go through all the environment variables and replace them as variables in - # the file. - IDENT_RE = re.compile(r'[/\s]') - for key in os.environ: - if key.startswith('_'): - continue - evar = '${%s}' % key - evalue = os.environ[key] - lines = string.replace(lines, evar, evalue) - - # Xcode supports various suffices on environment variables, which are - # all undocumented. :rfc1034identifier is used in the standard project - # template these days, and :identifier was used earlier. They are used to - # convert non-url characters into things that look like valid urls -- - # except that the replacement character for :identifier, '_' isn't valid - # in a URL either -- oops, hence :rfc1034identifier was born. 
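The substitution code that follows in the deleted tool applies the three expansions sketched in the comment above to the entire Info.plist text, once per environment variable. As a condensed illustration of the same idea (the expand helper and the PRODUCT_NAME example are invented for this sketch, not part of the tool):

    import re

    IDENT_RE = re.compile(r'[/\s]')  # slash and whitespace, as in the tool's IDENT_RE

    def expand(line, key, value):
        # Plain ${KEY} substitution.
        line = line.replace('${%s}' % key, value)
        # ${KEY:identifier} replaces slash/whitespace with '_'.
        line = line.replace('${%s:identifier}' % key, IDENT_RE.sub('_', value))
        # ${KEY:rfc1034identifier} uses '-' instead, so the result stays URL-safe.
        line = line.replace('${%s:rfc1034identifier}' % key,
                            IDENT_RE.sub('-', value))
        return line

    print(expand('<string>${PRODUCT_NAME:rfc1034identifier}</string>',
                 'PRODUCT_NAME', 'My App'))  # -> <string>My-App</string>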
- evar = '${%s:identifier}' % key - evalue = IDENT_RE.sub('_', os.environ[key]) - lines = string.replace(lines, evar, evalue) - - evar = '${%s:rfc1034identifier}' % key - evalue = IDENT_RE.sub('-', os.environ[key]) - lines = string.replace(lines, evar, evalue) - - # Remove any keys with values that haven't been replaced. - lines = lines.split('\n') - for i in range(len(lines)): - if lines[i].strip().startswith("${"): - lines[i] = None - lines[i - 1] = None - lines = '\n'.join(filter(lambda x: x is not None, lines)) - - # Write out the file with variables replaced. - fd = open(dest, 'w') - fd.write(lines) - fd.close() - - # Now write out PkgInfo file now that the Info.plist file has been - # "compiled". - self._WritePkgInfo(dest) - - if convert_to_binary == 'True': - self._ConvertToBinary(dest) - - def _WritePkgInfo(self, info_plist): - """This writes the PkgInfo file from the data stored in Info.plist.""" - plist = plistlib.readPlist(info_plist) - if not plist: - return - - # Only create PkgInfo for executable types. - package_type = plist['CFBundlePackageType'] - if package_type != 'APPL': - return - - # The format of PkgInfo is eight characters, representing the bundle type - # and bundle signature, each four characters. If that is missing, four - # '?' characters are used instead. - signature_code = plist.get('CFBundleSignature', '????') - if len(signature_code) != 4: # Wrong length resets everything, too. - signature_code = '?' * 4 - - dest = os.path.join(os.path.dirname(info_plist), 'PkgInfo') - fp = open(dest, 'w') - fp.write('%s%s' % (package_type, signature_code)) - fp.close() - - def ExecFlock(self, lockfile, *cmd_list): - """Emulates the most basic behavior of Linux's flock(1).""" - # Rely on exception handling to report errors. - fd = os.open(lockfile, os.O_RDONLY|os.O_NOCTTY|os.O_CREAT, 0o666) - fcntl.flock(fd, fcntl.LOCK_EX) - return subprocess.call(cmd_list) - - def ExecFilterLibtool(self, *cmd_list): - """Calls libtool and filters out '/path/to/libtool: file: foo.o has no - symbols'.""" - libtool_re = re.compile(r'^.*libtool: file: .* has no symbols$') - libtool_re5 = re.compile( - r'^.*libtool: warning for library: ' + - r'.* the table of contents is empty ' + - r'\(no object file members in the library define global symbols\)$') - env = os.environ.copy() - # Ref: - # http://www.opensource.apple.com/source/cctools/cctools-809/misc/libtool.c - # The problem with this flag is that it resets the file mtime on the file to - # epoch=0, e.g. 1970-1-1 or 1969-12-31 depending on timezone. - env['ZERO_AR_DATE'] = '1' - libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env) - _, err = libtoolout.communicate() - for line in err.splitlines(): - if not libtool_re.match(line) and not libtool_re5.match(line): - print >>sys.stderr, line - # Unconditionally touch the output .a file on the command line if present - # and the command succeeded. A bit hacky. - if not libtoolout.returncode: - for i in range(len(cmd_list) - 1): - if cmd_list[i] == "-o" and cmd_list[i+1].endswith('.a'): - os.utime(cmd_list[i+1], None) - break - return libtoolout.returncode - - def ExecPackageFramework(self, framework, version): - """Takes a path to Something.framework and the Current version of that and - sets up all the symlinks.""" - # Find the name of the binary based on the part before the ".framework". 
- binary = os.path.basename(framework).split('.')[0] - - CURRENT = 'Current' - RESOURCES = 'Resources' - VERSIONS = 'Versions' - - if not os.path.exists(os.path.join(framework, VERSIONS, version, binary)): - # Binary-less frameworks don't seem to contain symlinks (see e.g. - # chromium's out/Debug/org.chromium.Chromium.manifest/ bundle). - return - - # Move into the framework directory to set the symlinks correctly. - pwd = os.getcwd() - os.chdir(framework) - - # Set up the Current version. - self._Relink(version, os.path.join(VERSIONS, CURRENT)) - - # Set up the root symlinks. - self._Relink(os.path.join(VERSIONS, CURRENT, binary), binary) - self._Relink(os.path.join(VERSIONS, CURRENT, RESOURCES), RESOURCES) - - # Back to where we were before! - os.chdir(pwd) - - def _Relink(self, dest, link): - """Creates a symlink to |dest| named |link|. If |link| already exists, - it is overwritten.""" - if os.path.lexists(link): - os.remove(link) - os.symlink(dest, link) - - def ExecCompileXcassets(self, keys, *inputs): - """Compiles multiple .xcassets files into a single .car file. - - This invokes 'actool' to compile all the inputs .xcassets files. The - |keys| arguments is a json-encoded dictionary of extra arguments to - pass to 'actool' when the asset catalogs contains an application icon - or a launch image. - - Note that 'actool' does not create the Assets.car file if the asset - catalogs does not contains imageset. - """ - command_line = [ - 'xcrun', 'actool', '--output-format', 'human-readable-text', - '--compress-pngs', '--notices', '--warnings', '--errors', - ] - is_iphone_target = 'IPHONEOS_DEPLOYMENT_TARGET' in os.environ - if is_iphone_target: - platform = os.environ['CONFIGURATION'].split('-')[-1] - if platform not in ('iphoneos', 'iphonesimulator'): - platform = 'iphonesimulator' - command_line.extend([ - '--platform', platform, '--target-device', 'iphone', - '--target-device', 'ipad', '--minimum-deployment-target', - os.environ['IPHONEOS_DEPLOYMENT_TARGET'], '--compile', - os.path.abspath(os.environ['CONTENTS_FOLDER_PATH']), - ]) - else: - command_line.extend([ - '--platform', 'macosx', '--target-device', 'mac', - '--minimum-deployment-target', os.environ['MACOSX_DEPLOYMENT_TARGET'], - '--compile', - os.path.abspath(os.environ['UNLOCALIZED_RESOURCES_FOLDER_PATH']), - ]) - if keys: - keys = json.loads(keys) - for key, value in keys.iteritems(): - arg_name = '--' + key - if isinstance(value, bool): - if value: - command_line.append(arg_name) - elif isinstance(value, list): - for v in value: - command_line.append(arg_name) - command_line.append(str(v)) - else: - command_line.append(arg_name) - command_line.append(str(value)) - # Note: actool crashes if inputs path are relative, so use os.path.abspath - # to get absolute path name for inputs. - command_line.extend(map(os.path.abspath, inputs)) - subprocess.check_call(command_line) - - def ExecMergeInfoPlist(self, output, *inputs): - """Merge multiple .plist files into a single .plist file.""" - merged_plist = {} - for path in inputs: - plist = self._LoadPlistMaybeBinary(path) - self._MergePlist(merged_plist, plist) - plistlib.writePlist(merged_plist, output) - - def ExecCodeSignBundle(self, key, resource_rules, entitlements, provisioning): - """Code sign a bundle. - - This function tries to code sign an iOS bundle, following the same - algorithm as Xcode: - 1. copy ResourceRules.plist from the user or the SDK into the bundle, - 2. 
pick the provisioning profile that best match the bundle identifier, - and copy it into the bundle as embedded.mobileprovision, - 3. copy Entitlements.plist from user or SDK next to the bundle, - 4. code sign the bundle. - """ - resource_rules_path = self._InstallResourceRules(resource_rules) - substitutions, overrides = self._InstallProvisioningProfile( - provisioning, self._GetCFBundleIdentifier()) - entitlements_path = self._InstallEntitlements( - entitlements, substitutions, overrides) - subprocess.check_call([ - 'codesign', '--force', '--sign', key, '--resource-rules', - resource_rules_path, '--entitlements', entitlements_path, - os.path.join( - os.environ['TARGET_BUILD_DIR'], - os.environ['FULL_PRODUCT_NAME'])]) - - def _InstallResourceRules(self, resource_rules): - """Installs ResourceRules.plist from user or SDK into the bundle. - - Args: - resource_rules: string, optional, path to the ResourceRules.plist file - to use, default to "${SDKROOT}/ResourceRules.plist" - - Returns: - Path to the copy of ResourceRules.plist into the bundle. - """ - source_path = resource_rules - target_path = os.path.join( - os.environ['BUILT_PRODUCTS_DIR'], - os.environ['CONTENTS_FOLDER_PATH'], - 'ResourceRules.plist') - if not source_path: - source_path = os.path.join( - os.environ['SDKROOT'], 'ResourceRules.plist') - shutil.copy2(source_path, target_path) - return target_path - - def _InstallProvisioningProfile(self, profile, bundle_identifier): - """Installs embedded.mobileprovision into the bundle. - - Args: - profile: string, optional, short name of the .mobileprovision file - to use, if empty or the file is missing, the best file installed - will be used - bundle_identifier: string, value of CFBundleIdentifier from Info.plist - - Returns: - A tuple containing two dictionary: variables substitutions and values - to overrides when generating the entitlements file. - """ - source_path, provisioning_data, team_id = self._FindProvisioningProfile( - profile, bundle_identifier) - target_path = os.path.join( - os.environ['BUILT_PRODUCTS_DIR'], - os.environ['CONTENTS_FOLDER_PATH'], - 'embedded.mobileprovision') - shutil.copy2(source_path, target_path) - substitutions = self._GetSubstitutions(bundle_identifier, team_id + '.') - return substitutions, provisioning_data['Entitlements'] - - def _FindProvisioningProfile(self, profile, bundle_identifier): - """Finds the .mobileprovision file to use for signing the bundle. - - Checks all the installed provisioning profiles (or if the user specified - the PROVISIONING_PROFILE variable, only consult it) and select the most - specific that correspond to the bundle identifier. - - Args: - profile: string, optional, short name of the .mobileprovision file - to use, if empty or the file is missing, the best file installed - will be used - bundle_identifier: string, value of CFBundleIdentifier from Info.plist - - Returns: - A tuple of the path to the selected provisioning profile, the data of - the embedded plist in the provisioning profile and the team identifier - to use for code signing. - - Raises: - SystemExit: if no .mobileprovision can be used to sign the bundle. 
- """ - profiles_dir = os.path.join( - os.environ['HOME'], 'Library', 'MobileDevice', 'Provisioning Profiles') - if not os.path.isdir(profiles_dir): - print >>sys.stderr, ( - 'cannot find mobile provisioning for %s' % bundle_identifier) - sys.exit(1) - provisioning_profiles = None - if profile: - profile_path = os.path.join(profiles_dir, profile + '.mobileprovision') - if os.path.exists(profile_path): - provisioning_profiles = [profile_path] - if not provisioning_profiles: - provisioning_profiles = glob.glob( - os.path.join(profiles_dir, '*.mobileprovision')) - valid_provisioning_profiles = {} - for profile_path in provisioning_profiles: - profile_data = self._LoadProvisioningProfile(profile_path) - app_id_pattern = profile_data.get( - 'Entitlements', {}).get('application-identifier', '') - for team_identifier in profile_data.get('TeamIdentifier', []): - app_id = '%s.%s' % (team_identifier, bundle_identifier) - if fnmatch.fnmatch(app_id, app_id_pattern): - valid_provisioning_profiles[app_id_pattern] = ( - profile_path, profile_data, team_identifier) - if not valid_provisioning_profiles: - print >>sys.stderr, ( - 'cannot find mobile provisioning for %s' % bundle_identifier) - sys.exit(1) - # If the user has multiple provisioning profiles installed that can be - # used for ${bundle_identifier}, pick the most specific one (ie. the - # provisioning profile whose pattern is the longest). - selected_key = max(valid_provisioning_profiles, key=lambda v: len(v)) - return valid_provisioning_profiles[selected_key] - - def _LoadProvisioningProfile(self, profile_path): - """Extracts the plist embedded in a provisioning profile. - - Args: - profile_path: string, path to the .mobileprovision file - - Returns: - Content of the plist embedded in the provisioning profile as a dictionary. - """ - with tempfile.NamedTemporaryFile() as temp: - subprocess.check_call([ - 'security', 'cms', '-D', '-i', profile_path, '-o', temp.name]) - return self._LoadPlistMaybeBinary(temp.name) - - def _MergePlist(self, merged_plist, plist): - """Merge |plist| into |merged_plist|.""" - for key, value in plist.iteritems(): - if isinstance(value, dict): - merged_value = merged_plist.get(key, {}) - if isinstance(merged_value, dict): - self._MergePlist(merged_value, value) - merged_plist[key] = merged_value - else: - merged_plist[key] = value - else: - merged_plist[key] = value - - def _LoadPlistMaybeBinary(self, plist_path): - """Loads into a memory a plist possibly encoded in binary format. - - This is a wrapper around plistlib.readPlist that tries to convert the - plist to the XML format if it can't be parsed (assuming that it is in - the binary format). - - Args: - plist_path: string, path to a plist file, in XML or binary format - - Returns: - Content of the plist as a dictionary. - """ - try: - # First, try to read the file using plistlib that only supports XML, - # and if an exception is raised, convert a temporary copy to XML and - # load that copy. - return plistlib.readPlist(plist_path) - except: - pass - with tempfile.NamedTemporaryFile() as temp: - shutil.copy2(plist_path, temp.name) - subprocess.check_call(['plutil', '-convert', 'xml1', temp.name]) - return plistlib.readPlist(temp.name) - - def _GetSubstitutions(self, bundle_identifier, app_identifier_prefix): - """Constructs a dictionary of variable substitutions for Entitlements.plist. 
- - Args: - bundle_identifier: string, value of CFBundleIdentifier from Info.plist - app_identifier_prefix: string, value for AppIdentifierPrefix - - Returns: - Dictionary of substitutions to apply when generating Entitlements.plist. - """ - return { - 'CFBundleIdentifier': bundle_identifier, - 'AppIdentifierPrefix': app_identifier_prefix, - } - - def _GetCFBundleIdentifier(self): - """Extracts CFBundleIdentifier value from Info.plist in the bundle. - - Returns: - Value of CFBundleIdentifier in the Info.plist located in the bundle. - """ - info_plist_path = os.path.join( - os.environ['TARGET_BUILD_DIR'], - os.environ['INFOPLIST_PATH']) - info_plist_data = self._LoadPlistMaybeBinary(info_plist_path) - return info_plist_data['CFBundleIdentifier'] - - def _InstallEntitlements(self, entitlements, substitutions, overrides): - """Generates and install the ${BundleName}.xcent entitlements file. - - Expands variables "$(variable)" pattern in the source entitlements file, - add extra entitlements defined in the .mobileprovision file and the copy - the generated plist to "${BundlePath}.xcent". - - Args: - entitlements: string, optional, path to the Entitlements.plist template - to use, defaults to "${SDKROOT}/Entitlements.plist" - substitutions: dictionary, variable substitutions - overrides: dictionary, values to add to the entitlements - - Returns: - Path to the generated entitlements file. - """ - source_path = entitlements - target_path = os.path.join( - os.environ['BUILT_PRODUCTS_DIR'], - os.environ['PRODUCT_NAME'] + '.xcent') - if not source_path: - source_path = os.path.join( - os.environ['SDKROOT'], - 'Entitlements.plist') - shutil.copy2(source_path, target_path) - data = self._LoadPlistMaybeBinary(target_path) - data = self._ExpandVariables(data, substitutions) - if overrides: - for key in overrides: - if key not in data: - data[key] = overrides[key] - plistlib.writePlist(data, target_path) - return target_path - - def _ExpandVariables(self, data, substitutions): - """Expands variables "$(variable)" in data. - - Args: - data: object, can be either string, list or dictionary - substitutions: dictionary, variable substitutions to perform - - Returns: - Copy of data where each references to "$(variable)" has been replaced - by the corresponding value found in substitutions, or left intact if - the key was not found. - """ - if isinstance(data, str): - for key, value in substitutions.iteritems(): - data = data.replace('$(%s)' % key, value) - return data - if isinstance(data, list): - return [self._ExpandVariables(v, substitutions) for v in data] - if isinstance(data, dict): - return {k: self._ExpandVariables(data[k], substitutions) for k in data} - return data - -if __name__ == '__main__': - sys.exit(main(sys.argv[1:])) diff --git a/test/addons-napi/2_function_arguments/build/Makefile b/test/addons-napi/2_function_arguments/build/Makefile deleted file mode 100644 index 0e2a4d23079f0d..00000000000000 --- a/test/addons-napi/2_function_arguments/build/Makefile +++ /dev/null @@ -1,342 +0,0 @@ -# We borrow heavily from the kernel build setup, though we are simpler since -# we don't have Kconfig tweaking settings on us. - -# The implicit make rules have it looking for RCS files, among other things. -# We instead explicitly write all the rules we care about. -# It's even quicker (saves ~200ms) to pass -r on the command line. -MAKEFLAGS=-r - -# The source directory tree. -srcdir := .. -abs_srcdir := $(abspath $(srcdir)) - -# The name of the builddir. -builddir_name ?= . 
- -# The V=1 flag on command line makes us verbosely print command lines. -ifdef V - quiet= -else - quiet=quiet_ -endif - -# Specify BUILDTYPE=Release on the command line for a release build. -BUILDTYPE ?= Release - -# Directory all our build output goes into. -# Note that this must be two directories beneath src/ for unit tests to pass, -# as they reach into the src/ directory for data with relative paths. -builddir ?= $(builddir_name)/$(BUILDTYPE) -abs_builddir := $(abspath $(builddir)) -depsdir := $(builddir)/.deps - -# Object output directory. -obj := $(builddir)/obj -abs_obj := $(abspath $(obj)) - -# We build up a list of every single one of the targets so we can slurp in the -# generated dependency rule Makefiles in one pass. -all_deps := - - - -CC.target ?= $(CC) -CFLAGS.target ?= $(CPPFLAGS) $(CFLAGS) -CXX.target ?= $(CXX) -CXXFLAGS.target ?= $(CPPFLAGS) $(CXXFLAGS) -LINK.target ?= $(LINK) -LDFLAGS.target ?= $(LDFLAGS) -AR.target ?= $(AR) - -# C++ apps need to be linked with g++. -LINK ?= $(CXX.target) - -# TODO(evan): move all cross-compilation logic to gyp-time so we don't need -# to replicate this environment fallback in make as well. -CC.host ?= gcc -CFLAGS.host ?= $(CPPFLAGS_host) $(CFLAGS_host) -CXX.host ?= g++ -CXXFLAGS.host ?= $(CPPFLAGS_host) $(CXXFLAGS_host) -LINK.host ?= $(CXX.host) -LDFLAGS.host ?= -AR.host ?= ar - -# Define a dir function that can handle spaces. -# http://www.gnu.org/software/make/manual/make.html#Syntax-of-Functions -# "leading spaces cannot appear in the text of the first argument as written. -# These characters can be put into the argument value by variable substitution." -empty := -space := $(empty) $(empty) - -# http://stackoverflow.com/questions/1189781/using-make-dir-or-notdir-on-a-path-with-spaces -replace_spaces = $(subst $(space),?,$1) -unreplace_spaces = $(subst ?,$(space),$1) -dirx = $(call unreplace_spaces,$(dir $(call replace_spaces,$1))) - -# Flags to make gcc output dependency info. Note that you need to be -# careful here to use the flags that ccache and distcc can understand. -# We write to a dep file on the side first and then rename at the end -# so we can't end up with a broken dep file. -depfile = $(depsdir)/$(call replace_spaces,$@).d -DEPFLAGS = -MMD -MF $(depfile).raw - -# We have to fixup the deps output in a few ways. -# (1) the file output should mention the proper .o file. -# ccache or distcc lose the path to the target, so we convert a rule of -# the form: -# foobar.o: DEP1 DEP2 -# into -# path/to/foobar.o: DEP1 DEP2 -# (2) we want missing files not to cause us to fail to build. -# We want to rewrite -# foobar.o: DEP1 DEP2 \ -# DEP3 -# to -# DEP1: -# DEP2: -# DEP3: -# so if the files are missing, they're just considered phony rules. -# We have to do some pretty insane escaping to get those backslashes -# and dollar signs past make, the shell, and sed at the same time. -# Doesn't work with spaces, but that's fine: .d files have spaces in -# their names replaced with other characters. -define fixup_dep -# The depfile may not exist if the input file didn't have any #includes. -touch $(depfile).raw -# Fixup path as in (1). -sed -e "s|^$(notdir $@)|$@|" $(depfile).raw >> $(depfile) -# Add extra rules as in (2). -# We remove slashes and replace spaces with new lines; -# remove blank lines; -# delete the first line and append a colon to the remaining lines. 
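The sed pipeline that follows performs the two depfile rewrites described in the comment above. A rough Python rendering of the same transformation, for readability (the fixup_depfile name and the sample paths are invented for this sketch; the Makefile itself does it with sed):

    def fixup_depfile(raw_text, target):
        # (1) Re-point the rule at the full object path; ccache/distcc may
        #     have dropped the directory from the target name.
        _obj_basename, deps_part = raw_text.split(':', 1)
        fixed = target + ':' + deps_part
        # (2) Give every dependency its own empty rule, so a deleted header
        #     is treated like a phony target instead of breaking the build.
        deps = deps_part.replace('\\\n', ' ').split()
        return fixed + ''.join(dep + ':\n' for dep in deps)

    raw = 'foobar.o: foo.h bar.h \\\n baz.h\n'
    print(fixup_depfile(raw, 'path/to/foobar.o'))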
-sed -e 's|\\||' -e 'y| |\n|' $(depfile).raw |\ - grep -v '^$$' |\ - sed -e 1d -e 's|$$|:|' \ - >> $(depfile) -rm $(depfile).raw -endef - -# Command definitions: -# - cmd_foo is the actual command to run; -# - quiet_cmd_foo is the brief-output summary of the command. - -quiet_cmd_cc = CC($(TOOLSET)) $@ -cmd_cc = $(CC.$(TOOLSET)) $(GYP_CFLAGS) $(DEPFLAGS) $(CFLAGS.$(TOOLSET)) -c -o $@ $< - -quiet_cmd_cxx = CXX($(TOOLSET)) $@ -cmd_cxx = $(CXX.$(TOOLSET)) $(GYP_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< - -quiet_cmd_objc = CXX($(TOOLSET)) $@ -cmd_objc = $(CC.$(TOOLSET)) $(GYP_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $< - -quiet_cmd_objcxx = CXX($(TOOLSET)) $@ -cmd_objcxx = $(CXX.$(TOOLSET)) $(GYP_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $< - -# Commands for precompiled header files. -quiet_cmd_pch_c = CXX($(TOOLSET)) $@ -cmd_pch_c = $(CC.$(TOOLSET)) $(GYP_PCH_CFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< -quiet_cmd_pch_cc = CXX($(TOOLSET)) $@ -cmd_pch_cc = $(CC.$(TOOLSET)) $(GYP_PCH_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< -quiet_cmd_pch_m = CXX($(TOOLSET)) $@ -cmd_pch_m = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $< -quiet_cmd_pch_mm = CXX($(TOOLSET)) $@ -cmd_pch_mm = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $< - -# gyp-mac-tool is written next to the root Makefile by gyp. -# Use $(4) for the command, since $(2) and $(3) are used as flag by do_cmd -# already. -quiet_cmd_mac_tool = MACTOOL $(4) $< -cmd_mac_tool = ./gyp-mac-tool $(4) $< "$@" - -quiet_cmd_mac_package_framework = PACKAGE FRAMEWORK $@ -cmd_mac_package_framework = ./gyp-mac-tool package-framework "$@" $(4) - -quiet_cmd_infoplist = INFOPLIST $@ -cmd_infoplist = $(CC.$(TOOLSET)) -E -P -Wno-trigraphs -x c $(INFOPLIST_DEFINES) "$<" -o "$@" - -quiet_cmd_touch = TOUCH $@ -cmd_touch = touch $@ - -quiet_cmd_copy = COPY $@ -# send stderr to /dev/null to ignore messages when linking directories. -cmd_copy = rm -rf "$@" && cp -af "$<" "$@" - -quiet_cmd_alink = LIBTOOL-STATIC $@ -cmd_alink = rm -f $@ && ./gyp-mac-tool filter-libtool libtool $(GYP_LIBTOOLFLAGS) -static -o $@ $(filter %.o,$^) - -quiet_cmd_link = LINK($(TOOLSET)) $@ -cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS) - -quiet_cmd_solink = SOLINK($(TOOLSET)) $@ -cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS) - -quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@ -cmd_solink_module = $(LINK.$(TOOLSET)) -bundle $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS) - - -# Define an escape_quotes function to escape single quotes. -# This allows us to handle quotes properly as long as we always use -# use single quotes and escape_quotes. -escape_quotes = $(subst ','\'',$(1)) -# This comment is here just to include a ' to unconfuse syntax highlighting. -# Define an escape_vars function to escape '$' variable syntax. -# This allows us to read/write command lines with shell variables (e.g. -# $LD_LIBRARY_PATH), without triggering make substitution. -escape_vars = $(subst $$,$$$$,$(1)) -# Helper that expands to a shell command to echo a string exactly as it is in -# make. This uses printf instead of echo because printf's behaviour with respect -# to escape sequences is more portable than echo's across different shells -# (e.g., dash, bash). 
-exact_echo = printf '%s\n' '$(call escape_quotes,$(1))' - -# Helper to compare the command we're about to run against the command -# we logged the last time we ran the command. Produces an empty -# string (false) when the commands match. -# Tricky point: Make has no string-equality test function. -# The kernel uses the following, but it seems like it would have false -# positives, where one string reordered its arguments. -# arg_check = $(strip $(filter-out $(cmd_$(1)), $(cmd_$@)) \ -# $(filter-out $(cmd_$@), $(cmd_$(1)))) -# We instead substitute each for the empty string into the other, and -# say they're equal if both substitutions produce the empty string. -# .d files contain ? instead of spaces, take that into account. -command_changed = $(or $(subst $(cmd_$(1)),,$(cmd_$(call replace_spaces,$@))),\ - $(subst $(cmd_$(call replace_spaces,$@)),,$(cmd_$(1)))) - -# Helper that is non-empty when a prerequisite changes. -# Normally make does this implicitly, but we force rules to always run -# so we can check their command lines. -# $? -- new prerequisites -# $| -- order-only dependencies -prereq_changed = $(filter-out FORCE_DO_CMD,$(filter-out $|,$?)) - -# Helper that executes all postbuilds until one fails. -define do_postbuilds - @E=0;\ - for p in $(POSTBUILDS); do\ - eval $$p;\ - E=$$?;\ - if [ $$E -ne 0 ]; then\ - break;\ - fi;\ - done;\ - if [ $$E -ne 0 ]; then\ - rm -rf "$@";\ - exit $$E;\ - fi -endef - -# do_cmd: run a command via the above cmd_foo names, if necessary. -# Should always run for a given target to handle command-line changes. -# Second argument, if non-zero, makes it do asm/C/C++ dependency munging. -# Third argument, if non-zero, makes it do POSTBUILDS processing. -# Note: We intentionally do NOT call dirx for depfile, since it contains ? for -# spaces already and dirx strips the ? characters. -define do_cmd -$(if $(or $(command_changed),$(prereq_changed)), - @$(call exact_echo, $($(quiet)cmd_$(1))) - @mkdir -p "$(call dirx,$@)" "$(dir $(depfile))" - $(if $(findstring flock,$(word 2,$(cmd_$1))), - @$(cmd_$(1)) - @echo " $(quiet_cmd_$(1)): Finished", - @$(cmd_$(1)) - ) - @$(call exact_echo,$(call escape_vars,cmd_$(call replace_spaces,$@) := $(cmd_$(1)))) > $(depfile) - @$(if $(2),$(fixup_dep)) - $(if $(and $(3), $(POSTBUILDS)), - $(call do_postbuilds) - ) -) -endef - -# Declare the "all" target first so it is the default, -# even though we don't have the deps yet. -.PHONY: all -all: - -# make looks for ways to re-generate included makefiles, but in our case, we -# don't have a direct way. Explicitly telling make that it has nothing to do -# for them makes it go faster. -%.d: ; - -# Use FORCE_DO_CMD to force a target to run. Should be coupled with -# do_cmd. -.PHONY: FORCE_DO_CMD -FORCE_DO_CMD: - -TOOLSET := target -# Suffix rules, putting all outputs into $(obj). -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.m FORCE_DO_CMD - @$(call do_cmd,objc,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.mm FORCE_DO_CMD - @$(call do_cmd,objcxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.S FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.s FORCE_DO_CMD - @$(call do_cmd,cc,1) - -# Try building from generated source, too. 
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.m FORCE_DO_CMD - @$(call do_cmd,objc,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.mm FORCE_DO_CMD - @$(call do_cmd,objcxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.S FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.s FORCE_DO_CMD - @$(call do_cmd,cc,1) - -$(obj).$(TOOLSET)/%.o: $(obj)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.m FORCE_DO_CMD - @$(call do_cmd,objc,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.mm FORCE_DO_CMD - @$(call do_cmd,objcxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.S FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.s FORCE_DO_CMD - @$(call do_cmd,cc,1) - - -ifeq ($(strip $(foreach prefix,$(NO_LOAD),\ - $(findstring $(join ^,$(prefix)),\ - $(join ^,binding.target.mk)))),) - include binding.target.mk -endif - -quiet_cmd_regen_makefile = ACTION Regenerating $@ -cmd_regen_makefile = cd $(srcdir); /Users/trott/io.js/deps/npm/node_modules/node-gyp/gyp/gyp_main.py -fmake --ignore-environment "--toplevel-dir=." -I/Users/trott/io.js/test/addons-napi/2_function_arguments/build/config.gypi -I/Users/trott/io.js/deps/npm/node_modules/node-gyp/addon.gypi -I/Users/trott/io.js/common.gypi "--depth=." "-Goutput_dir=." "--generator-output=build" "-Dlibrary=shared_library" "-Dvisibility=default" "-Dnode_root_dir=/Users/trott/io.js" "-Dnode_gyp_dir=/Users/trott/io.js/deps/npm/node_modules/node-gyp" "-Dnode_lib_file=/Users/trott/io.js/$(Configuration)/node.lib" "-Dmodule_root_dir=/Users/trott/io.js/test/addons-napi/2_function_arguments" "-Dnode_engine=v8" binding.gyp -Makefile: $(srcdir)/../../../deps/npm/node_modules/node-gyp/addon.gypi $(srcdir)/build/config.gypi $(srcdir)/binding.gyp $(srcdir)/../../../common.gypi - $(call do_cmd,regen_makefile) - -# "all" is a concatenation of the "all" targets from all the included -# sub-makefiles. This is just here to clarify. -all: - -# Add in dependency-tracking rules. $(all_deps) is the list of every single -# target in our tree. Only consider the ones with .d (dependency) info: -d_files := $(wildcard $(foreach f,$(all_deps),$(depsdir)/$(f).d)) -ifneq ($(d_files),) - include $(d_files) -endif diff --git a/test/addons-napi/2_function_arguments/build/binding.Makefile b/test/addons-napi/2_function_arguments/build/binding.Makefile deleted file mode 100644 index 0556c70f61905f..00000000000000 --- a/test/addons-napi/2_function_arguments/build/binding.Makefile +++ /dev/null @@ -1,6 +0,0 @@ -# This file is generated by gyp; do not edit. - -export builddir_name ?= ./build/. -.PHONY: all -all: - $(MAKE) binding diff --git a/test/addons-napi/2_function_arguments/build/binding.target.mk b/test/addons-napi/2_function_arguments/build/binding.target.mk deleted file mode 100644 index a707bab99ab136..00000000000000 --- a/test/addons-napi/2_function_arguments/build/binding.target.mk +++ /dev/null @@ -1,178 +0,0 @@ -# This file is generated by gyp; do not edit. 
- -TOOLSET := target -TARGET := binding -DEFS_Debug := \ - '-DNODE_GYP_MODULE_NAME=binding' \ - '-DUSING_UV_SHARED=1' \ - '-DUSING_V8_SHARED=1' \ - '-DV8_DEPRECATION_WARNINGS=1' \ - '-D_DARWIN_USE_64_BIT_INODE=1' \ - '-D_LARGEFILE_SOURCE' \ - '-D_FILE_OFFSET_BITS=64' \ - '-DBUILDING_NODE_EXTENSION' \ - '-DDEBUG' \ - '-D_DEBUG' \ - '-DV8_ENABLE_CHECKS' - -# Flags passed to all source files. -CFLAGS_Debug := \ - -O0 \ - -gdwarf-2 \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -Wall \ - -Wendif-labels \ - -W \ - -Wno-unused-parameter - -# Flags passed to only C files. -CFLAGS_C_Debug := \ - -fno-strict-aliasing - -# Flags passed to only C++ files. -CFLAGS_CC_Debug := \ - -std=gnu++0x \ - -stdlib=libc++ \ - -fno-rtti \ - -fno-exceptions \ - -fno-threadsafe-statics \ - -fno-strict-aliasing - -# Flags passed to only ObjC files. -CFLAGS_OBJC_Debug := - -# Flags passed to only ObjC++ files. -CFLAGS_OBJCC_Debug := - -INCS_Debug := \ - -I/Users/trott/io.js/include/node \ - -I/Users/trott/io.js/src \ - -I/Users/trott/io.js/deps/uv/include \ - -I/Users/trott/io.js/deps/v8/include - -DEFS_Release := \ - '-DNODE_GYP_MODULE_NAME=binding' \ - '-DUSING_UV_SHARED=1' \ - '-DUSING_V8_SHARED=1' \ - '-DV8_DEPRECATION_WARNINGS=1' \ - '-D_DARWIN_USE_64_BIT_INODE=1' \ - '-D_LARGEFILE_SOURCE' \ - '-D_FILE_OFFSET_BITS=64' \ - '-DBUILDING_NODE_EXTENSION' - -# Flags passed to all source files. -CFLAGS_Release := \ - -Os \ - -gdwarf-2 \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -Wall \ - -Wendif-labels \ - -W \ - -Wno-unused-parameter - -# Flags passed to only C files. -CFLAGS_C_Release := \ - -fno-strict-aliasing - -# Flags passed to only C++ files. -CFLAGS_CC_Release := \ - -std=gnu++0x \ - -stdlib=libc++ \ - -fno-rtti \ - -fno-exceptions \ - -fno-threadsafe-statics \ - -fno-strict-aliasing - -# Flags passed to only ObjC files. -CFLAGS_OBJC_Release := - -# Flags passed to only ObjC++ files. -CFLAGS_OBJCC_Release := - -INCS_Release := \ - -I/Users/trott/io.js/include/node \ - -I/Users/trott/io.js/src \ - -I/Users/trott/io.js/deps/uv/include \ - -I/Users/trott/io.js/deps/v8/include - -OBJS := \ - $(obj).target/$(TARGET)/binding.o - -# Add to the list of files we specially track dependencies for. -all_deps += $(OBJS) - -# CFLAGS et al overrides must be target-local. -# See "Target-specific Variable Values" in the GNU Make manual. -$(OBJS): TOOLSET := $(TOOLSET) -$(OBJS): GYP_CFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) -$(OBJS): GYP_CXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) -$(OBJS): GYP_OBJCFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) $(CFLAGS_OBJC_$(BUILDTYPE)) -$(OBJS): GYP_OBJCXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) $(CFLAGS_OBJCC_$(BUILDTYPE)) - -# Suffix rules, putting all outputs into $(obj). - -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) - -# Try building from generated source, too. - -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) - -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) - -# End of this set of suffix rules -### Rules for final target. 
-LDFLAGS_Debug := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -L$(builddir) \ - -stdlib=libc++ - -LIBTOOLFLAGS_Debug := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first - -LDFLAGS_Release := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -L$(builddir) \ - -stdlib=libc++ - -LIBTOOLFLAGS_Release := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first - -LIBS := - -$(builddir)/binding.node: GYP_LDFLAGS := $(LDFLAGS_$(BUILDTYPE)) -$(builddir)/binding.node: LIBS := $(LIBS) -$(builddir)/binding.node: GYP_LIBTOOLFLAGS := $(LIBTOOLFLAGS_$(BUILDTYPE)) -$(builddir)/binding.node: TOOLSET := $(TOOLSET) -$(builddir)/binding.node: $(OBJS) FORCE_DO_CMD - $(call do_cmd,solink_module) - -all_deps += $(builddir)/binding.node -# Add target alias -.PHONY: binding -binding: $(builddir)/binding.node - -# Short alias for building this executable. -.PHONY: binding.node -binding.node: $(builddir)/binding.node - -# Add executable to "all" target. -.PHONY: all -all: $(builddir)/binding.node - diff --git a/test/addons-napi/2_function_arguments/build/config.gypi b/test/addons-napi/2_function_arguments/build/config.gypi deleted file mode 100644 index 6a35cfccbb772b..00000000000000 --- a/test/addons-napi/2_function_arguments/build/config.gypi +++ /dev/null @@ -1,70 +0,0 @@ -# Do not edit. File was generated by node-gyp's "configure" step -{ - "target_defaults": { - "cflags": [], - "default_configuration": "Release", - "defines": [], - "include_dirs": [], - "libraries": [] - }, - "variables": { - "asan": 0, - "coverage": "false", - "debug_devtools": "node", - "debug_http2": "false", - "debug_nghttp2": "false", - "force_dynamic_crt": 0, - "host_arch": "x64", - "icu_data_file": "icudt59l.dat", - "icu_data_in": "../../deps/icu-small/source/data/in/icudt59l.dat", - "icu_endianness": "l", - "icu_gyp_path": "tools/icu/icu-generic.gyp", - "icu_locales": "en,root", - "icu_path": "deps/icu-small", - "icu_small": "true", - "icu_ver_major": "59", - "llvm_version": 0, - "node_byteorder": "little", - "node_enable_d8": "false", - "node_enable_v8_vtunejit": "false", - "node_install_npm": "true", - "node_module_version": 58, - "node_no_browser_globals": "false", - "node_prefix": "/usr/local", - "node_release_urlbase": "", - "node_shared": "false", - "node_shared_cares": "false", - "node_shared_http_parser": "false", - "node_shared_libuv": "false", - "node_shared_openssl": "false", - "node_shared_zlib": "false", - "node_tag": "", - "node_use_bundled_v8": "true", - "node_use_dtrace": "true", - "node_use_etw": "false", - "node_use_lttng": "false", - "node_use_openssl": "true", - "node_use_perfctr": "false", - "node_use_v8_platform": "true", - "node_without_node_options": "false", - "openssl_fips": "", - "openssl_no_asm": 0, - "shlib_suffix": "58.dylib", - "target_arch": "x64", - "uv_parent_path": "/deps/uv/", - "uv_use_dtrace": "true", - "v8_enable_gdbjit": 0, - "v8_enable_i18n_support": 1, - "v8_enable_inspector": 1, - "v8_no_strict_aliasing": 1, - "v8_optimized_debug": 0, - "v8_promise_internal_field_count": 1, - "v8_random_seed": 0, - "v8_trace_maps": 0, - "v8_use_snapshot": "true", - "want_separate_host_toolset": 0, - "xcode_version": "8.0", - "nodedir": "/Users/trott/io.js", - "standalone_static_library": 1 - } -} diff --git a/test/addons-napi/2_function_arguments/build/gyp-mac-tool 
b/test/addons-napi/2_function_arguments/build/gyp-mac-tool deleted file mode 100755 index 8ef02b0493a003..00000000000000 --- a/test/addons-napi/2_function_arguments/build/gyp-mac-tool +++ /dev/null @@ -1,611 +0,0 @@ -#!/usr/bin/env python -# Generated by gyp. Do not edit. -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Utility functions to perform Xcode-style build steps. - -These functions are executed via gyp-mac-tool when using the Makefile generator. -""" - -import fcntl -import fnmatch -import glob -import json -import os -import plistlib -import re -import shutil -import string -import subprocess -import sys -import tempfile - - -def main(args): - executor = MacTool() - exit_code = executor.Dispatch(args) - if exit_code is not None: - sys.exit(exit_code) - - -class MacTool(object): - """This class performs all the Mac tooling steps. The methods can either be - executed directly, or dispatched from an argument list.""" - - def Dispatch(self, args): - """Dispatches a string command to a method.""" - if len(args) < 1: - raise Exception("Not enough arguments") - - method = "Exec%s" % self._CommandifyName(args[0]) - return getattr(self, method)(*args[1:]) - - def _CommandifyName(self, name_string): - """Transforms a tool name like copy-info-plist to CopyInfoPlist""" - return name_string.title().replace('-', '') - - def ExecCopyBundleResource(self, source, dest, convert_to_binary): - """Copies a resource file to the bundle/Resources directory, performing any - necessary compilation on each resource.""" - extension = os.path.splitext(source)[1].lower() - if os.path.isdir(source): - # Copy tree. - # TODO(thakis): This copies file attributes like mtime, while the - # single-file branch below doesn't. This should probably be changed to - # be consistent with the single-file branch. - if os.path.exists(dest): - shutil.rmtree(dest) - shutil.copytree(source, dest) - elif extension == '.xib': - return self._CopyXIBFile(source, dest) - elif extension == '.storyboard': - return self._CopyXIBFile(source, dest) - elif extension == '.strings': - self._CopyStringsFile(source, dest, convert_to_binary) - else: - shutil.copy(source, dest) - - def _CopyXIBFile(self, source, dest): - """Compiles a XIB file with ibtool into a binary plist in the bundle.""" - - # ibtool sometimes crashes with relative paths. See crbug.com/314728. 
- base = os.path.dirname(os.path.realpath(__file__)) - if os.path.relpath(source): - source = os.path.join(base, source) - if os.path.relpath(dest): - dest = os.path.join(base, dest) - - args = ['xcrun', 'ibtool', '--errors', '--warnings', '--notices', - '--output-format', 'human-readable-text', '--compile', dest, source] - ibtool_section_re = re.compile(r'/\*.*\*/') - ibtool_re = re.compile(r'.*note:.*is clipping its content') - ibtoolout = subprocess.Popen(args, stdout=subprocess.PIPE) - current_section_header = None - for line in ibtoolout.stdout: - if ibtool_section_re.match(line): - current_section_header = line - elif not ibtool_re.match(line): - if current_section_header: - sys.stdout.write(current_section_header) - current_section_header = None - sys.stdout.write(line) - return ibtoolout.returncode - - def _ConvertToBinary(self, dest): - subprocess.check_call([ - 'xcrun', 'plutil', '-convert', 'binary1', '-o', dest, dest]) - - def _CopyStringsFile(self, source, dest, convert_to_binary): - """Copies a .strings file using iconv to reconvert the input into UTF-16.""" - input_code = self._DetectInputEncoding(source) or "UTF-8" - - # Xcode's CpyCopyStringsFile / builtin-copyStrings seems to call - # CFPropertyListCreateFromXMLData() behind the scenes; at least it prints - # CFPropertyListCreateFromXMLData(): Old-style plist parser: missing - # semicolon in dictionary. - # on invalid files. Do the same kind of validation. - import CoreFoundation - s = open(source, 'rb').read() - d = CoreFoundation.CFDataCreate(None, s, len(s)) - _, error = CoreFoundation.CFPropertyListCreateFromXMLData(None, d, 0, None) - if error: - return - - fp = open(dest, 'wb') - fp.write(s.decode(input_code).encode('UTF-16')) - fp.close() - - if convert_to_binary == 'True': - self._ConvertToBinary(dest) - - def _DetectInputEncoding(self, file_name): - """Reads the first few bytes from file_name and tries to guess the text - encoding. Returns None as a guess if it can't detect it.""" - fp = open(file_name, 'rb') - try: - header = fp.read(3) - except e: - fp.close() - return None - fp.close() - if header.startswith("\xFE\xFF"): - return "UTF-16" - elif header.startswith("\xFF\xFE"): - return "UTF-16" - elif header.startswith("\xEF\xBB\xBF"): - return "UTF-8" - else: - return None - - def ExecCopyInfoPlist(self, source, dest, convert_to_binary, *keys): - """Copies the |source| Info.plist to the destination directory |dest|.""" - # Read the source Info.plist into memory. - fd = open(source, 'r') - lines = fd.read() - fd.close() - - # Insert synthesized key/value pairs (e.g. BuildMachineOSBuild). - plist = plistlib.readPlistFromString(lines) - if keys: - plist = dict(plist.items() + json.loads(keys[0]).items()) - lines = plistlib.writePlistToString(plist) - - # Go through all the environment variables and replace them as variables in - # the file. - IDENT_RE = re.compile(r'[/\s]') - for key in os.environ: - if key.startswith('_'): - continue - evar = '${%s}' % key - evalue = os.environ[key] - lines = string.replace(lines, evar, evalue) - - # Xcode supports various suffices on environment variables, which are - # all undocumented. :rfc1034identifier is used in the standard project - # template these days, and :identifier was used earlier. They are used to - # convert non-url characters into things that look like valid urls -- - # except that the replacement character for :identifier, '_' isn't valid - # in a URL either -- oops, hence :rfc1034identifier was born. 
- evar = '${%s:identifier}' % key - evalue = IDENT_RE.sub('_', os.environ[key]) - lines = string.replace(lines, evar, evalue) - - evar = '${%s:rfc1034identifier}' % key - evalue = IDENT_RE.sub('-', os.environ[key]) - lines = string.replace(lines, evar, evalue) - - # Remove any keys with values that haven't been replaced. - lines = lines.split('\n') - for i in range(len(lines)): - if lines[i].strip().startswith("${"): - lines[i] = None - lines[i - 1] = None - lines = '\n'.join(filter(lambda x: x is not None, lines)) - - # Write out the file with variables replaced. - fd = open(dest, 'w') - fd.write(lines) - fd.close() - - # Now write out PkgInfo file now that the Info.plist file has been - # "compiled". - self._WritePkgInfo(dest) - - if convert_to_binary == 'True': - self._ConvertToBinary(dest) - - def _WritePkgInfo(self, info_plist): - """This writes the PkgInfo file from the data stored in Info.plist.""" - plist = plistlib.readPlist(info_plist) - if not plist: - return - - # Only create PkgInfo for executable types. - package_type = plist['CFBundlePackageType'] - if package_type != 'APPL': - return - - # The format of PkgInfo is eight characters, representing the bundle type - # and bundle signature, each four characters. If that is missing, four - # '?' characters are used instead. - signature_code = plist.get('CFBundleSignature', '????') - if len(signature_code) != 4: # Wrong length resets everything, too. - signature_code = '?' * 4 - - dest = os.path.join(os.path.dirname(info_plist), 'PkgInfo') - fp = open(dest, 'w') - fp.write('%s%s' % (package_type, signature_code)) - fp.close() - - def ExecFlock(self, lockfile, *cmd_list): - """Emulates the most basic behavior of Linux's flock(1).""" - # Rely on exception handling to report errors. - fd = os.open(lockfile, os.O_RDONLY|os.O_NOCTTY|os.O_CREAT, 0o666) - fcntl.flock(fd, fcntl.LOCK_EX) - return subprocess.call(cmd_list) - - def ExecFilterLibtool(self, *cmd_list): - """Calls libtool and filters out '/path/to/libtool: file: foo.o has no - symbols'.""" - libtool_re = re.compile(r'^.*libtool: file: .* has no symbols$') - libtool_re5 = re.compile( - r'^.*libtool: warning for library: ' + - r'.* the table of contents is empty ' + - r'\(no object file members in the library define global symbols\)$') - env = os.environ.copy() - # Ref: - # http://www.opensource.apple.com/source/cctools/cctools-809/misc/libtool.c - # The problem with this flag is that it resets the file mtime on the file to - # epoch=0, e.g. 1970-1-1 or 1969-12-31 depending on timezone. - env['ZERO_AR_DATE'] = '1' - libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env) - _, err = libtoolout.communicate() - for line in err.splitlines(): - if not libtool_re.match(line) and not libtool_re5.match(line): - print >>sys.stderr, line - # Unconditionally touch the output .a file on the command line if present - # and the command succeeded. A bit hacky. - if not libtoolout.returncode: - for i in range(len(cmd_list) - 1): - if cmd_list[i] == "-o" and cmd_list[i+1].endswith('.a'): - os.utime(cmd_list[i+1], None) - break - return libtoolout.returncode - - def ExecPackageFramework(self, framework, version): - """Takes a path to Something.framework and the Current version of that and - sets up all the symlinks.""" - # Find the name of the binary based on the part before the ".framework". 
- binary = os.path.basename(framework).split('.')[0] - - CURRENT = 'Current' - RESOURCES = 'Resources' - VERSIONS = 'Versions' - - if not os.path.exists(os.path.join(framework, VERSIONS, version, binary)): - # Binary-less frameworks don't seem to contain symlinks (see e.g. - # chromium's out/Debug/org.chromium.Chromium.manifest/ bundle). - return - - # Move into the framework directory to set the symlinks correctly. - pwd = os.getcwd() - os.chdir(framework) - - # Set up the Current version. - self._Relink(version, os.path.join(VERSIONS, CURRENT)) - - # Set up the root symlinks. - self._Relink(os.path.join(VERSIONS, CURRENT, binary), binary) - self._Relink(os.path.join(VERSIONS, CURRENT, RESOURCES), RESOURCES) - - # Back to where we were before! - os.chdir(pwd) - - def _Relink(self, dest, link): - """Creates a symlink to |dest| named |link|. If |link| already exists, - it is overwritten.""" - if os.path.lexists(link): - os.remove(link) - os.symlink(dest, link) - - def ExecCompileXcassets(self, keys, *inputs): - """Compiles multiple .xcassets files into a single .car file. - - This invokes 'actool' to compile all the inputs .xcassets files. The - |keys| arguments is a json-encoded dictionary of extra arguments to - pass to 'actool' when the asset catalogs contains an application icon - or a launch image. - - Note that 'actool' does not create the Assets.car file if the asset - catalogs does not contains imageset. - """ - command_line = [ - 'xcrun', 'actool', '--output-format', 'human-readable-text', - '--compress-pngs', '--notices', '--warnings', '--errors', - ] - is_iphone_target = 'IPHONEOS_DEPLOYMENT_TARGET' in os.environ - if is_iphone_target: - platform = os.environ['CONFIGURATION'].split('-')[-1] - if platform not in ('iphoneos', 'iphonesimulator'): - platform = 'iphonesimulator' - command_line.extend([ - '--platform', platform, '--target-device', 'iphone', - '--target-device', 'ipad', '--minimum-deployment-target', - os.environ['IPHONEOS_DEPLOYMENT_TARGET'], '--compile', - os.path.abspath(os.environ['CONTENTS_FOLDER_PATH']), - ]) - else: - command_line.extend([ - '--platform', 'macosx', '--target-device', 'mac', - '--minimum-deployment-target', os.environ['MACOSX_DEPLOYMENT_TARGET'], - '--compile', - os.path.abspath(os.environ['UNLOCALIZED_RESOURCES_FOLDER_PATH']), - ]) - if keys: - keys = json.loads(keys) - for key, value in keys.iteritems(): - arg_name = '--' + key - if isinstance(value, bool): - if value: - command_line.append(arg_name) - elif isinstance(value, list): - for v in value: - command_line.append(arg_name) - command_line.append(str(v)) - else: - command_line.append(arg_name) - command_line.append(str(value)) - # Note: actool crashes if inputs path are relative, so use os.path.abspath - # to get absolute path name for inputs. - command_line.extend(map(os.path.abspath, inputs)) - subprocess.check_call(command_line) - - def ExecMergeInfoPlist(self, output, *inputs): - """Merge multiple .plist files into a single .plist file.""" - merged_plist = {} - for path in inputs: - plist = self._LoadPlistMaybeBinary(path) - self._MergePlist(merged_plist, plist) - plistlib.writePlist(merged_plist, output) - - def ExecCodeSignBundle(self, key, resource_rules, entitlements, provisioning): - """Code sign a bundle. - - This function tries to code sign an iOS bundle, following the same - algorithm as Xcode: - 1. copy ResourceRules.plist from the user or the SDK into the bundle, - 2. 
pick the provisioning profile that best match the bundle identifier, - and copy it into the bundle as embedded.mobileprovision, - 3. copy Entitlements.plist from user or SDK next to the bundle, - 4. code sign the bundle. - """ - resource_rules_path = self._InstallResourceRules(resource_rules) - substitutions, overrides = self._InstallProvisioningProfile( - provisioning, self._GetCFBundleIdentifier()) - entitlements_path = self._InstallEntitlements( - entitlements, substitutions, overrides) - subprocess.check_call([ - 'codesign', '--force', '--sign', key, '--resource-rules', - resource_rules_path, '--entitlements', entitlements_path, - os.path.join( - os.environ['TARGET_BUILD_DIR'], - os.environ['FULL_PRODUCT_NAME'])]) - - def _InstallResourceRules(self, resource_rules): - """Installs ResourceRules.plist from user or SDK into the bundle. - - Args: - resource_rules: string, optional, path to the ResourceRules.plist file - to use, default to "${SDKROOT}/ResourceRules.plist" - - Returns: - Path to the copy of ResourceRules.plist into the bundle. - """ - source_path = resource_rules - target_path = os.path.join( - os.environ['BUILT_PRODUCTS_DIR'], - os.environ['CONTENTS_FOLDER_PATH'], - 'ResourceRules.plist') - if not source_path: - source_path = os.path.join( - os.environ['SDKROOT'], 'ResourceRules.plist') - shutil.copy2(source_path, target_path) - return target_path - - def _InstallProvisioningProfile(self, profile, bundle_identifier): - """Installs embedded.mobileprovision into the bundle. - - Args: - profile: string, optional, short name of the .mobileprovision file - to use, if empty or the file is missing, the best file installed - will be used - bundle_identifier: string, value of CFBundleIdentifier from Info.plist - - Returns: - A tuple containing two dictionary: variables substitutions and values - to overrides when generating the entitlements file. - """ - source_path, provisioning_data, team_id = self._FindProvisioningProfile( - profile, bundle_identifier) - target_path = os.path.join( - os.environ['BUILT_PRODUCTS_DIR'], - os.environ['CONTENTS_FOLDER_PATH'], - 'embedded.mobileprovision') - shutil.copy2(source_path, target_path) - substitutions = self._GetSubstitutions(bundle_identifier, team_id + '.') - return substitutions, provisioning_data['Entitlements'] - - def _FindProvisioningProfile(self, profile, bundle_identifier): - """Finds the .mobileprovision file to use for signing the bundle. - - Checks all the installed provisioning profiles (or if the user specified - the PROVISIONING_PROFILE variable, only consult it) and select the most - specific that correspond to the bundle identifier. - - Args: - profile: string, optional, short name of the .mobileprovision file - to use, if empty or the file is missing, the best file installed - will be used - bundle_identifier: string, value of CFBundleIdentifier from Info.plist - - Returns: - A tuple of the path to the selected provisioning profile, the data of - the embedded plist in the provisioning profile and the team identifier - to use for code signing. - - Raises: - SystemExit: if no .mobileprovision can be used to sign the bundle. 
- """ - profiles_dir = os.path.join( - os.environ['HOME'], 'Library', 'MobileDevice', 'Provisioning Profiles') - if not os.path.isdir(profiles_dir): - print >>sys.stderr, ( - 'cannot find mobile provisioning for %s' % bundle_identifier) - sys.exit(1) - provisioning_profiles = None - if profile: - profile_path = os.path.join(profiles_dir, profile + '.mobileprovision') - if os.path.exists(profile_path): - provisioning_profiles = [profile_path] - if not provisioning_profiles: - provisioning_profiles = glob.glob( - os.path.join(profiles_dir, '*.mobileprovision')) - valid_provisioning_profiles = {} - for profile_path in provisioning_profiles: - profile_data = self._LoadProvisioningProfile(profile_path) - app_id_pattern = profile_data.get( - 'Entitlements', {}).get('application-identifier', '') - for team_identifier in profile_data.get('TeamIdentifier', []): - app_id = '%s.%s' % (team_identifier, bundle_identifier) - if fnmatch.fnmatch(app_id, app_id_pattern): - valid_provisioning_profiles[app_id_pattern] = ( - profile_path, profile_data, team_identifier) - if not valid_provisioning_profiles: - print >>sys.stderr, ( - 'cannot find mobile provisioning for %s' % bundle_identifier) - sys.exit(1) - # If the user has multiple provisioning profiles installed that can be - # used for ${bundle_identifier}, pick the most specific one (ie. the - # provisioning profile whose pattern is the longest). - selected_key = max(valid_provisioning_profiles, key=lambda v: len(v)) - return valid_provisioning_profiles[selected_key] - - def _LoadProvisioningProfile(self, profile_path): - """Extracts the plist embedded in a provisioning profile. - - Args: - profile_path: string, path to the .mobileprovision file - - Returns: - Content of the plist embedded in the provisioning profile as a dictionary. - """ - with tempfile.NamedTemporaryFile() as temp: - subprocess.check_call([ - 'security', 'cms', '-D', '-i', profile_path, '-o', temp.name]) - return self._LoadPlistMaybeBinary(temp.name) - - def _MergePlist(self, merged_plist, plist): - """Merge |plist| into |merged_plist|.""" - for key, value in plist.iteritems(): - if isinstance(value, dict): - merged_value = merged_plist.get(key, {}) - if isinstance(merged_value, dict): - self._MergePlist(merged_value, value) - merged_plist[key] = merged_value - else: - merged_plist[key] = value - else: - merged_plist[key] = value - - def _LoadPlistMaybeBinary(self, plist_path): - """Loads into a memory a plist possibly encoded in binary format. - - This is a wrapper around plistlib.readPlist that tries to convert the - plist to the XML format if it can't be parsed (assuming that it is in - the binary format). - - Args: - plist_path: string, path to a plist file, in XML or binary format - - Returns: - Content of the plist as a dictionary. - """ - try: - # First, try to read the file using plistlib that only supports XML, - # and if an exception is raised, convert a temporary copy to XML and - # load that copy. - return plistlib.readPlist(plist_path) - except: - pass - with tempfile.NamedTemporaryFile() as temp: - shutil.copy2(plist_path, temp.name) - subprocess.check_call(['plutil', '-convert', 'xml1', temp.name]) - return plistlib.readPlist(temp.name) - - def _GetSubstitutions(self, bundle_identifier, app_identifier_prefix): - """Constructs a dictionary of variable substitutions for Entitlements.plist. 
- - Args: - bundle_identifier: string, value of CFBundleIdentifier from Info.plist - app_identifier_prefix: string, value for AppIdentifierPrefix - - Returns: - Dictionary of substitutions to apply when generating Entitlements.plist. - """ - return { - 'CFBundleIdentifier': bundle_identifier, - 'AppIdentifierPrefix': app_identifier_prefix, - } - - def _GetCFBundleIdentifier(self): - """Extracts CFBundleIdentifier value from Info.plist in the bundle. - - Returns: - Value of CFBundleIdentifier in the Info.plist located in the bundle. - """ - info_plist_path = os.path.join( - os.environ['TARGET_BUILD_DIR'], - os.environ['INFOPLIST_PATH']) - info_plist_data = self._LoadPlistMaybeBinary(info_plist_path) - return info_plist_data['CFBundleIdentifier'] - - def _InstallEntitlements(self, entitlements, substitutions, overrides): - """Generates and install the ${BundleName}.xcent entitlements file. - - Expands variables "$(variable)" pattern in the source entitlements file, - add extra entitlements defined in the .mobileprovision file and the copy - the generated plist to "${BundlePath}.xcent". - - Args: - entitlements: string, optional, path to the Entitlements.plist template - to use, defaults to "${SDKROOT}/Entitlements.plist" - substitutions: dictionary, variable substitutions - overrides: dictionary, values to add to the entitlements - - Returns: - Path to the generated entitlements file. - """ - source_path = entitlements - target_path = os.path.join( - os.environ['BUILT_PRODUCTS_DIR'], - os.environ['PRODUCT_NAME'] + '.xcent') - if not source_path: - source_path = os.path.join( - os.environ['SDKROOT'], - 'Entitlements.plist') - shutil.copy2(source_path, target_path) - data = self._LoadPlistMaybeBinary(target_path) - data = self._ExpandVariables(data, substitutions) - if overrides: - for key in overrides: - if key not in data: - data[key] = overrides[key] - plistlib.writePlist(data, target_path) - return target_path - - def _ExpandVariables(self, data, substitutions): - """Expands variables "$(variable)" in data. - - Args: - data: object, can be either string, list or dictionary - substitutions: dictionary, variable substitutions to perform - - Returns: - Copy of data where each references to "$(variable)" has been replaced - by the corresponding value found in substitutions, or left intact if - the key was not found. - """ - if isinstance(data, str): - for key, value in substitutions.iteritems(): - data = data.replace('$(%s)' % key, value) - return data - if isinstance(data, list): - return [self._ExpandVariables(v, substitutions) for v in data] - if isinstance(data, dict): - return {k: self._ExpandVariables(data[k], substitutions) for k in data} - return data - -if __name__ == '__main__': - sys.exit(main(sys.argv[1:])) diff --git a/test/addons-napi/3_callbacks/build/Makefile b/test/addons-napi/3_callbacks/build/Makefile deleted file mode 100644 index 1d8dde16395e8f..00000000000000 --- a/test/addons-napi/3_callbacks/build/Makefile +++ /dev/null @@ -1,342 +0,0 @@ -# We borrow heavily from the kernel build setup, though we are simpler since -# we don't have Kconfig tweaking settings on us. - -# The implicit make rules have it looking for RCS files, among other things. -# We instead explicitly write all the rules we care about. -# It's even quicker (saves ~200ms) to pass -r on the command line. -MAKEFLAGS=-r - -# The source directory tree. -srcdir := .. -abs_srcdir := $(abspath $(srcdir)) - -# The name of the builddir. -builddir_name ?= . 
- -# The V=1 flag on command line makes us verbosely print command lines. -ifdef V - quiet= -else - quiet=quiet_ -endif - -# Specify BUILDTYPE=Release on the command line for a release build. -BUILDTYPE ?= Release - -# Directory all our build output goes into. -# Note that this must be two directories beneath src/ for unit tests to pass, -# as they reach into the src/ directory for data with relative paths. -builddir ?= $(builddir_name)/$(BUILDTYPE) -abs_builddir := $(abspath $(builddir)) -depsdir := $(builddir)/.deps - -# Object output directory. -obj := $(builddir)/obj -abs_obj := $(abspath $(obj)) - -# We build up a list of every single one of the targets so we can slurp in the -# generated dependency rule Makefiles in one pass. -all_deps := - - - -CC.target ?= $(CC) -CFLAGS.target ?= $(CPPFLAGS) $(CFLAGS) -CXX.target ?= $(CXX) -CXXFLAGS.target ?= $(CPPFLAGS) $(CXXFLAGS) -LINK.target ?= $(LINK) -LDFLAGS.target ?= $(LDFLAGS) -AR.target ?= $(AR) - -# C++ apps need to be linked with g++. -LINK ?= $(CXX.target) - -# TODO(evan): move all cross-compilation logic to gyp-time so we don't need -# to replicate this environment fallback in make as well. -CC.host ?= gcc -CFLAGS.host ?= $(CPPFLAGS_host) $(CFLAGS_host) -CXX.host ?= g++ -CXXFLAGS.host ?= $(CPPFLAGS_host) $(CXXFLAGS_host) -LINK.host ?= $(CXX.host) -LDFLAGS.host ?= -AR.host ?= ar - -# Define a dir function that can handle spaces. -# http://www.gnu.org/software/make/manual/make.html#Syntax-of-Functions -# "leading spaces cannot appear in the text of the first argument as written. -# These characters can be put into the argument value by variable substitution." -empty := -space := $(empty) $(empty) - -# http://stackoverflow.com/questions/1189781/using-make-dir-or-notdir-on-a-path-with-spaces -replace_spaces = $(subst $(space),?,$1) -unreplace_spaces = $(subst ?,$(space),$1) -dirx = $(call unreplace_spaces,$(dir $(call replace_spaces,$1))) - -# Flags to make gcc output dependency info. Note that you need to be -# careful here to use the flags that ccache and distcc can understand. -# We write to a dep file on the side first and then rename at the end -# so we can't end up with a broken dep file. -depfile = $(depsdir)/$(call replace_spaces,$@).d -DEPFLAGS = -MMD -MF $(depfile).raw - -# We have to fixup the deps output in a few ways. -# (1) the file output should mention the proper .o file. -# ccache or distcc lose the path to the target, so we convert a rule of -# the form: -# foobar.o: DEP1 DEP2 -# into -# path/to/foobar.o: DEP1 DEP2 -# (2) we want missing files not to cause us to fail to build. -# We want to rewrite -# foobar.o: DEP1 DEP2 \ -# DEP3 -# to -# DEP1: -# DEP2: -# DEP3: -# so if the files are missing, they're just considered phony rules. -# We have to do some pretty insane escaping to get those backslashes -# and dollar signs past make, the shell, and sed at the same time. -# Doesn't work with spaces, but that's fine: .d files have spaces in -# their names replaced with other characters. -define fixup_dep -# The depfile may not exist if the input file didn't have any #includes. -touch $(depfile).raw -# Fixup path as in (1). -sed -e "s|^$(notdir $@)|$@|" $(depfile).raw >> $(depfile) -# Add extra rules as in (2). -# We remove slashes and replace spaces with new lines; -# remove blank lines; -# delete the first line and append a colon to the remaining lines. 
-sed -e 's|\\||' -e 'y| |\n|' $(depfile).raw |\ - grep -v '^$$' |\ - sed -e 1d -e 's|$$|:|' \ - >> $(depfile) -rm $(depfile).raw -endef - -# Command definitions: -# - cmd_foo is the actual command to run; -# - quiet_cmd_foo is the brief-output summary of the command. - -quiet_cmd_cc = CC($(TOOLSET)) $@ -cmd_cc = $(CC.$(TOOLSET)) $(GYP_CFLAGS) $(DEPFLAGS) $(CFLAGS.$(TOOLSET)) -c -o $@ $< - -quiet_cmd_cxx = CXX($(TOOLSET)) $@ -cmd_cxx = $(CXX.$(TOOLSET)) $(GYP_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< - -quiet_cmd_objc = CXX($(TOOLSET)) $@ -cmd_objc = $(CC.$(TOOLSET)) $(GYP_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $< - -quiet_cmd_objcxx = CXX($(TOOLSET)) $@ -cmd_objcxx = $(CXX.$(TOOLSET)) $(GYP_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $< - -# Commands for precompiled header files. -quiet_cmd_pch_c = CXX($(TOOLSET)) $@ -cmd_pch_c = $(CC.$(TOOLSET)) $(GYP_PCH_CFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< -quiet_cmd_pch_cc = CXX($(TOOLSET)) $@ -cmd_pch_cc = $(CC.$(TOOLSET)) $(GYP_PCH_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< -quiet_cmd_pch_m = CXX($(TOOLSET)) $@ -cmd_pch_m = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $< -quiet_cmd_pch_mm = CXX($(TOOLSET)) $@ -cmd_pch_mm = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $< - -# gyp-mac-tool is written next to the root Makefile by gyp. -# Use $(4) for the command, since $(2) and $(3) are used as flag by do_cmd -# already. -quiet_cmd_mac_tool = MACTOOL $(4) $< -cmd_mac_tool = ./gyp-mac-tool $(4) $< "$@" - -quiet_cmd_mac_package_framework = PACKAGE FRAMEWORK $@ -cmd_mac_package_framework = ./gyp-mac-tool package-framework "$@" $(4) - -quiet_cmd_infoplist = INFOPLIST $@ -cmd_infoplist = $(CC.$(TOOLSET)) -E -P -Wno-trigraphs -x c $(INFOPLIST_DEFINES) "$<" -o "$@" - -quiet_cmd_touch = TOUCH $@ -cmd_touch = touch $@ - -quiet_cmd_copy = COPY $@ -# send stderr to /dev/null to ignore messages when linking directories. -cmd_copy = rm -rf "$@" && cp -af "$<" "$@" - -quiet_cmd_alink = LIBTOOL-STATIC $@ -cmd_alink = rm -f $@ && ./gyp-mac-tool filter-libtool libtool $(GYP_LIBTOOLFLAGS) -static -o $@ $(filter %.o,$^) - -quiet_cmd_link = LINK($(TOOLSET)) $@ -cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS) - -quiet_cmd_solink = SOLINK($(TOOLSET)) $@ -cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS) - -quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@ -cmd_solink_module = $(LINK.$(TOOLSET)) -bundle $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS) - - -# Define an escape_quotes function to escape single quotes. -# This allows us to handle quotes properly as long as we always use -# use single quotes and escape_quotes. -escape_quotes = $(subst ','\'',$(1)) -# This comment is here just to include a ' to unconfuse syntax highlighting. -# Define an escape_vars function to escape '$' variable syntax. -# This allows us to read/write command lines with shell variables (e.g. -# $LD_LIBRARY_PATH), without triggering make substitution. -escape_vars = $(subst $$,$$$$,$(1)) -# Helper that expands to a shell command to echo a string exactly as it is in -# make. This uses printf instead of echo because printf's behaviour with respect -# to escape sequences is more portable than echo's across different shells -# (e.g., dash, bash). 
-exact_echo = printf '%s\n' '$(call escape_quotes,$(1))' - -# Helper to compare the command we're about to run against the command -# we logged the last time we ran the command. Produces an empty -# string (false) when the commands match. -# Tricky point: Make has no string-equality test function. -# The kernel uses the following, but it seems like it would have false -# positives, where one string reordered its arguments. -# arg_check = $(strip $(filter-out $(cmd_$(1)), $(cmd_$@)) \ -# $(filter-out $(cmd_$@), $(cmd_$(1)))) -# We instead substitute each for the empty string into the other, and -# say they're equal if both substitutions produce the empty string. -# .d files contain ? instead of spaces, take that into account. -command_changed = $(or $(subst $(cmd_$(1)),,$(cmd_$(call replace_spaces,$@))),\ - $(subst $(cmd_$(call replace_spaces,$@)),,$(cmd_$(1)))) - -# Helper that is non-empty when a prerequisite changes. -# Normally make does this implicitly, but we force rules to always run -# so we can check their command lines. -# $? -- new prerequisites -# $| -- order-only dependencies -prereq_changed = $(filter-out FORCE_DO_CMD,$(filter-out $|,$?)) - -# Helper that executes all postbuilds until one fails. -define do_postbuilds - @E=0;\ - for p in $(POSTBUILDS); do\ - eval $$p;\ - E=$$?;\ - if [ $$E -ne 0 ]; then\ - break;\ - fi;\ - done;\ - if [ $$E -ne 0 ]; then\ - rm -rf "$@";\ - exit $$E;\ - fi -endef - -# do_cmd: run a command via the above cmd_foo names, if necessary. -# Should always run for a given target to handle command-line changes. -# Second argument, if non-zero, makes it do asm/C/C++ dependency munging. -# Third argument, if non-zero, makes it do POSTBUILDS processing. -# Note: We intentionally do NOT call dirx for depfile, since it contains ? for -# spaces already and dirx strips the ? characters. -define do_cmd -$(if $(or $(command_changed),$(prereq_changed)), - @$(call exact_echo, $($(quiet)cmd_$(1))) - @mkdir -p "$(call dirx,$@)" "$(dir $(depfile))" - $(if $(findstring flock,$(word 2,$(cmd_$1))), - @$(cmd_$(1)) - @echo " $(quiet_cmd_$(1)): Finished", - @$(cmd_$(1)) - ) - @$(call exact_echo,$(call escape_vars,cmd_$(call replace_spaces,$@) := $(cmd_$(1)))) > $(depfile) - @$(if $(2),$(fixup_dep)) - $(if $(and $(3), $(POSTBUILDS)), - $(call do_postbuilds) - ) -) -endef - -# Declare the "all" target first so it is the default, -# even though we don't have the deps yet. -.PHONY: all -all: - -# make looks for ways to re-generate included makefiles, but in our case, we -# don't have a direct way. Explicitly telling make that it has nothing to do -# for them makes it go faster. -%.d: ; - -# Use FORCE_DO_CMD to force a target to run. Should be coupled with -# do_cmd. -.PHONY: FORCE_DO_CMD -FORCE_DO_CMD: - -TOOLSET := target -# Suffix rules, putting all outputs into $(obj). -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.m FORCE_DO_CMD - @$(call do_cmd,objc,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.mm FORCE_DO_CMD - @$(call do_cmd,objcxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.S FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.s FORCE_DO_CMD - @$(call do_cmd,cc,1) - -# Try building from generated source, too. 
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.m FORCE_DO_CMD - @$(call do_cmd,objc,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.mm FORCE_DO_CMD - @$(call do_cmd,objcxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.S FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.s FORCE_DO_CMD - @$(call do_cmd,cc,1) - -$(obj).$(TOOLSET)/%.o: $(obj)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.m FORCE_DO_CMD - @$(call do_cmd,objc,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.mm FORCE_DO_CMD - @$(call do_cmd,objcxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.S FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.s FORCE_DO_CMD - @$(call do_cmd,cc,1) - - -ifeq ($(strip $(foreach prefix,$(NO_LOAD),\ - $(findstring $(join ^,$(prefix)),\ - $(join ^,binding.target.mk)))),) - include binding.target.mk -endif - -quiet_cmd_regen_makefile = ACTION Regenerating $@ -cmd_regen_makefile = cd $(srcdir); /Users/trott/io.js/deps/npm/node_modules/node-gyp/gyp/gyp_main.py -fmake --ignore-environment "--toplevel-dir=." -I/Users/trott/io.js/test/addons-napi/3_callbacks/build/config.gypi -I/Users/trott/io.js/deps/npm/node_modules/node-gyp/addon.gypi -I/Users/trott/io.js/common.gypi "--depth=." "-Goutput_dir=." "--generator-output=build" "-Dlibrary=shared_library" "-Dvisibility=default" "-Dnode_root_dir=/Users/trott/io.js" "-Dnode_gyp_dir=/Users/trott/io.js/deps/npm/node_modules/node-gyp" "-Dnode_lib_file=/Users/trott/io.js/$(Configuration)/node.lib" "-Dmodule_root_dir=/Users/trott/io.js/test/addons-napi/3_callbacks" "-Dnode_engine=v8" binding.gyp -Makefile: $(srcdir)/../../../deps/npm/node_modules/node-gyp/addon.gypi $(srcdir)/build/config.gypi $(srcdir)/binding.gyp $(srcdir)/../../../common.gypi - $(call do_cmd,regen_makefile) - -# "all" is a concatenation of the "all" targets from all the included -# sub-makefiles. This is just here to clarify. -all: - -# Add in dependency-tracking rules. $(all_deps) is the list of every single -# target in our tree. Only consider the ones with .d (dependency) info: -d_files := $(wildcard $(foreach f,$(all_deps),$(depsdir)/$(f).d)) -ifneq ($(d_files),) - include $(d_files) -endif diff --git a/test/addons-napi/3_callbacks/build/binding.Makefile b/test/addons-napi/3_callbacks/build/binding.Makefile deleted file mode 100644 index 0556c70f61905f..00000000000000 --- a/test/addons-napi/3_callbacks/build/binding.Makefile +++ /dev/null @@ -1,6 +0,0 @@ -# This file is generated by gyp; do not edit. - -export builddir_name ?= ./build/. -.PHONY: all -all: - $(MAKE) binding diff --git a/test/addons-napi/3_callbacks/build/binding.target.mk b/test/addons-napi/3_callbacks/build/binding.target.mk deleted file mode 100644 index a707bab99ab136..00000000000000 --- a/test/addons-napi/3_callbacks/build/binding.target.mk +++ /dev/null @@ -1,178 +0,0 @@ -# This file is generated by gyp; do not edit. 
- -TOOLSET := target -TARGET := binding -DEFS_Debug := \ - '-DNODE_GYP_MODULE_NAME=binding' \ - '-DUSING_UV_SHARED=1' \ - '-DUSING_V8_SHARED=1' \ - '-DV8_DEPRECATION_WARNINGS=1' \ - '-D_DARWIN_USE_64_BIT_INODE=1' \ - '-D_LARGEFILE_SOURCE' \ - '-D_FILE_OFFSET_BITS=64' \ - '-DBUILDING_NODE_EXTENSION' \ - '-DDEBUG' \ - '-D_DEBUG' \ - '-DV8_ENABLE_CHECKS' - -# Flags passed to all source files. -CFLAGS_Debug := \ - -O0 \ - -gdwarf-2 \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -Wall \ - -Wendif-labels \ - -W \ - -Wno-unused-parameter - -# Flags passed to only C files. -CFLAGS_C_Debug := \ - -fno-strict-aliasing - -# Flags passed to only C++ files. -CFLAGS_CC_Debug := \ - -std=gnu++0x \ - -stdlib=libc++ \ - -fno-rtti \ - -fno-exceptions \ - -fno-threadsafe-statics \ - -fno-strict-aliasing - -# Flags passed to only ObjC files. -CFLAGS_OBJC_Debug := - -# Flags passed to only ObjC++ files. -CFLAGS_OBJCC_Debug := - -INCS_Debug := \ - -I/Users/trott/io.js/include/node \ - -I/Users/trott/io.js/src \ - -I/Users/trott/io.js/deps/uv/include \ - -I/Users/trott/io.js/deps/v8/include - -DEFS_Release := \ - '-DNODE_GYP_MODULE_NAME=binding' \ - '-DUSING_UV_SHARED=1' \ - '-DUSING_V8_SHARED=1' \ - '-DV8_DEPRECATION_WARNINGS=1' \ - '-D_DARWIN_USE_64_BIT_INODE=1' \ - '-D_LARGEFILE_SOURCE' \ - '-D_FILE_OFFSET_BITS=64' \ - '-DBUILDING_NODE_EXTENSION' - -# Flags passed to all source files. -CFLAGS_Release := \ - -Os \ - -gdwarf-2 \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -Wall \ - -Wendif-labels \ - -W \ - -Wno-unused-parameter - -# Flags passed to only C files. -CFLAGS_C_Release := \ - -fno-strict-aliasing - -# Flags passed to only C++ files. -CFLAGS_CC_Release := \ - -std=gnu++0x \ - -stdlib=libc++ \ - -fno-rtti \ - -fno-exceptions \ - -fno-threadsafe-statics \ - -fno-strict-aliasing - -# Flags passed to only ObjC files. -CFLAGS_OBJC_Release := - -# Flags passed to only ObjC++ files. -CFLAGS_OBJCC_Release := - -INCS_Release := \ - -I/Users/trott/io.js/include/node \ - -I/Users/trott/io.js/src \ - -I/Users/trott/io.js/deps/uv/include \ - -I/Users/trott/io.js/deps/v8/include - -OBJS := \ - $(obj).target/$(TARGET)/binding.o - -# Add to the list of files we specially track dependencies for. -all_deps += $(OBJS) - -# CFLAGS et al overrides must be target-local. -# See "Target-specific Variable Values" in the GNU Make manual. -$(OBJS): TOOLSET := $(TOOLSET) -$(OBJS): GYP_CFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) -$(OBJS): GYP_CXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) -$(OBJS): GYP_OBJCFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) $(CFLAGS_OBJC_$(BUILDTYPE)) -$(OBJS): GYP_OBJCXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) $(CFLAGS_OBJCC_$(BUILDTYPE)) - -# Suffix rules, putting all outputs into $(obj). - -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) - -# Try building from generated source, too. - -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) - -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) - -# End of this set of suffix rules -### Rules for final target. 
-LDFLAGS_Debug := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -L$(builddir) \ - -stdlib=libc++ - -LIBTOOLFLAGS_Debug := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first - -LDFLAGS_Release := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -L$(builddir) \ - -stdlib=libc++ - -LIBTOOLFLAGS_Release := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first - -LIBS := - -$(builddir)/binding.node: GYP_LDFLAGS := $(LDFLAGS_$(BUILDTYPE)) -$(builddir)/binding.node: LIBS := $(LIBS) -$(builddir)/binding.node: GYP_LIBTOOLFLAGS := $(LIBTOOLFLAGS_$(BUILDTYPE)) -$(builddir)/binding.node: TOOLSET := $(TOOLSET) -$(builddir)/binding.node: $(OBJS) FORCE_DO_CMD - $(call do_cmd,solink_module) - -all_deps += $(builddir)/binding.node -# Add target alias -.PHONY: binding -binding: $(builddir)/binding.node - -# Short alias for building this executable. -.PHONY: binding.node -binding.node: $(builddir)/binding.node - -# Add executable to "all" target. -.PHONY: all -all: $(builddir)/binding.node - diff --git a/test/addons-napi/3_callbacks/build/config.gypi b/test/addons-napi/3_callbacks/build/config.gypi deleted file mode 100644 index 6a35cfccbb772b..00000000000000 --- a/test/addons-napi/3_callbacks/build/config.gypi +++ /dev/null @@ -1,70 +0,0 @@ -# Do not edit. File was generated by node-gyp's "configure" step -{ - "target_defaults": { - "cflags": [], - "default_configuration": "Release", - "defines": [], - "include_dirs": [], - "libraries": [] - }, - "variables": { - "asan": 0, - "coverage": "false", - "debug_devtools": "node", - "debug_http2": "false", - "debug_nghttp2": "false", - "force_dynamic_crt": 0, - "host_arch": "x64", - "icu_data_file": "icudt59l.dat", - "icu_data_in": "../../deps/icu-small/source/data/in/icudt59l.dat", - "icu_endianness": "l", - "icu_gyp_path": "tools/icu/icu-generic.gyp", - "icu_locales": "en,root", - "icu_path": "deps/icu-small", - "icu_small": "true", - "icu_ver_major": "59", - "llvm_version": 0, - "node_byteorder": "little", - "node_enable_d8": "false", - "node_enable_v8_vtunejit": "false", - "node_install_npm": "true", - "node_module_version": 58, - "node_no_browser_globals": "false", - "node_prefix": "/usr/local", - "node_release_urlbase": "", - "node_shared": "false", - "node_shared_cares": "false", - "node_shared_http_parser": "false", - "node_shared_libuv": "false", - "node_shared_openssl": "false", - "node_shared_zlib": "false", - "node_tag": "", - "node_use_bundled_v8": "true", - "node_use_dtrace": "true", - "node_use_etw": "false", - "node_use_lttng": "false", - "node_use_openssl": "true", - "node_use_perfctr": "false", - "node_use_v8_platform": "true", - "node_without_node_options": "false", - "openssl_fips": "", - "openssl_no_asm": 0, - "shlib_suffix": "58.dylib", - "target_arch": "x64", - "uv_parent_path": "/deps/uv/", - "uv_use_dtrace": "true", - "v8_enable_gdbjit": 0, - "v8_enable_i18n_support": 1, - "v8_enable_inspector": 1, - "v8_no_strict_aliasing": 1, - "v8_optimized_debug": 0, - "v8_promise_internal_field_count": 1, - "v8_random_seed": 0, - "v8_trace_maps": 0, - "v8_use_snapshot": "true", - "want_separate_host_toolset": 0, - "xcode_version": "8.0", - "nodedir": "/Users/trott/io.js", - "standalone_static_library": 1 - } -} diff --git a/test/addons-napi/3_callbacks/build/gyp-mac-tool b/test/addons-napi/3_callbacks/build/gyp-mac-tool deleted file mode 100755 index 
8ef02b0493a003..00000000000000 --- a/test/addons-napi/3_callbacks/build/gyp-mac-tool +++ /dev/null @@ -1,611 +0,0 @@ -#!/usr/bin/env python -# Generated by gyp. Do not edit. -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Utility functions to perform Xcode-style build steps. - -These functions are executed via gyp-mac-tool when using the Makefile generator. -""" - -import fcntl -import fnmatch -import glob -import json -import os -import plistlib -import re -import shutil -import string -import subprocess -import sys -import tempfile - - -def main(args): - executor = MacTool() - exit_code = executor.Dispatch(args) - if exit_code is not None: - sys.exit(exit_code) - - -class MacTool(object): - """This class performs all the Mac tooling steps. The methods can either be - executed directly, or dispatched from an argument list.""" - - def Dispatch(self, args): - """Dispatches a string command to a method.""" - if len(args) < 1: - raise Exception("Not enough arguments") - - method = "Exec%s" % self._CommandifyName(args[0]) - return getattr(self, method)(*args[1:]) - - def _CommandifyName(self, name_string): - """Transforms a tool name like copy-info-plist to CopyInfoPlist""" - return name_string.title().replace('-', '') - - def ExecCopyBundleResource(self, source, dest, convert_to_binary): - """Copies a resource file to the bundle/Resources directory, performing any - necessary compilation on each resource.""" - extension = os.path.splitext(source)[1].lower() - if os.path.isdir(source): - # Copy tree. - # TODO(thakis): This copies file attributes like mtime, while the - # single-file branch below doesn't. This should probably be changed to - # be consistent with the single-file branch. - if os.path.exists(dest): - shutil.rmtree(dest) - shutil.copytree(source, dest) - elif extension == '.xib': - return self._CopyXIBFile(source, dest) - elif extension == '.storyboard': - return self._CopyXIBFile(source, dest) - elif extension == '.strings': - self._CopyStringsFile(source, dest, convert_to_binary) - else: - shutil.copy(source, dest) - - def _CopyXIBFile(self, source, dest): - """Compiles a XIB file with ibtool into a binary plist in the bundle.""" - - # ibtool sometimes crashes with relative paths. See crbug.com/314728. 
- base = os.path.dirname(os.path.realpath(__file__)) - if os.path.relpath(source): - source = os.path.join(base, source) - if os.path.relpath(dest): - dest = os.path.join(base, dest) - - args = ['xcrun', 'ibtool', '--errors', '--warnings', '--notices', - '--output-format', 'human-readable-text', '--compile', dest, source] - ibtool_section_re = re.compile(r'/\*.*\*/') - ibtool_re = re.compile(r'.*note:.*is clipping its content') - ibtoolout = subprocess.Popen(args, stdout=subprocess.PIPE) - current_section_header = None - for line in ibtoolout.stdout: - if ibtool_section_re.match(line): - current_section_header = line - elif not ibtool_re.match(line): - if current_section_header: - sys.stdout.write(current_section_header) - current_section_header = None - sys.stdout.write(line) - return ibtoolout.returncode - - def _ConvertToBinary(self, dest): - subprocess.check_call([ - 'xcrun', 'plutil', '-convert', 'binary1', '-o', dest, dest]) - - def _CopyStringsFile(self, source, dest, convert_to_binary): - """Copies a .strings file using iconv to reconvert the input into UTF-16.""" - input_code = self._DetectInputEncoding(source) or "UTF-8" - - # Xcode's CpyCopyStringsFile / builtin-copyStrings seems to call - # CFPropertyListCreateFromXMLData() behind the scenes; at least it prints - # CFPropertyListCreateFromXMLData(): Old-style plist parser: missing - # semicolon in dictionary. - # on invalid files. Do the same kind of validation. - import CoreFoundation - s = open(source, 'rb').read() - d = CoreFoundation.CFDataCreate(None, s, len(s)) - _, error = CoreFoundation.CFPropertyListCreateFromXMLData(None, d, 0, None) - if error: - return - - fp = open(dest, 'wb') - fp.write(s.decode(input_code).encode('UTF-16')) - fp.close() - - if convert_to_binary == 'True': - self._ConvertToBinary(dest) - - def _DetectInputEncoding(self, file_name): - """Reads the first few bytes from file_name and tries to guess the text - encoding. Returns None as a guess if it can't detect it.""" - fp = open(file_name, 'rb') - try: - header = fp.read(3) - except e: - fp.close() - return None - fp.close() - if header.startswith("\xFE\xFF"): - return "UTF-16" - elif header.startswith("\xFF\xFE"): - return "UTF-16" - elif header.startswith("\xEF\xBB\xBF"): - return "UTF-8" - else: - return None - - def ExecCopyInfoPlist(self, source, dest, convert_to_binary, *keys): - """Copies the |source| Info.plist to the destination directory |dest|.""" - # Read the source Info.plist into memory. - fd = open(source, 'r') - lines = fd.read() - fd.close() - - # Insert synthesized key/value pairs (e.g. BuildMachineOSBuild). - plist = plistlib.readPlistFromString(lines) - if keys: - plist = dict(plist.items() + json.loads(keys[0]).items()) - lines = plistlib.writePlistToString(plist) - - # Go through all the environment variables and replace them as variables in - # the file. - IDENT_RE = re.compile(r'[/\s]') - for key in os.environ: - if key.startswith('_'): - continue - evar = '${%s}' % key - evalue = os.environ[key] - lines = string.replace(lines, evar, evalue) - - # Xcode supports various suffices on environment variables, which are - # all undocumented. :rfc1034identifier is used in the standard project - # template these days, and :identifier was used earlier. They are used to - # convert non-url characters into things that look like valid urls -- - # except that the replacement character for :identifier, '_' isn't valid - # in a URL either -- oops, hence :rfc1034identifier was born. 
- evar = '${%s:identifier}' % key - evalue = IDENT_RE.sub('_', os.environ[key]) - lines = string.replace(lines, evar, evalue) - - evar = '${%s:rfc1034identifier}' % key - evalue = IDENT_RE.sub('-', os.environ[key]) - lines = string.replace(lines, evar, evalue) - - # Remove any keys with values that haven't been replaced. - lines = lines.split('\n') - for i in range(len(lines)): - if lines[i].strip().startswith("${"): - lines[i] = None - lines[i - 1] = None - lines = '\n'.join(filter(lambda x: x is not None, lines)) - - # Write out the file with variables replaced. - fd = open(dest, 'w') - fd.write(lines) - fd.close() - - # Now write out PkgInfo file now that the Info.plist file has been - # "compiled". - self._WritePkgInfo(dest) - - if convert_to_binary == 'True': - self._ConvertToBinary(dest) - - def _WritePkgInfo(self, info_plist): - """This writes the PkgInfo file from the data stored in Info.plist.""" - plist = plistlib.readPlist(info_plist) - if not plist: - return - - # Only create PkgInfo for executable types. - package_type = plist['CFBundlePackageType'] - if package_type != 'APPL': - return - - # The format of PkgInfo is eight characters, representing the bundle type - # and bundle signature, each four characters. If that is missing, four - # '?' characters are used instead. - signature_code = plist.get('CFBundleSignature', '????') - if len(signature_code) != 4: # Wrong length resets everything, too. - signature_code = '?' * 4 - - dest = os.path.join(os.path.dirname(info_plist), 'PkgInfo') - fp = open(dest, 'w') - fp.write('%s%s' % (package_type, signature_code)) - fp.close() - - def ExecFlock(self, lockfile, *cmd_list): - """Emulates the most basic behavior of Linux's flock(1).""" - # Rely on exception handling to report errors. - fd = os.open(lockfile, os.O_RDONLY|os.O_NOCTTY|os.O_CREAT, 0o666) - fcntl.flock(fd, fcntl.LOCK_EX) - return subprocess.call(cmd_list) - - def ExecFilterLibtool(self, *cmd_list): - """Calls libtool and filters out '/path/to/libtool: file: foo.o has no - symbols'.""" - libtool_re = re.compile(r'^.*libtool: file: .* has no symbols$') - libtool_re5 = re.compile( - r'^.*libtool: warning for library: ' + - r'.* the table of contents is empty ' + - r'\(no object file members in the library define global symbols\)$') - env = os.environ.copy() - # Ref: - # http://www.opensource.apple.com/source/cctools/cctools-809/misc/libtool.c - # The problem with this flag is that it resets the file mtime on the file to - # epoch=0, e.g. 1970-1-1 or 1969-12-31 depending on timezone. - env['ZERO_AR_DATE'] = '1' - libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env) - _, err = libtoolout.communicate() - for line in err.splitlines(): - if not libtool_re.match(line) and not libtool_re5.match(line): - print >>sys.stderr, line - # Unconditionally touch the output .a file on the command line if present - # and the command succeeded. A bit hacky. - if not libtoolout.returncode: - for i in range(len(cmd_list) - 1): - if cmd_list[i] == "-o" and cmd_list[i+1].endswith('.a'): - os.utime(cmd_list[i+1], None) - break - return libtoolout.returncode - - def ExecPackageFramework(self, framework, version): - """Takes a path to Something.framework and the Current version of that and - sets up all the symlinks.""" - # Find the name of the binary based on the part before the ".framework". 
- binary = os.path.basename(framework).split('.')[0] - - CURRENT = 'Current' - RESOURCES = 'Resources' - VERSIONS = 'Versions' - - if not os.path.exists(os.path.join(framework, VERSIONS, version, binary)): - # Binary-less frameworks don't seem to contain symlinks (see e.g. - # chromium's out/Debug/org.chromium.Chromium.manifest/ bundle). - return - - # Move into the framework directory to set the symlinks correctly. - pwd = os.getcwd() - os.chdir(framework) - - # Set up the Current version. - self._Relink(version, os.path.join(VERSIONS, CURRENT)) - - # Set up the root symlinks. - self._Relink(os.path.join(VERSIONS, CURRENT, binary), binary) - self._Relink(os.path.join(VERSIONS, CURRENT, RESOURCES), RESOURCES) - - # Back to where we were before! - os.chdir(pwd) - - def _Relink(self, dest, link): - """Creates a symlink to |dest| named |link|. If |link| already exists, - it is overwritten.""" - if os.path.lexists(link): - os.remove(link) - os.symlink(dest, link) - - def ExecCompileXcassets(self, keys, *inputs): - """Compiles multiple .xcassets files into a single .car file. - - This invokes 'actool' to compile all the inputs .xcassets files. The - |keys| arguments is a json-encoded dictionary of extra arguments to - pass to 'actool' when the asset catalogs contains an application icon - or a launch image. - - Note that 'actool' does not create the Assets.car file if the asset - catalogs does not contains imageset. - """ - command_line = [ - 'xcrun', 'actool', '--output-format', 'human-readable-text', - '--compress-pngs', '--notices', '--warnings', '--errors', - ] - is_iphone_target = 'IPHONEOS_DEPLOYMENT_TARGET' in os.environ - if is_iphone_target: - platform = os.environ['CONFIGURATION'].split('-')[-1] - if platform not in ('iphoneos', 'iphonesimulator'): - platform = 'iphonesimulator' - command_line.extend([ - '--platform', platform, '--target-device', 'iphone', - '--target-device', 'ipad', '--minimum-deployment-target', - os.environ['IPHONEOS_DEPLOYMENT_TARGET'], '--compile', - os.path.abspath(os.environ['CONTENTS_FOLDER_PATH']), - ]) - else: - command_line.extend([ - '--platform', 'macosx', '--target-device', 'mac', - '--minimum-deployment-target', os.environ['MACOSX_DEPLOYMENT_TARGET'], - '--compile', - os.path.abspath(os.environ['UNLOCALIZED_RESOURCES_FOLDER_PATH']), - ]) - if keys: - keys = json.loads(keys) - for key, value in keys.iteritems(): - arg_name = '--' + key - if isinstance(value, bool): - if value: - command_line.append(arg_name) - elif isinstance(value, list): - for v in value: - command_line.append(arg_name) - command_line.append(str(v)) - else: - command_line.append(arg_name) - command_line.append(str(value)) - # Note: actool crashes if inputs path are relative, so use os.path.abspath - # to get absolute path name for inputs. - command_line.extend(map(os.path.abspath, inputs)) - subprocess.check_call(command_line) - - def ExecMergeInfoPlist(self, output, *inputs): - """Merge multiple .plist files into a single .plist file.""" - merged_plist = {} - for path in inputs: - plist = self._LoadPlistMaybeBinary(path) - self._MergePlist(merged_plist, plist) - plistlib.writePlist(merged_plist, output) - - def ExecCodeSignBundle(self, key, resource_rules, entitlements, provisioning): - """Code sign a bundle. - - This function tries to code sign an iOS bundle, following the same - algorithm as Xcode: - 1. copy ResourceRules.plist from the user or the SDK into the bundle, - 2. 
pick the provisioning profile that best match the bundle identifier, - and copy it into the bundle as embedded.mobileprovision, - 3. copy Entitlements.plist from user or SDK next to the bundle, - 4. code sign the bundle. - """ - resource_rules_path = self._InstallResourceRules(resource_rules) - substitutions, overrides = self._InstallProvisioningProfile( - provisioning, self._GetCFBundleIdentifier()) - entitlements_path = self._InstallEntitlements( - entitlements, substitutions, overrides) - subprocess.check_call([ - 'codesign', '--force', '--sign', key, '--resource-rules', - resource_rules_path, '--entitlements', entitlements_path, - os.path.join( - os.environ['TARGET_BUILD_DIR'], - os.environ['FULL_PRODUCT_NAME'])]) - - def _InstallResourceRules(self, resource_rules): - """Installs ResourceRules.plist from user or SDK into the bundle. - - Args: - resource_rules: string, optional, path to the ResourceRules.plist file - to use, default to "${SDKROOT}/ResourceRules.plist" - - Returns: - Path to the copy of ResourceRules.plist into the bundle. - """ - source_path = resource_rules - target_path = os.path.join( - os.environ['BUILT_PRODUCTS_DIR'], - os.environ['CONTENTS_FOLDER_PATH'], - 'ResourceRules.plist') - if not source_path: - source_path = os.path.join( - os.environ['SDKROOT'], 'ResourceRules.plist') - shutil.copy2(source_path, target_path) - return target_path - - def _InstallProvisioningProfile(self, profile, bundle_identifier): - """Installs embedded.mobileprovision into the bundle. - - Args: - profile: string, optional, short name of the .mobileprovision file - to use, if empty or the file is missing, the best file installed - will be used - bundle_identifier: string, value of CFBundleIdentifier from Info.plist - - Returns: - A tuple containing two dictionary: variables substitutions and values - to overrides when generating the entitlements file. - """ - source_path, provisioning_data, team_id = self._FindProvisioningProfile( - profile, bundle_identifier) - target_path = os.path.join( - os.environ['BUILT_PRODUCTS_DIR'], - os.environ['CONTENTS_FOLDER_PATH'], - 'embedded.mobileprovision') - shutil.copy2(source_path, target_path) - substitutions = self._GetSubstitutions(bundle_identifier, team_id + '.') - return substitutions, provisioning_data['Entitlements'] - - def _FindProvisioningProfile(self, profile, bundle_identifier): - """Finds the .mobileprovision file to use for signing the bundle. - - Checks all the installed provisioning profiles (or if the user specified - the PROVISIONING_PROFILE variable, only consult it) and select the most - specific that correspond to the bundle identifier. - - Args: - profile: string, optional, short name of the .mobileprovision file - to use, if empty or the file is missing, the best file installed - will be used - bundle_identifier: string, value of CFBundleIdentifier from Info.plist - - Returns: - A tuple of the path to the selected provisioning profile, the data of - the embedded plist in the provisioning profile and the team identifier - to use for code signing. - - Raises: - SystemExit: if no .mobileprovision can be used to sign the bundle. 
- """ - profiles_dir = os.path.join( - os.environ['HOME'], 'Library', 'MobileDevice', 'Provisioning Profiles') - if not os.path.isdir(profiles_dir): - print >>sys.stderr, ( - 'cannot find mobile provisioning for %s' % bundle_identifier) - sys.exit(1) - provisioning_profiles = None - if profile: - profile_path = os.path.join(profiles_dir, profile + '.mobileprovision') - if os.path.exists(profile_path): - provisioning_profiles = [profile_path] - if not provisioning_profiles: - provisioning_profiles = glob.glob( - os.path.join(profiles_dir, '*.mobileprovision')) - valid_provisioning_profiles = {} - for profile_path in provisioning_profiles: - profile_data = self._LoadProvisioningProfile(profile_path) - app_id_pattern = profile_data.get( - 'Entitlements', {}).get('application-identifier', '') - for team_identifier in profile_data.get('TeamIdentifier', []): - app_id = '%s.%s' % (team_identifier, bundle_identifier) - if fnmatch.fnmatch(app_id, app_id_pattern): - valid_provisioning_profiles[app_id_pattern] = ( - profile_path, profile_data, team_identifier) - if not valid_provisioning_profiles: - print >>sys.stderr, ( - 'cannot find mobile provisioning for %s' % bundle_identifier) - sys.exit(1) - # If the user has multiple provisioning profiles installed that can be - # used for ${bundle_identifier}, pick the most specific one (ie. the - # provisioning profile whose pattern is the longest). - selected_key = max(valid_provisioning_profiles, key=lambda v: len(v)) - return valid_provisioning_profiles[selected_key] - - def _LoadProvisioningProfile(self, profile_path): - """Extracts the plist embedded in a provisioning profile. - - Args: - profile_path: string, path to the .mobileprovision file - - Returns: - Content of the plist embedded in the provisioning profile as a dictionary. - """ - with tempfile.NamedTemporaryFile() as temp: - subprocess.check_call([ - 'security', 'cms', '-D', '-i', profile_path, '-o', temp.name]) - return self._LoadPlistMaybeBinary(temp.name) - - def _MergePlist(self, merged_plist, plist): - """Merge |plist| into |merged_plist|.""" - for key, value in plist.iteritems(): - if isinstance(value, dict): - merged_value = merged_plist.get(key, {}) - if isinstance(merged_value, dict): - self._MergePlist(merged_value, value) - merged_plist[key] = merged_value - else: - merged_plist[key] = value - else: - merged_plist[key] = value - - def _LoadPlistMaybeBinary(self, plist_path): - """Loads into a memory a plist possibly encoded in binary format. - - This is a wrapper around plistlib.readPlist that tries to convert the - plist to the XML format if it can't be parsed (assuming that it is in - the binary format). - - Args: - plist_path: string, path to a plist file, in XML or binary format - - Returns: - Content of the plist as a dictionary. - """ - try: - # First, try to read the file using plistlib that only supports XML, - # and if an exception is raised, convert a temporary copy to XML and - # load that copy. - return plistlib.readPlist(plist_path) - except: - pass - with tempfile.NamedTemporaryFile() as temp: - shutil.copy2(plist_path, temp.name) - subprocess.check_call(['plutil', '-convert', 'xml1', temp.name]) - return plistlib.readPlist(temp.name) - - def _GetSubstitutions(self, bundle_identifier, app_identifier_prefix): - """Constructs a dictionary of variable substitutions for Entitlements.plist. 
- - Args: - bundle_identifier: string, value of CFBundleIdentifier from Info.plist - app_identifier_prefix: string, value for AppIdentifierPrefix - - Returns: - Dictionary of substitutions to apply when generating Entitlements.plist. - """ - return { - 'CFBundleIdentifier': bundle_identifier, - 'AppIdentifierPrefix': app_identifier_prefix, - } - - def _GetCFBundleIdentifier(self): - """Extracts CFBundleIdentifier value from Info.plist in the bundle. - - Returns: - Value of CFBundleIdentifier in the Info.plist located in the bundle. - """ - info_plist_path = os.path.join( - os.environ['TARGET_BUILD_DIR'], - os.environ['INFOPLIST_PATH']) - info_plist_data = self._LoadPlistMaybeBinary(info_plist_path) - return info_plist_data['CFBundleIdentifier'] - - def _InstallEntitlements(self, entitlements, substitutions, overrides): - """Generates and install the ${BundleName}.xcent entitlements file. - - Expands variables "$(variable)" pattern in the source entitlements file, - add extra entitlements defined in the .mobileprovision file and the copy - the generated plist to "${BundlePath}.xcent". - - Args: - entitlements: string, optional, path to the Entitlements.plist template - to use, defaults to "${SDKROOT}/Entitlements.plist" - substitutions: dictionary, variable substitutions - overrides: dictionary, values to add to the entitlements - - Returns: - Path to the generated entitlements file. - """ - source_path = entitlements - target_path = os.path.join( - os.environ['BUILT_PRODUCTS_DIR'], - os.environ['PRODUCT_NAME'] + '.xcent') - if not source_path: - source_path = os.path.join( - os.environ['SDKROOT'], - 'Entitlements.plist') - shutil.copy2(source_path, target_path) - data = self._LoadPlistMaybeBinary(target_path) - data = self._ExpandVariables(data, substitutions) - if overrides: - for key in overrides: - if key not in data: - data[key] = overrides[key] - plistlib.writePlist(data, target_path) - return target_path - - def _ExpandVariables(self, data, substitutions): - """Expands variables "$(variable)" in data. - - Args: - data: object, can be either string, list or dictionary - substitutions: dictionary, variable substitutions to perform - - Returns: - Copy of data where each references to "$(variable)" has been replaced - by the corresponding value found in substitutions, or left intact if - the key was not found. - """ - if isinstance(data, str): - for key, value in substitutions.iteritems(): - data = data.replace('$(%s)' % key, value) - return data - if isinstance(data, list): - return [self._ExpandVariables(v, substitutions) for v in data] - if isinstance(data, dict): - return {k: self._ExpandVariables(data[k], substitutions) for k in data} - return data - -if __name__ == '__main__': - sys.exit(main(sys.argv[1:])) diff --git a/test/addons-napi/4_object_factory/build/Makefile b/test/addons-napi/4_object_factory/build/Makefile deleted file mode 100644 index 90bca121611c40..00000000000000 --- a/test/addons-napi/4_object_factory/build/Makefile +++ /dev/null @@ -1,342 +0,0 @@ -# We borrow heavily from the kernel build setup, though we are simpler since -# we don't have Kconfig tweaking settings on us. - -# The implicit make rules have it looking for RCS files, among other things. -# We instead explicitly write all the rules we care about. -# It's even quicker (saves ~200ms) to pass -r on the command line. -MAKEFLAGS=-r - -# The source directory tree. -srcdir := .. -abs_srcdir := $(abspath $(srcdir)) - -# The name of the builddir. -builddir_name ?= . 
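As background for the gyp-mac-tool whose tail is deleted above: its _ExpandVariables helper walks strings, lists, and dicts and replaces "$(name)" tokens using a substitutions map, leaving unknown names untouched. The following is a minimal standalone sketch of that behaviour, not the deleted tool itself; it is written for Python 3 (the original is Python 2), and the bundle identifier and AppIdentifierPrefix values are invented for illustration.

def expand_variables(data, substitutions):
    # Replace "$(name)" tokens in strings, recursing through lists and
    # dicts; names missing from the substitutions map are left as-is.
    if isinstance(data, str):
        for key, value in substitutions.items():
            data = data.replace('$(%s)' % key, value)
        return data
    if isinstance(data, list):
        return [expand_variables(v, substitutions) for v in data]
    if isinstance(data, dict):
        return {k: expand_variables(v, substitutions) for k, v in data.items()}
    return data

entitlements = {'application-identifier': '$(AppIdentifierPrefix)$(CFBundleIdentifier)'}
subs = {'CFBundleIdentifier': 'org.example.addon',   # invented example values
        'AppIdentifierPrefix': 'ABCDE12345.'}
print(expand_variables(entitlements, subs))
# {'application-identifier': 'ABCDE12345.org.example.addon'}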
- -# The V=1 flag on command line makes us verbosely print command lines. -ifdef V - quiet= -else - quiet=quiet_ -endif - -# Specify BUILDTYPE=Release on the command line for a release build. -BUILDTYPE ?= Release - -# Directory all our build output goes into. -# Note that this must be two directories beneath src/ for unit tests to pass, -# as they reach into the src/ directory for data with relative paths. -builddir ?= $(builddir_name)/$(BUILDTYPE) -abs_builddir := $(abspath $(builddir)) -depsdir := $(builddir)/.deps - -# Object output directory. -obj := $(builddir)/obj -abs_obj := $(abspath $(obj)) - -# We build up a list of every single one of the targets so we can slurp in the -# generated dependency rule Makefiles in one pass. -all_deps := - - - -CC.target ?= $(CC) -CFLAGS.target ?= $(CPPFLAGS) $(CFLAGS) -CXX.target ?= $(CXX) -CXXFLAGS.target ?= $(CPPFLAGS) $(CXXFLAGS) -LINK.target ?= $(LINK) -LDFLAGS.target ?= $(LDFLAGS) -AR.target ?= $(AR) - -# C++ apps need to be linked with g++. -LINK ?= $(CXX.target) - -# TODO(evan): move all cross-compilation logic to gyp-time so we don't need -# to replicate this environment fallback in make as well. -CC.host ?= gcc -CFLAGS.host ?= $(CPPFLAGS_host) $(CFLAGS_host) -CXX.host ?= g++ -CXXFLAGS.host ?= $(CPPFLAGS_host) $(CXXFLAGS_host) -LINK.host ?= $(CXX.host) -LDFLAGS.host ?= -AR.host ?= ar - -# Define a dir function that can handle spaces. -# http://www.gnu.org/software/make/manual/make.html#Syntax-of-Functions -# "leading spaces cannot appear in the text of the first argument as written. -# These characters can be put into the argument value by variable substitution." -empty := -space := $(empty) $(empty) - -# http://stackoverflow.com/questions/1189781/using-make-dir-or-notdir-on-a-path-with-spaces -replace_spaces = $(subst $(space),?,$1) -unreplace_spaces = $(subst ?,$(space),$1) -dirx = $(call unreplace_spaces,$(dir $(call replace_spaces,$1))) - -# Flags to make gcc output dependency info. Note that you need to be -# careful here to use the flags that ccache and distcc can understand. -# We write to a dep file on the side first and then rename at the end -# so we can't end up with a broken dep file. -depfile = $(depsdir)/$(call replace_spaces,$@).d -DEPFLAGS = -MMD -MF $(depfile).raw - -# We have to fixup the deps output in a few ways. -# (1) the file output should mention the proper .o file. -# ccache or distcc lose the path to the target, so we convert a rule of -# the form: -# foobar.o: DEP1 DEP2 -# into -# path/to/foobar.o: DEP1 DEP2 -# (2) we want missing files not to cause us to fail to build. -# We want to rewrite -# foobar.o: DEP1 DEP2 \ -# DEP3 -# to -# DEP1: -# DEP2: -# DEP3: -# so if the files are missing, they're just considered phony rules. -# We have to do some pretty insane escaping to get those backslashes -# and dollar signs past make, the shell, and sed at the same time. -# Doesn't work with spaces, but that's fine: .d files have spaces in -# their names replaced with other characters. -define fixup_dep -# The depfile may not exist if the input file didn't have any #includes. -touch $(depfile).raw -# Fixup path as in (1). -sed -e "s|^$(notdir $@)|$@|" $(depfile).raw >> $(depfile) -# Add extra rules as in (2). -# We remove slashes and replace spaces with new lines; -# remove blank lines; -# delete the first line and append a colon to the remaining lines. 
-sed -e 's|\\||' -e 'y| |\n|' $(depfile).raw |\ - grep -v '^$$' |\ - sed -e 1d -e 's|$$|:|' \ - >> $(depfile) -rm $(depfile).raw -endef - -# Command definitions: -# - cmd_foo is the actual command to run; -# - quiet_cmd_foo is the brief-output summary of the command. - -quiet_cmd_cc = CC($(TOOLSET)) $@ -cmd_cc = $(CC.$(TOOLSET)) $(GYP_CFLAGS) $(DEPFLAGS) $(CFLAGS.$(TOOLSET)) -c -o $@ $< - -quiet_cmd_cxx = CXX($(TOOLSET)) $@ -cmd_cxx = $(CXX.$(TOOLSET)) $(GYP_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< - -quiet_cmd_objc = CXX($(TOOLSET)) $@ -cmd_objc = $(CC.$(TOOLSET)) $(GYP_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $< - -quiet_cmd_objcxx = CXX($(TOOLSET)) $@ -cmd_objcxx = $(CXX.$(TOOLSET)) $(GYP_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $< - -# Commands for precompiled header files. -quiet_cmd_pch_c = CXX($(TOOLSET)) $@ -cmd_pch_c = $(CC.$(TOOLSET)) $(GYP_PCH_CFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< -quiet_cmd_pch_cc = CXX($(TOOLSET)) $@ -cmd_pch_cc = $(CC.$(TOOLSET)) $(GYP_PCH_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< -quiet_cmd_pch_m = CXX($(TOOLSET)) $@ -cmd_pch_m = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $< -quiet_cmd_pch_mm = CXX($(TOOLSET)) $@ -cmd_pch_mm = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $< - -# gyp-mac-tool is written next to the root Makefile by gyp. -# Use $(4) for the command, since $(2) and $(3) are used as flag by do_cmd -# already. -quiet_cmd_mac_tool = MACTOOL $(4) $< -cmd_mac_tool = ./gyp-mac-tool $(4) $< "$@" - -quiet_cmd_mac_package_framework = PACKAGE FRAMEWORK $@ -cmd_mac_package_framework = ./gyp-mac-tool package-framework "$@" $(4) - -quiet_cmd_infoplist = INFOPLIST $@ -cmd_infoplist = $(CC.$(TOOLSET)) -E -P -Wno-trigraphs -x c $(INFOPLIST_DEFINES) "$<" -o "$@" - -quiet_cmd_touch = TOUCH $@ -cmd_touch = touch $@ - -quiet_cmd_copy = COPY $@ -# send stderr to /dev/null to ignore messages when linking directories. -cmd_copy = rm -rf "$@" && cp -af "$<" "$@" - -quiet_cmd_alink = LIBTOOL-STATIC $@ -cmd_alink = rm -f $@ && ./gyp-mac-tool filter-libtool libtool $(GYP_LIBTOOLFLAGS) -static -o $@ $(filter %.o,$^) - -quiet_cmd_link = LINK($(TOOLSET)) $@ -cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS) - -quiet_cmd_solink = SOLINK($(TOOLSET)) $@ -cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS) - -quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@ -cmd_solink_module = $(LINK.$(TOOLSET)) -bundle $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS) - - -# Define an escape_quotes function to escape single quotes. -# This allows us to handle quotes properly as long as we always use -# use single quotes and escape_quotes. -escape_quotes = $(subst ','\'',$(1)) -# This comment is here just to include a ' to unconfuse syntax highlighting. -# Define an escape_vars function to escape '$' variable syntax. -# This allows us to read/write command lines with shell variables (e.g. -# $LD_LIBRARY_PATH), without triggering make substitution. -escape_vars = $(subst $$,$$$$,$(1)) -# Helper that expands to a shell command to echo a string exactly as it is in -# make. This uses printf instead of echo because printf's behaviour with respect -# to escape sequences is more portable than echo's across different shells -# (e.g., dash, bash). 
-exact_echo = printf '%s\n' '$(call escape_quotes,$(1))' - -# Helper to compare the command we're about to run against the command -# we logged the last time we ran the command. Produces an empty -# string (false) when the commands match. -# Tricky point: Make has no string-equality test function. -# The kernel uses the following, but it seems like it would have false -# positives, where one string reordered its arguments. -# arg_check = $(strip $(filter-out $(cmd_$(1)), $(cmd_$@)) \ -# $(filter-out $(cmd_$@), $(cmd_$(1)))) -# We instead substitute each for the empty string into the other, and -# say they're equal if both substitutions produce the empty string. -# .d files contain ? instead of spaces, take that into account. -command_changed = $(or $(subst $(cmd_$(1)),,$(cmd_$(call replace_spaces,$@))),\ - $(subst $(cmd_$(call replace_spaces,$@)),,$(cmd_$(1)))) - -# Helper that is non-empty when a prerequisite changes. -# Normally make does this implicitly, but we force rules to always run -# so we can check their command lines. -# $? -- new prerequisites -# $| -- order-only dependencies -prereq_changed = $(filter-out FORCE_DO_CMD,$(filter-out $|,$?)) - -# Helper that executes all postbuilds until one fails. -define do_postbuilds - @E=0;\ - for p in $(POSTBUILDS); do\ - eval $$p;\ - E=$$?;\ - if [ $$E -ne 0 ]; then\ - break;\ - fi;\ - done;\ - if [ $$E -ne 0 ]; then\ - rm -rf "$@";\ - exit $$E;\ - fi -endef - -# do_cmd: run a command via the above cmd_foo names, if necessary. -# Should always run for a given target to handle command-line changes. -# Second argument, if non-zero, makes it do asm/C/C++ dependency munging. -# Third argument, if non-zero, makes it do POSTBUILDS processing. -# Note: We intentionally do NOT call dirx for depfile, since it contains ? for -# spaces already and dirx strips the ? characters. -define do_cmd -$(if $(or $(command_changed),$(prereq_changed)), - @$(call exact_echo, $($(quiet)cmd_$(1))) - @mkdir -p "$(call dirx,$@)" "$(dir $(depfile))" - $(if $(findstring flock,$(word 2,$(cmd_$1))), - @$(cmd_$(1)) - @echo " $(quiet_cmd_$(1)): Finished", - @$(cmd_$(1)) - ) - @$(call exact_echo,$(call escape_vars,cmd_$(call replace_spaces,$@) := $(cmd_$(1)))) > $(depfile) - @$(if $(2),$(fixup_dep)) - $(if $(and $(3), $(POSTBUILDS)), - $(call do_postbuilds) - ) -) -endef - -# Declare the "all" target first so it is the default, -# even though we don't have the deps yet. -.PHONY: all -all: - -# make looks for ways to re-generate included makefiles, but in our case, we -# don't have a direct way. Explicitly telling make that it has nothing to do -# for them makes it go faster. -%.d: ; - -# Use FORCE_DO_CMD to force a target to run. Should be coupled with -# do_cmd. -.PHONY: FORCE_DO_CMD -FORCE_DO_CMD: - -TOOLSET := target -# Suffix rules, putting all outputs into $(obj). -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.m FORCE_DO_CMD - @$(call do_cmd,objc,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.mm FORCE_DO_CMD - @$(call do_cmd,objcxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.S FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.s FORCE_DO_CMD - @$(call do_cmd,cc,1) - -# Try building from generated source, too. 
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.m FORCE_DO_CMD - @$(call do_cmd,objc,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.mm FORCE_DO_CMD - @$(call do_cmd,objcxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.S FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.s FORCE_DO_CMD - @$(call do_cmd,cc,1) - -$(obj).$(TOOLSET)/%.o: $(obj)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.m FORCE_DO_CMD - @$(call do_cmd,objc,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.mm FORCE_DO_CMD - @$(call do_cmd,objcxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.S FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.s FORCE_DO_CMD - @$(call do_cmd,cc,1) - - -ifeq ($(strip $(foreach prefix,$(NO_LOAD),\ - $(findstring $(join ^,$(prefix)),\ - $(join ^,binding.target.mk)))),) - include binding.target.mk -endif - -quiet_cmd_regen_makefile = ACTION Regenerating $@ -cmd_regen_makefile = cd $(srcdir); /Users/trott/io.js/deps/npm/node_modules/node-gyp/gyp/gyp_main.py -fmake --ignore-environment "--toplevel-dir=." -I/Users/trott/io.js/test/addons-napi/4_object_factory/build/config.gypi -I/Users/trott/io.js/deps/npm/node_modules/node-gyp/addon.gypi -I/Users/trott/io.js/common.gypi "--depth=." "-Goutput_dir=." "--generator-output=build" "-Dlibrary=shared_library" "-Dvisibility=default" "-Dnode_root_dir=/Users/trott/io.js" "-Dnode_gyp_dir=/Users/trott/io.js/deps/npm/node_modules/node-gyp" "-Dnode_lib_file=/Users/trott/io.js/$(Configuration)/node.lib" "-Dmodule_root_dir=/Users/trott/io.js/test/addons-napi/4_object_factory" "-Dnode_engine=v8" binding.gyp -Makefile: $(srcdir)/../../../deps/npm/node_modules/node-gyp/addon.gypi $(srcdir)/build/config.gypi $(srcdir)/binding.gyp $(srcdir)/../../../common.gypi - $(call do_cmd,regen_makefile) - -# "all" is a concatenation of the "all" targets from all the included -# sub-makefiles. This is just here to clarify. -all: - -# Add in dependency-tracking rules. $(all_deps) is the list of every single -# target in our tree. Only consider the ones with .d (dependency) info: -d_files := $(wildcard $(foreach f,$(all_deps),$(depsdir)/$(f).d)) -ifneq ($(d_files),) - include $(d_files) -endif diff --git a/test/addons-napi/4_object_factory/build/binding.Makefile b/test/addons-napi/4_object_factory/build/binding.Makefile deleted file mode 100644 index 0556c70f61905f..00000000000000 --- a/test/addons-napi/4_object_factory/build/binding.Makefile +++ /dev/null @@ -1,6 +0,0 @@ -# This file is generated by gyp; do not edit. - -export builddir_name ?= ./build/. -.PHONY: all -all: - $(MAKE) binding diff --git a/test/addons-napi/4_object_factory/build/binding.target.mk b/test/addons-napi/4_object_factory/build/binding.target.mk deleted file mode 100644 index a707bab99ab136..00000000000000 --- a/test/addons-napi/4_object_factory/build/binding.target.mk +++ /dev/null @@ -1,178 +0,0 @@ -# This file is generated by gyp; do not edit. 
- -TOOLSET := target -TARGET := binding -DEFS_Debug := \ - '-DNODE_GYP_MODULE_NAME=binding' \ - '-DUSING_UV_SHARED=1' \ - '-DUSING_V8_SHARED=1' \ - '-DV8_DEPRECATION_WARNINGS=1' \ - '-D_DARWIN_USE_64_BIT_INODE=1' \ - '-D_LARGEFILE_SOURCE' \ - '-D_FILE_OFFSET_BITS=64' \ - '-DBUILDING_NODE_EXTENSION' \ - '-DDEBUG' \ - '-D_DEBUG' \ - '-DV8_ENABLE_CHECKS' - -# Flags passed to all source files. -CFLAGS_Debug := \ - -O0 \ - -gdwarf-2 \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -Wall \ - -Wendif-labels \ - -W \ - -Wno-unused-parameter - -# Flags passed to only C files. -CFLAGS_C_Debug := \ - -fno-strict-aliasing - -# Flags passed to only C++ files. -CFLAGS_CC_Debug := \ - -std=gnu++0x \ - -stdlib=libc++ \ - -fno-rtti \ - -fno-exceptions \ - -fno-threadsafe-statics \ - -fno-strict-aliasing - -# Flags passed to only ObjC files. -CFLAGS_OBJC_Debug := - -# Flags passed to only ObjC++ files. -CFLAGS_OBJCC_Debug := - -INCS_Debug := \ - -I/Users/trott/io.js/include/node \ - -I/Users/trott/io.js/src \ - -I/Users/trott/io.js/deps/uv/include \ - -I/Users/trott/io.js/deps/v8/include - -DEFS_Release := \ - '-DNODE_GYP_MODULE_NAME=binding' \ - '-DUSING_UV_SHARED=1' \ - '-DUSING_V8_SHARED=1' \ - '-DV8_DEPRECATION_WARNINGS=1' \ - '-D_DARWIN_USE_64_BIT_INODE=1' \ - '-D_LARGEFILE_SOURCE' \ - '-D_FILE_OFFSET_BITS=64' \ - '-DBUILDING_NODE_EXTENSION' - -# Flags passed to all source files. -CFLAGS_Release := \ - -Os \ - -gdwarf-2 \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -Wall \ - -Wendif-labels \ - -W \ - -Wno-unused-parameter - -# Flags passed to only C files. -CFLAGS_C_Release := \ - -fno-strict-aliasing - -# Flags passed to only C++ files. -CFLAGS_CC_Release := \ - -std=gnu++0x \ - -stdlib=libc++ \ - -fno-rtti \ - -fno-exceptions \ - -fno-threadsafe-statics \ - -fno-strict-aliasing - -# Flags passed to only ObjC files. -CFLAGS_OBJC_Release := - -# Flags passed to only ObjC++ files. -CFLAGS_OBJCC_Release := - -INCS_Release := \ - -I/Users/trott/io.js/include/node \ - -I/Users/trott/io.js/src \ - -I/Users/trott/io.js/deps/uv/include \ - -I/Users/trott/io.js/deps/v8/include - -OBJS := \ - $(obj).target/$(TARGET)/binding.o - -# Add to the list of files we specially track dependencies for. -all_deps += $(OBJS) - -# CFLAGS et al overrides must be target-local. -# See "Target-specific Variable Values" in the GNU Make manual. -$(OBJS): TOOLSET := $(TOOLSET) -$(OBJS): GYP_CFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) -$(OBJS): GYP_CXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) -$(OBJS): GYP_OBJCFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) $(CFLAGS_OBJC_$(BUILDTYPE)) -$(OBJS): GYP_OBJCXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) $(CFLAGS_OBJCC_$(BUILDTYPE)) - -# Suffix rules, putting all outputs into $(obj). - -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) - -# Try building from generated source, too. - -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) - -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) - -# End of this set of suffix rules -### Rules for final target. 
-LDFLAGS_Debug := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -L$(builddir) \ - -stdlib=libc++ - -LIBTOOLFLAGS_Debug := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first - -LDFLAGS_Release := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -L$(builddir) \ - -stdlib=libc++ - -LIBTOOLFLAGS_Release := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first - -LIBS := - -$(builddir)/binding.node: GYP_LDFLAGS := $(LDFLAGS_$(BUILDTYPE)) -$(builddir)/binding.node: LIBS := $(LIBS) -$(builddir)/binding.node: GYP_LIBTOOLFLAGS := $(LIBTOOLFLAGS_$(BUILDTYPE)) -$(builddir)/binding.node: TOOLSET := $(TOOLSET) -$(builddir)/binding.node: $(OBJS) FORCE_DO_CMD - $(call do_cmd,solink_module) - -all_deps += $(builddir)/binding.node -# Add target alias -.PHONY: binding -binding: $(builddir)/binding.node - -# Short alias for building this executable. -.PHONY: binding.node -binding.node: $(builddir)/binding.node - -# Add executable to "all" target. -.PHONY: all -all: $(builddir)/binding.node - diff --git a/test/addons-napi/4_object_factory/build/config.gypi b/test/addons-napi/4_object_factory/build/config.gypi deleted file mode 100644 index 6a35cfccbb772b..00000000000000 --- a/test/addons-napi/4_object_factory/build/config.gypi +++ /dev/null @@ -1,70 +0,0 @@ -# Do not edit. File was generated by node-gyp's "configure" step -{ - "target_defaults": { - "cflags": [], - "default_configuration": "Release", - "defines": [], - "include_dirs": [], - "libraries": [] - }, - "variables": { - "asan": 0, - "coverage": "false", - "debug_devtools": "node", - "debug_http2": "false", - "debug_nghttp2": "false", - "force_dynamic_crt": 0, - "host_arch": "x64", - "icu_data_file": "icudt59l.dat", - "icu_data_in": "../../deps/icu-small/source/data/in/icudt59l.dat", - "icu_endianness": "l", - "icu_gyp_path": "tools/icu/icu-generic.gyp", - "icu_locales": "en,root", - "icu_path": "deps/icu-small", - "icu_small": "true", - "icu_ver_major": "59", - "llvm_version": 0, - "node_byteorder": "little", - "node_enable_d8": "false", - "node_enable_v8_vtunejit": "false", - "node_install_npm": "true", - "node_module_version": 58, - "node_no_browser_globals": "false", - "node_prefix": "/usr/local", - "node_release_urlbase": "", - "node_shared": "false", - "node_shared_cares": "false", - "node_shared_http_parser": "false", - "node_shared_libuv": "false", - "node_shared_openssl": "false", - "node_shared_zlib": "false", - "node_tag": "", - "node_use_bundled_v8": "true", - "node_use_dtrace": "true", - "node_use_etw": "false", - "node_use_lttng": "false", - "node_use_openssl": "true", - "node_use_perfctr": "false", - "node_use_v8_platform": "true", - "node_without_node_options": "false", - "openssl_fips": "", - "openssl_no_asm": 0, - "shlib_suffix": "58.dylib", - "target_arch": "x64", - "uv_parent_path": "/deps/uv/", - "uv_use_dtrace": "true", - "v8_enable_gdbjit": 0, - "v8_enable_i18n_support": 1, - "v8_enable_inspector": 1, - "v8_no_strict_aliasing": 1, - "v8_optimized_debug": 0, - "v8_promise_internal_field_count": 1, - "v8_random_seed": 0, - "v8_trace_maps": 0, - "v8_use_snapshot": "true", - "want_separate_host_toolset": 0, - "xcode_version": "8.0", - "nodedir": "/Users/trott/io.js", - "standalone_static_library": 1 - } -} diff --git a/test/addons-napi/4_object_factory/build/gyp-mac-tool b/test/addons-napi/4_object_factory/build/gyp-mac-tool deleted file 
mode 100755 index 8ef02b0493a003..00000000000000 --- a/test/addons-napi/4_object_factory/build/gyp-mac-tool +++ /dev/null @@ -1,611 +0,0 @@ -#!/usr/bin/env python -# Generated by gyp. Do not edit. -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Utility functions to perform Xcode-style build steps. - -These functions are executed via gyp-mac-tool when using the Makefile generator. -""" - -import fcntl -import fnmatch -import glob -import json -import os -import plistlib -import re -import shutil -import string -import subprocess -import sys -import tempfile - - -def main(args): - executor = MacTool() - exit_code = executor.Dispatch(args) - if exit_code is not None: - sys.exit(exit_code) - - -class MacTool(object): - """This class performs all the Mac tooling steps. The methods can either be - executed directly, or dispatched from an argument list.""" - - def Dispatch(self, args): - """Dispatches a string command to a method.""" - if len(args) < 1: - raise Exception("Not enough arguments") - - method = "Exec%s" % self._CommandifyName(args[0]) - return getattr(self, method)(*args[1:]) - - def _CommandifyName(self, name_string): - """Transforms a tool name like copy-info-plist to CopyInfoPlist""" - return name_string.title().replace('-', '') - - def ExecCopyBundleResource(self, source, dest, convert_to_binary): - """Copies a resource file to the bundle/Resources directory, performing any - necessary compilation on each resource.""" - extension = os.path.splitext(source)[1].lower() - if os.path.isdir(source): - # Copy tree. - # TODO(thakis): This copies file attributes like mtime, while the - # single-file branch below doesn't. This should probably be changed to - # be consistent with the single-file branch. - if os.path.exists(dest): - shutil.rmtree(dest) - shutil.copytree(source, dest) - elif extension == '.xib': - return self._CopyXIBFile(source, dest) - elif extension == '.storyboard': - return self._CopyXIBFile(source, dest) - elif extension == '.strings': - self._CopyStringsFile(source, dest, convert_to_binary) - else: - shutil.copy(source, dest) - - def _CopyXIBFile(self, source, dest): - """Compiles a XIB file with ibtool into a binary plist in the bundle.""" - - # ibtool sometimes crashes with relative paths. See crbug.com/314728. 
- base = os.path.dirname(os.path.realpath(__file__)) - if os.path.relpath(source): - source = os.path.join(base, source) - if os.path.relpath(dest): - dest = os.path.join(base, dest) - - args = ['xcrun', 'ibtool', '--errors', '--warnings', '--notices', - '--output-format', 'human-readable-text', '--compile', dest, source] - ibtool_section_re = re.compile(r'/\*.*\*/') - ibtool_re = re.compile(r'.*note:.*is clipping its content') - ibtoolout = subprocess.Popen(args, stdout=subprocess.PIPE) - current_section_header = None - for line in ibtoolout.stdout: - if ibtool_section_re.match(line): - current_section_header = line - elif not ibtool_re.match(line): - if current_section_header: - sys.stdout.write(current_section_header) - current_section_header = None - sys.stdout.write(line) - return ibtoolout.returncode - - def _ConvertToBinary(self, dest): - subprocess.check_call([ - 'xcrun', 'plutil', '-convert', 'binary1', '-o', dest, dest]) - - def _CopyStringsFile(self, source, dest, convert_to_binary): - """Copies a .strings file using iconv to reconvert the input into UTF-16.""" - input_code = self._DetectInputEncoding(source) or "UTF-8" - - # Xcode's CpyCopyStringsFile / builtin-copyStrings seems to call - # CFPropertyListCreateFromXMLData() behind the scenes; at least it prints - # CFPropertyListCreateFromXMLData(): Old-style plist parser: missing - # semicolon in dictionary. - # on invalid files. Do the same kind of validation. - import CoreFoundation - s = open(source, 'rb').read() - d = CoreFoundation.CFDataCreate(None, s, len(s)) - _, error = CoreFoundation.CFPropertyListCreateFromXMLData(None, d, 0, None) - if error: - return - - fp = open(dest, 'wb') - fp.write(s.decode(input_code).encode('UTF-16')) - fp.close() - - if convert_to_binary == 'True': - self._ConvertToBinary(dest) - - def _DetectInputEncoding(self, file_name): - """Reads the first few bytes from file_name and tries to guess the text - encoding. Returns None as a guess if it can't detect it.""" - fp = open(file_name, 'rb') - try: - header = fp.read(3) - except e: - fp.close() - return None - fp.close() - if header.startswith("\xFE\xFF"): - return "UTF-16" - elif header.startswith("\xFF\xFE"): - return "UTF-16" - elif header.startswith("\xEF\xBB\xBF"): - return "UTF-8" - else: - return None - - def ExecCopyInfoPlist(self, source, dest, convert_to_binary, *keys): - """Copies the |source| Info.plist to the destination directory |dest|.""" - # Read the source Info.plist into memory. - fd = open(source, 'r') - lines = fd.read() - fd.close() - - # Insert synthesized key/value pairs (e.g. BuildMachineOSBuild). - plist = plistlib.readPlistFromString(lines) - if keys: - plist = dict(plist.items() + json.loads(keys[0]).items()) - lines = plistlib.writePlistToString(plist) - - # Go through all the environment variables and replace them as variables in - # the file. - IDENT_RE = re.compile(r'[/\s]') - for key in os.environ: - if key.startswith('_'): - continue - evar = '${%s}' % key - evalue = os.environ[key] - lines = string.replace(lines, evar, evalue) - - # Xcode supports various suffices on environment variables, which are - # all undocumented. :rfc1034identifier is used in the standard project - # template these days, and :identifier was used earlier. They are used to - # convert non-url characters into things that look like valid urls -- - # except that the replacement character for :identifier, '_' isn't valid - # in a URL either -- oops, hence :rfc1034identifier was born. 
- evar = '${%s:identifier}' % key - evalue = IDENT_RE.sub('_', os.environ[key]) - lines = string.replace(lines, evar, evalue) - - evar = '${%s:rfc1034identifier}' % key - evalue = IDENT_RE.sub('-', os.environ[key]) - lines = string.replace(lines, evar, evalue) - - # Remove any keys with values that haven't been replaced. - lines = lines.split('\n') - for i in range(len(lines)): - if lines[i].strip().startswith("${"): - lines[i] = None - lines[i - 1] = None - lines = '\n'.join(filter(lambda x: x is not None, lines)) - - # Write out the file with variables replaced. - fd = open(dest, 'w') - fd.write(lines) - fd.close() - - # Now write out PkgInfo file now that the Info.plist file has been - # "compiled". - self._WritePkgInfo(dest) - - if convert_to_binary == 'True': - self._ConvertToBinary(dest) - - def _WritePkgInfo(self, info_plist): - """This writes the PkgInfo file from the data stored in Info.plist.""" - plist = plistlib.readPlist(info_plist) - if not plist: - return - - # Only create PkgInfo for executable types. - package_type = plist['CFBundlePackageType'] - if package_type != 'APPL': - return - - # The format of PkgInfo is eight characters, representing the bundle type - # and bundle signature, each four characters. If that is missing, four - # '?' characters are used instead. - signature_code = plist.get('CFBundleSignature', '????') - if len(signature_code) != 4: # Wrong length resets everything, too. - signature_code = '?' * 4 - - dest = os.path.join(os.path.dirname(info_plist), 'PkgInfo') - fp = open(dest, 'w') - fp.write('%s%s' % (package_type, signature_code)) - fp.close() - - def ExecFlock(self, lockfile, *cmd_list): - """Emulates the most basic behavior of Linux's flock(1).""" - # Rely on exception handling to report errors. - fd = os.open(lockfile, os.O_RDONLY|os.O_NOCTTY|os.O_CREAT, 0o666) - fcntl.flock(fd, fcntl.LOCK_EX) - return subprocess.call(cmd_list) - - def ExecFilterLibtool(self, *cmd_list): - """Calls libtool and filters out '/path/to/libtool: file: foo.o has no - symbols'.""" - libtool_re = re.compile(r'^.*libtool: file: .* has no symbols$') - libtool_re5 = re.compile( - r'^.*libtool: warning for library: ' + - r'.* the table of contents is empty ' + - r'\(no object file members in the library define global symbols\)$') - env = os.environ.copy() - # Ref: - # http://www.opensource.apple.com/source/cctools/cctools-809/misc/libtool.c - # The problem with this flag is that it resets the file mtime on the file to - # epoch=0, e.g. 1970-1-1 or 1969-12-31 depending on timezone. - env['ZERO_AR_DATE'] = '1' - libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env) - _, err = libtoolout.communicate() - for line in err.splitlines(): - if not libtool_re.match(line) and not libtool_re5.match(line): - print >>sys.stderr, line - # Unconditionally touch the output .a file on the command line if present - # and the command succeeded. A bit hacky. - if not libtoolout.returncode: - for i in range(len(cmd_list) - 1): - if cmd_list[i] == "-o" and cmd_list[i+1].endswith('.a'): - os.utime(cmd_list[i+1], None) - break - return libtoolout.returncode - - def ExecPackageFramework(self, framework, version): - """Takes a path to Something.framework and the Current version of that and - sets up all the symlinks.""" - # Find the name of the binary based on the part before the ".framework". 
- binary = os.path.basename(framework).split('.')[0] - - CURRENT = 'Current' - RESOURCES = 'Resources' - VERSIONS = 'Versions' - - if not os.path.exists(os.path.join(framework, VERSIONS, version, binary)): - # Binary-less frameworks don't seem to contain symlinks (see e.g. - # chromium's out/Debug/org.chromium.Chromium.manifest/ bundle). - return - - # Move into the framework directory to set the symlinks correctly. - pwd = os.getcwd() - os.chdir(framework) - - # Set up the Current version. - self._Relink(version, os.path.join(VERSIONS, CURRENT)) - - # Set up the root symlinks. - self._Relink(os.path.join(VERSIONS, CURRENT, binary), binary) - self._Relink(os.path.join(VERSIONS, CURRENT, RESOURCES), RESOURCES) - - # Back to where we were before! - os.chdir(pwd) - - def _Relink(self, dest, link): - """Creates a symlink to |dest| named |link|. If |link| already exists, - it is overwritten.""" - if os.path.lexists(link): - os.remove(link) - os.symlink(dest, link) - - def ExecCompileXcassets(self, keys, *inputs): - """Compiles multiple .xcassets files into a single .car file. - - This invokes 'actool' to compile all the inputs .xcassets files. The - |keys| arguments is a json-encoded dictionary of extra arguments to - pass to 'actool' when the asset catalogs contains an application icon - or a launch image. - - Note that 'actool' does not create the Assets.car file if the asset - catalogs does not contains imageset. - """ - command_line = [ - 'xcrun', 'actool', '--output-format', 'human-readable-text', - '--compress-pngs', '--notices', '--warnings', '--errors', - ] - is_iphone_target = 'IPHONEOS_DEPLOYMENT_TARGET' in os.environ - if is_iphone_target: - platform = os.environ['CONFIGURATION'].split('-')[-1] - if platform not in ('iphoneos', 'iphonesimulator'): - platform = 'iphonesimulator' - command_line.extend([ - '--platform', platform, '--target-device', 'iphone', - '--target-device', 'ipad', '--minimum-deployment-target', - os.environ['IPHONEOS_DEPLOYMENT_TARGET'], '--compile', - os.path.abspath(os.environ['CONTENTS_FOLDER_PATH']), - ]) - else: - command_line.extend([ - '--platform', 'macosx', '--target-device', 'mac', - '--minimum-deployment-target', os.environ['MACOSX_DEPLOYMENT_TARGET'], - '--compile', - os.path.abspath(os.environ['UNLOCALIZED_RESOURCES_FOLDER_PATH']), - ]) - if keys: - keys = json.loads(keys) - for key, value in keys.iteritems(): - arg_name = '--' + key - if isinstance(value, bool): - if value: - command_line.append(arg_name) - elif isinstance(value, list): - for v in value: - command_line.append(arg_name) - command_line.append(str(v)) - else: - command_line.append(arg_name) - command_line.append(str(value)) - # Note: actool crashes if inputs path are relative, so use os.path.abspath - # to get absolute path name for inputs. - command_line.extend(map(os.path.abspath, inputs)) - subprocess.check_call(command_line) - - def ExecMergeInfoPlist(self, output, *inputs): - """Merge multiple .plist files into a single .plist file.""" - merged_plist = {} - for path in inputs: - plist = self._LoadPlistMaybeBinary(path) - self._MergePlist(merged_plist, plist) - plistlib.writePlist(merged_plist, output) - - def ExecCodeSignBundle(self, key, resource_rules, entitlements, provisioning): - """Code sign a bundle. - - This function tries to code sign an iOS bundle, following the same - algorithm as Xcode: - 1. copy ResourceRules.plist from the user or the SDK into the bundle, - 2. 
pick the provisioning profile that best match the bundle identifier, - and copy it into the bundle as embedded.mobileprovision, - 3. copy Entitlements.plist from user or SDK next to the bundle, - 4. code sign the bundle. - """ - resource_rules_path = self._InstallResourceRules(resource_rules) - substitutions, overrides = self._InstallProvisioningProfile( - provisioning, self._GetCFBundleIdentifier()) - entitlements_path = self._InstallEntitlements( - entitlements, substitutions, overrides) - subprocess.check_call([ - 'codesign', '--force', '--sign', key, '--resource-rules', - resource_rules_path, '--entitlements', entitlements_path, - os.path.join( - os.environ['TARGET_BUILD_DIR'], - os.environ['FULL_PRODUCT_NAME'])]) - - def _InstallResourceRules(self, resource_rules): - """Installs ResourceRules.plist from user or SDK into the bundle. - - Args: - resource_rules: string, optional, path to the ResourceRules.plist file - to use, default to "${SDKROOT}/ResourceRules.plist" - - Returns: - Path to the copy of ResourceRules.plist into the bundle. - """ - source_path = resource_rules - target_path = os.path.join( - os.environ['BUILT_PRODUCTS_DIR'], - os.environ['CONTENTS_FOLDER_PATH'], - 'ResourceRules.plist') - if not source_path: - source_path = os.path.join( - os.environ['SDKROOT'], 'ResourceRules.plist') - shutil.copy2(source_path, target_path) - return target_path - - def _InstallProvisioningProfile(self, profile, bundle_identifier): - """Installs embedded.mobileprovision into the bundle. - - Args: - profile: string, optional, short name of the .mobileprovision file - to use, if empty or the file is missing, the best file installed - will be used - bundle_identifier: string, value of CFBundleIdentifier from Info.plist - - Returns: - A tuple containing two dictionary: variables substitutions and values - to overrides when generating the entitlements file. - """ - source_path, provisioning_data, team_id = self._FindProvisioningProfile( - profile, bundle_identifier) - target_path = os.path.join( - os.environ['BUILT_PRODUCTS_DIR'], - os.environ['CONTENTS_FOLDER_PATH'], - 'embedded.mobileprovision') - shutil.copy2(source_path, target_path) - substitutions = self._GetSubstitutions(bundle_identifier, team_id + '.') - return substitutions, provisioning_data['Entitlements'] - - def _FindProvisioningProfile(self, profile, bundle_identifier): - """Finds the .mobileprovision file to use for signing the bundle. - - Checks all the installed provisioning profiles (or if the user specified - the PROVISIONING_PROFILE variable, only consult it) and select the most - specific that correspond to the bundle identifier. - - Args: - profile: string, optional, short name of the .mobileprovision file - to use, if empty or the file is missing, the best file installed - will be used - bundle_identifier: string, value of CFBundleIdentifier from Info.plist - - Returns: - A tuple of the path to the selected provisioning profile, the data of - the embedded plist in the provisioning profile and the team identifier - to use for code signing. - - Raises: - SystemExit: if no .mobileprovision can be used to sign the bundle. 
- """ - profiles_dir = os.path.join( - os.environ['HOME'], 'Library', 'MobileDevice', 'Provisioning Profiles') - if not os.path.isdir(profiles_dir): - print >>sys.stderr, ( - 'cannot find mobile provisioning for %s' % bundle_identifier) - sys.exit(1) - provisioning_profiles = None - if profile: - profile_path = os.path.join(profiles_dir, profile + '.mobileprovision') - if os.path.exists(profile_path): - provisioning_profiles = [profile_path] - if not provisioning_profiles: - provisioning_profiles = glob.glob( - os.path.join(profiles_dir, '*.mobileprovision')) - valid_provisioning_profiles = {} - for profile_path in provisioning_profiles: - profile_data = self._LoadProvisioningProfile(profile_path) - app_id_pattern = profile_data.get( - 'Entitlements', {}).get('application-identifier', '') - for team_identifier in profile_data.get('TeamIdentifier', []): - app_id = '%s.%s' % (team_identifier, bundle_identifier) - if fnmatch.fnmatch(app_id, app_id_pattern): - valid_provisioning_profiles[app_id_pattern] = ( - profile_path, profile_data, team_identifier) - if not valid_provisioning_profiles: - print >>sys.stderr, ( - 'cannot find mobile provisioning for %s' % bundle_identifier) - sys.exit(1) - # If the user has multiple provisioning profiles installed that can be - # used for ${bundle_identifier}, pick the most specific one (ie. the - # provisioning profile whose pattern is the longest). - selected_key = max(valid_provisioning_profiles, key=lambda v: len(v)) - return valid_provisioning_profiles[selected_key] - - def _LoadProvisioningProfile(self, profile_path): - """Extracts the plist embedded in a provisioning profile. - - Args: - profile_path: string, path to the .mobileprovision file - - Returns: - Content of the plist embedded in the provisioning profile as a dictionary. - """ - with tempfile.NamedTemporaryFile() as temp: - subprocess.check_call([ - 'security', 'cms', '-D', '-i', profile_path, '-o', temp.name]) - return self._LoadPlistMaybeBinary(temp.name) - - def _MergePlist(self, merged_plist, plist): - """Merge |plist| into |merged_plist|.""" - for key, value in plist.iteritems(): - if isinstance(value, dict): - merged_value = merged_plist.get(key, {}) - if isinstance(merged_value, dict): - self._MergePlist(merged_value, value) - merged_plist[key] = merged_value - else: - merged_plist[key] = value - else: - merged_plist[key] = value - - def _LoadPlistMaybeBinary(self, plist_path): - """Loads into a memory a plist possibly encoded in binary format. - - This is a wrapper around plistlib.readPlist that tries to convert the - plist to the XML format if it can't be parsed (assuming that it is in - the binary format). - - Args: - plist_path: string, path to a plist file, in XML or binary format - - Returns: - Content of the plist as a dictionary. - """ - try: - # First, try to read the file using plistlib that only supports XML, - # and if an exception is raised, convert a temporary copy to XML and - # load that copy. - return plistlib.readPlist(plist_path) - except: - pass - with tempfile.NamedTemporaryFile() as temp: - shutil.copy2(plist_path, temp.name) - subprocess.check_call(['plutil', '-convert', 'xml1', temp.name]) - return plistlib.readPlist(temp.name) - - def _GetSubstitutions(self, bundle_identifier, app_identifier_prefix): - """Constructs a dictionary of variable substitutions for Entitlements.plist. 
- - Args: - bundle_identifier: string, value of CFBundleIdentifier from Info.plist - app_identifier_prefix: string, value for AppIdentifierPrefix - - Returns: - Dictionary of substitutions to apply when generating Entitlements.plist. - """ - return { - 'CFBundleIdentifier': bundle_identifier, - 'AppIdentifierPrefix': app_identifier_prefix, - } - - def _GetCFBundleIdentifier(self): - """Extracts CFBundleIdentifier value from Info.plist in the bundle. - - Returns: - Value of CFBundleIdentifier in the Info.plist located in the bundle. - """ - info_plist_path = os.path.join( - os.environ['TARGET_BUILD_DIR'], - os.environ['INFOPLIST_PATH']) - info_plist_data = self._LoadPlistMaybeBinary(info_plist_path) - return info_plist_data['CFBundleIdentifier'] - - def _InstallEntitlements(self, entitlements, substitutions, overrides): - """Generates and install the ${BundleName}.xcent entitlements file. - - Expands variables "$(variable)" pattern in the source entitlements file, - add extra entitlements defined in the .mobileprovision file and the copy - the generated plist to "${BundlePath}.xcent". - - Args: - entitlements: string, optional, path to the Entitlements.plist template - to use, defaults to "${SDKROOT}/Entitlements.plist" - substitutions: dictionary, variable substitutions - overrides: dictionary, values to add to the entitlements - - Returns: - Path to the generated entitlements file. - """ - source_path = entitlements - target_path = os.path.join( - os.environ['BUILT_PRODUCTS_DIR'], - os.environ['PRODUCT_NAME'] + '.xcent') - if not source_path: - source_path = os.path.join( - os.environ['SDKROOT'], - 'Entitlements.plist') - shutil.copy2(source_path, target_path) - data = self._LoadPlistMaybeBinary(target_path) - data = self._ExpandVariables(data, substitutions) - if overrides: - for key in overrides: - if key not in data: - data[key] = overrides[key] - plistlib.writePlist(data, target_path) - return target_path - - def _ExpandVariables(self, data, substitutions): - """Expands variables "$(variable)" in data. - - Args: - data: object, can be either string, list or dictionary - substitutions: dictionary, variable substitutions to perform - - Returns: - Copy of data where each references to "$(variable)" has been replaced - by the corresponding value found in substitutions, or left intact if - the key was not found. - """ - if isinstance(data, str): - for key, value in substitutions.iteritems(): - data = data.replace('$(%s)' % key, value) - return data - if isinstance(data, list): - return [self._ExpandVariables(v, substitutions) for v in data] - if isinstance(data, dict): - return {k: self._ExpandVariables(data[k], substitutions) for k in data} - return data - -if __name__ == '__main__': - sys.exit(main(sys.argv[1:])) diff --git a/test/addons-napi/5_function_factory/build/Makefile b/test/addons-napi/5_function_factory/build/Makefile deleted file mode 100644 index 6b2c6c7e501a65..00000000000000 --- a/test/addons-napi/5_function_factory/build/Makefile +++ /dev/null @@ -1,342 +0,0 @@ -# We borrow heavily from the kernel build setup, though we are simpler since -# we don't have Kconfig tweaking settings on us. - -# The implicit make rules have it looking for RCS files, among other things. -# We instead explicitly write all the rules we care about. -# It's even quicker (saves ~200ms) to pass -r on the command line. -MAKEFLAGS=-r - -# The source directory tree. -srcdir := .. -abs_srcdir := $(abspath $(srcdir)) - -# The name of the builddir. -builddir_name ?= . 
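As background for the gyp-mac-tool copy deleted above: its _FindProvisioningProfile method builds "TEAM_ID.bundle_identifier", matches that string with fnmatch against each installed profile's application-identifier pattern, and keeps the profile whose matching pattern is longest, i.e. the most specific one. A simplified sketch of that selection rule follows; the team identifier, bundle identifier, and profile names are invented, and the error path only approximates the original's stderr message and exit(1).

import fnmatch

def pick_profile(bundle_identifier, candidates, team='ABCDE12345'):
    # candidates maps application-identifier patterns to profile names.
    app_id = '%s.%s' % (team, bundle_identifier)
    matches = {pattern: name for pattern, name in candidates.items()
               if fnmatch.fnmatch(app_id, pattern)}
    if not matches:
        raise SystemExit('cannot find mobile provisioning for %s' % bundle_identifier)
    # The longest matching pattern is treated as the most specific profile.
    return matches[max(matches, key=len)]

profiles = {
    'ABCDE12345.*': 'wildcard-profile',
    'ABCDE12345.org.example.*': 'org-wide-profile',
}
print(pick_profile('org.example.addon', profiles))  # -> 'org-wide-profile'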
- -# The V=1 flag on command line makes us verbosely print command lines. -ifdef V - quiet= -else - quiet=quiet_ -endif - -# Specify BUILDTYPE=Release on the command line for a release build. -BUILDTYPE ?= Release - -# Directory all our build output goes into. -# Note that this must be two directories beneath src/ for unit tests to pass, -# as they reach into the src/ directory for data with relative paths. -builddir ?= $(builddir_name)/$(BUILDTYPE) -abs_builddir := $(abspath $(builddir)) -depsdir := $(builddir)/.deps - -# Object output directory. -obj := $(builddir)/obj -abs_obj := $(abspath $(obj)) - -# We build up a list of every single one of the targets so we can slurp in the -# generated dependency rule Makefiles in one pass. -all_deps := - - - -CC.target ?= $(CC) -CFLAGS.target ?= $(CPPFLAGS) $(CFLAGS) -CXX.target ?= $(CXX) -CXXFLAGS.target ?= $(CPPFLAGS) $(CXXFLAGS) -LINK.target ?= $(LINK) -LDFLAGS.target ?= $(LDFLAGS) -AR.target ?= $(AR) - -# C++ apps need to be linked with g++. -LINK ?= $(CXX.target) - -# TODO(evan): move all cross-compilation logic to gyp-time so we don't need -# to replicate this environment fallback in make as well. -CC.host ?= gcc -CFLAGS.host ?= $(CPPFLAGS_host) $(CFLAGS_host) -CXX.host ?= g++ -CXXFLAGS.host ?= $(CPPFLAGS_host) $(CXXFLAGS_host) -LINK.host ?= $(CXX.host) -LDFLAGS.host ?= -AR.host ?= ar - -# Define a dir function that can handle spaces. -# http://www.gnu.org/software/make/manual/make.html#Syntax-of-Functions -# "leading spaces cannot appear in the text of the first argument as written. -# These characters can be put into the argument value by variable substitution." -empty := -space := $(empty) $(empty) - -# http://stackoverflow.com/questions/1189781/using-make-dir-or-notdir-on-a-path-with-spaces -replace_spaces = $(subst $(space),?,$1) -unreplace_spaces = $(subst ?,$(space),$1) -dirx = $(call unreplace_spaces,$(dir $(call replace_spaces,$1))) - -# Flags to make gcc output dependency info. Note that you need to be -# careful here to use the flags that ccache and distcc can understand. -# We write to a dep file on the side first and then rename at the end -# so we can't end up with a broken dep file. -depfile = $(depsdir)/$(call replace_spaces,$@).d -DEPFLAGS = -MMD -MF $(depfile).raw - -# We have to fixup the deps output in a few ways. -# (1) the file output should mention the proper .o file. -# ccache or distcc lose the path to the target, so we convert a rule of -# the form: -# foobar.o: DEP1 DEP2 -# into -# path/to/foobar.o: DEP1 DEP2 -# (2) we want missing files not to cause us to fail to build. -# We want to rewrite -# foobar.o: DEP1 DEP2 \ -# DEP3 -# to -# DEP1: -# DEP2: -# DEP3: -# so if the files are missing, they're just considered phony rules. -# We have to do some pretty insane escaping to get those backslashes -# and dollar signs past make, the shell, and sed at the same time. -# Doesn't work with spaces, but that's fine: .d files have spaces in -# their names replaced with other characters. -define fixup_dep -# The depfile may not exist if the input file didn't have any #includes. -touch $(depfile).raw -# Fixup path as in (1). -sed -e "s|^$(notdir $@)|$@|" $(depfile).raw >> $(depfile) -# Add extra rules as in (2). -# We remove slashes and replace spaces with new lines; -# remove blank lines; -# delete the first line and append a colon to the remaining lines. 
-sed -e 's|\\||' -e 'y| |\n|' $(depfile).raw |\ - grep -v '^$$' |\ - sed -e 1d -e 's|$$|:|' \ - >> $(depfile) -rm $(depfile).raw -endef - -# Command definitions: -# - cmd_foo is the actual command to run; -# - quiet_cmd_foo is the brief-output summary of the command. - -quiet_cmd_cc = CC($(TOOLSET)) $@ -cmd_cc = $(CC.$(TOOLSET)) $(GYP_CFLAGS) $(DEPFLAGS) $(CFLAGS.$(TOOLSET)) -c -o $@ $< - -quiet_cmd_cxx = CXX($(TOOLSET)) $@ -cmd_cxx = $(CXX.$(TOOLSET)) $(GYP_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< - -quiet_cmd_objc = CXX($(TOOLSET)) $@ -cmd_objc = $(CC.$(TOOLSET)) $(GYP_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $< - -quiet_cmd_objcxx = CXX($(TOOLSET)) $@ -cmd_objcxx = $(CXX.$(TOOLSET)) $(GYP_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $< - -# Commands for precompiled header files. -quiet_cmd_pch_c = CXX($(TOOLSET)) $@ -cmd_pch_c = $(CC.$(TOOLSET)) $(GYP_PCH_CFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< -quiet_cmd_pch_cc = CXX($(TOOLSET)) $@ -cmd_pch_cc = $(CC.$(TOOLSET)) $(GYP_PCH_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< -quiet_cmd_pch_m = CXX($(TOOLSET)) $@ -cmd_pch_m = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $< -quiet_cmd_pch_mm = CXX($(TOOLSET)) $@ -cmd_pch_mm = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $< - -# gyp-mac-tool is written next to the root Makefile by gyp. -# Use $(4) for the command, since $(2) and $(3) are used as flag by do_cmd -# already. -quiet_cmd_mac_tool = MACTOOL $(4) $< -cmd_mac_tool = ./gyp-mac-tool $(4) $< "$@" - -quiet_cmd_mac_package_framework = PACKAGE FRAMEWORK $@ -cmd_mac_package_framework = ./gyp-mac-tool package-framework "$@" $(4) - -quiet_cmd_infoplist = INFOPLIST $@ -cmd_infoplist = $(CC.$(TOOLSET)) -E -P -Wno-trigraphs -x c $(INFOPLIST_DEFINES) "$<" -o "$@" - -quiet_cmd_touch = TOUCH $@ -cmd_touch = touch $@ - -quiet_cmd_copy = COPY $@ -# send stderr to /dev/null to ignore messages when linking directories. -cmd_copy = rm -rf "$@" && cp -af "$<" "$@" - -quiet_cmd_alink = LIBTOOL-STATIC $@ -cmd_alink = rm -f $@ && ./gyp-mac-tool filter-libtool libtool $(GYP_LIBTOOLFLAGS) -static -o $@ $(filter %.o,$^) - -quiet_cmd_link = LINK($(TOOLSET)) $@ -cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS) - -quiet_cmd_solink = SOLINK($(TOOLSET)) $@ -cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS) - -quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@ -cmd_solink_module = $(LINK.$(TOOLSET)) -bundle $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS) - - -# Define an escape_quotes function to escape single quotes. -# This allows us to handle quotes properly as long as we always use -# use single quotes and escape_quotes. -escape_quotes = $(subst ','\'',$(1)) -# This comment is here just to include a ' to unconfuse syntax highlighting. -# Define an escape_vars function to escape '$' variable syntax. -# This allows us to read/write command lines with shell variables (e.g. -# $LD_LIBRARY_PATH), without triggering make substitution. -escape_vars = $(subst $$,$$$$,$(1)) -# Helper that expands to a shell command to echo a string exactly as it is in -# make. This uses printf instead of echo because printf's behaviour with respect -# to escape sequences is more portable than echo's across different shells -# (e.g., dash, bash). 
-exact_echo = printf '%s\n' '$(call escape_quotes,$(1))' - -# Helper to compare the command we're about to run against the command -# we logged the last time we ran the command. Produces an empty -# string (false) when the commands match. -# Tricky point: Make has no string-equality test function. -# The kernel uses the following, but it seems like it would have false -# positives, where one string reordered its arguments. -# arg_check = $(strip $(filter-out $(cmd_$(1)), $(cmd_$@)) \ -# $(filter-out $(cmd_$@), $(cmd_$(1)))) -# We instead substitute each for the empty string into the other, and -# say they're equal if both substitutions produce the empty string. -# .d files contain ? instead of spaces, take that into account. -command_changed = $(or $(subst $(cmd_$(1)),,$(cmd_$(call replace_spaces,$@))),\ - $(subst $(cmd_$(call replace_spaces,$@)),,$(cmd_$(1)))) - -# Helper that is non-empty when a prerequisite changes. -# Normally make does this implicitly, but we force rules to always run -# so we can check their command lines. -# $? -- new prerequisites -# $| -- order-only dependencies -prereq_changed = $(filter-out FORCE_DO_CMD,$(filter-out $|,$?)) - -# Helper that executes all postbuilds until one fails. -define do_postbuilds - @E=0;\ - for p in $(POSTBUILDS); do\ - eval $$p;\ - E=$$?;\ - if [ $$E -ne 0 ]; then\ - break;\ - fi;\ - done;\ - if [ $$E -ne 0 ]; then\ - rm -rf "$@";\ - exit $$E;\ - fi -endef - -# do_cmd: run a command via the above cmd_foo names, if necessary. -# Should always run for a given target to handle command-line changes. -# Second argument, if non-zero, makes it do asm/C/C++ dependency munging. -# Third argument, if non-zero, makes it do POSTBUILDS processing. -# Note: We intentionally do NOT call dirx for depfile, since it contains ? for -# spaces already and dirx strips the ? characters. -define do_cmd -$(if $(or $(command_changed),$(prereq_changed)), - @$(call exact_echo, $($(quiet)cmd_$(1))) - @mkdir -p "$(call dirx,$@)" "$(dir $(depfile))" - $(if $(findstring flock,$(word 2,$(cmd_$1))), - @$(cmd_$(1)) - @echo " $(quiet_cmd_$(1)): Finished", - @$(cmd_$(1)) - ) - @$(call exact_echo,$(call escape_vars,cmd_$(call replace_spaces,$@) := $(cmd_$(1)))) > $(depfile) - @$(if $(2),$(fixup_dep)) - $(if $(and $(3), $(POSTBUILDS)), - $(call do_postbuilds) - ) -) -endef - -# Declare the "all" target first so it is the default, -# even though we don't have the deps yet. -.PHONY: all -all: - -# make looks for ways to re-generate included makefiles, but in our case, we -# don't have a direct way. Explicitly telling make that it has nothing to do -# for them makes it go faster. -%.d: ; - -# Use FORCE_DO_CMD to force a target to run. Should be coupled with -# do_cmd. -.PHONY: FORCE_DO_CMD -FORCE_DO_CMD: - -TOOLSET := target -# Suffix rules, putting all outputs into $(obj). -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.m FORCE_DO_CMD - @$(call do_cmd,objc,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.mm FORCE_DO_CMD - @$(call do_cmd,objcxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.S FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.s FORCE_DO_CMD - @$(call do_cmd,cc,1) - -# Try building from generated source, too. 
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.m FORCE_DO_CMD - @$(call do_cmd,objc,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.mm FORCE_DO_CMD - @$(call do_cmd,objcxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.S FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.s FORCE_DO_CMD - @$(call do_cmd,cc,1) - -$(obj).$(TOOLSET)/%.o: $(obj)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.m FORCE_DO_CMD - @$(call do_cmd,objc,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.mm FORCE_DO_CMD - @$(call do_cmd,objcxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.S FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.s FORCE_DO_CMD - @$(call do_cmd,cc,1) - - -ifeq ($(strip $(foreach prefix,$(NO_LOAD),\ - $(findstring $(join ^,$(prefix)),\ - $(join ^,binding.target.mk)))),) - include binding.target.mk -endif - -quiet_cmd_regen_makefile = ACTION Regenerating $@ -cmd_regen_makefile = cd $(srcdir); /Users/trott/io.js/deps/npm/node_modules/node-gyp/gyp/gyp_main.py -fmake --ignore-environment "--toplevel-dir=." -I/Users/trott/io.js/test/addons-napi/5_function_factory/build/config.gypi -I/Users/trott/io.js/deps/npm/node_modules/node-gyp/addon.gypi -I/Users/trott/io.js/common.gypi "--depth=." "-Goutput_dir=." "--generator-output=build" "-Dlibrary=shared_library" "-Dvisibility=default" "-Dnode_root_dir=/Users/trott/io.js" "-Dnode_gyp_dir=/Users/trott/io.js/deps/npm/node_modules/node-gyp" "-Dnode_lib_file=/Users/trott/io.js/$(Configuration)/node.lib" "-Dmodule_root_dir=/Users/trott/io.js/test/addons-napi/5_function_factory" "-Dnode_engine=v8" binding.gyp -Makefile: $(srcdir)/../../../deps/npm/node_modules/node-gyp/addon.gypi $(srcdir)/build/config.gypi $(srcdir)/binding.gyp $(srcdir)/../../../common.gypi - $(call do_cmd,regen_makefile) - -# "all" is a concatenation of the "all" targets from all the included -# sub-makefiles. This is just here to clarify. -all: - -# Add in dependency-tracking rules. $(all_deps) is the list of every single -# target in our tree. Only consider the ones with .d (dependency) info: -d_files := $(wildcard $(foreach f,$(all_deps),$(depsdir)/$(f).d)) -ifneq ($(d_files),) - include $(d_files) -endif diff --git a/test/addons-napi/5_function_factory/build/binding.Makefile b/test/addons-napi/5_function_factory/build/binding.Makefile deleted file mode 100644 index 0556c70f61905f..00000000000000 --- a/test/addons-napi/5_function_factory/build/binding.Makefile +++ /dev/null @@ -1,6 +0,0 @@ -# This file is generated by gyp; do not edit. - -export builddir_name ?= ./build/. -.PHONY: all -all: - $(MAKE) binding diff --git a/test/addons-napi/5_function_factory/build/binding.target.mk b/test/addons-napi/5_function_factory/build/binding.target.mk deleted file mode 100644 index a707bab99ab136..00000000000000 --- a/test/addons-napi/5_function_factory/build/binding.target.mk +++ /dev/null @@ -1,178 +0,0 @@ -# This file is generated by gyp; do not edit. 
- -TOOLSET := target -TARGET := binding -DEFS_Debug := \ - '-DNODE_GYP_MODULE_NAME=binding' \ - '-DUSING_UV_SHARED=1' \ - '-DUSING_V8_SHARED=1' \ - '-DV8_DEPRECATION_WARNINGS=1' \ - '-D_DARWIN_USE_64_BIT_INODE=1' \ - '-D_LARGEFILE_SOURCE' \ - '-D_FILE_OFFSET_BITS=64' \ - '-DBUILDING_NODE_EXTENSION' \ - '-DDEBUG' \ - '-D_DEBUG' \ - '-DV8_ENABLE_CHECKS' - -# Flags passed to all source files. -CFLAGS_Debug := \ - -O0 \ - -gdwarf-2 \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -Wall \ - -Wendif-labels \ - -W \ - -Wno-unused-parameter - -# Flags passed to only C files. -CFLAGS_C_Debug := \ - -fno-strict-aliasing - -# Flags passed to only C++ files. -CFLAGS_CC_Debug := \ - -std=gnu++0x \ - -stdlib=libc++ \ - -fno-rtti \ - -fno-exceptions \ - -fno-threadsafe-statics \ - -fno-strict-aliasing - -# Flags passed to only ObjC files. -CFLAGS_OBJC_Debug := - -# Flags passed to only ObjC++ files. -CFLAGS_OBJCC_Debug := - -INCS_Debug := \ - -I/Users/trott/io.js/include/node \ - -I/Users/trott/io.js/src \ - -I/Users/trott/io.js/deps/uv/include \ - -I/Users/trott/io.js/deps/v8/include - -DEFS_Release := \ - '-DNODE_GYP_MODULE_NAME=binding' \ - '-DUSING_UV_SHARED=1' \ - '-DUSING_V8_SHARED=1' \ - '-DV8_DEPRECATION_WARNINGS=1' \ - '-D_DARWIN_USE_64_BIT_INODE=1' \ - '-D_LARGEFILE_SOURCE' \ - '-D_FILE_OFFSET_BITS=64' \ - '-DBUILDING_NODE_EXTENSION' - -# Flags passed to all source files. -CFLAGS_Release := \ - -Os \ - -gdwarf-2 \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -Wall \ - -Wendif-labels \ - -W \ - -Wno-unused-parameter - -# Flags passed to only C files. -CFLAGS_C_Release := \ - -fno-strict-aliasing - -# Flags passed to only C++ files. -CFLAGS_CC_Release := \ - -std=gnu++0x \ - -stdlib=libc++ \ - -fno-rtti \ - -fno-exceptions \ - -fno-threadsafe-statics \ - -fno-strict-aliasing - -# Flags passed to only ObjC files. -CFLAGS_OBJC_Release := - -# Flags passed to only ObjC++ files. -CFLAGS_OBJCC_Release := - -INCS_Release := \ - -I/Users/trott/io.js/include/node \ - -I/Users/trott/io.js/src \ - -I/Users/trott/io.js/deps/uv/include \ - -I/Users/trott/io.js/deps/v8/include - -OBJS := \ - $(obj).target/$(TARGET)/binding.o - -# Add to the list of files we specially track dependencies for. -all_deps += $(OBJS) - -# CFLAGS et al overrides must be target-local. -# See "Target-specific Variable Values" in the GNU Make manual. -$(OBJS): TOOLSET := $(TOOLSET) -$(OBJS): GYP_CFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) -$(OBJS): GYP_CXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) -$(OBJS): GYP_OBJCFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) $(CFLAGS_OBJC_$(BUILDTYPE)) -$(OBJS): GYP_OBJCXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) $(CFLAGS_OBJCC_$(BUILDTYPE)) - -# Suffix rules, putting all outputs into $(obj). - -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) - -# Try building from generated source, too. - -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) - -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) - -# End of this set of suffix rules -### Rules for final target. 
-LDFLAGS_Debug := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -L$(builddir) \ - -stdlib=libc++ - -LIBTOOLFLAGS_Debug := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first - -LDFLAGS_Release := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -L$(builddir) \ - -stdlib=libc++ - -LIBTOOLFLAGS_Release := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first - -LIBS := - -$(builddir)/binding.node: GYP_LDFLAGS := $(LDFLAGS_$(BUILDTYPE)) -$(builddir)/binding.node: LIBS := $(LIBS) -$(builddir)/binding.node: GYP_LIBTOOLFLAGS := $(LIBTOOLFLAGS_$(BUILDTYPE)) -$(builddir)/binding.node: TOOLSET := $(TOOLSET) -$(builddir)/binding.node: $(OBJS) FORCE_DO_CMD - $(call do_cmd,solink_module) - -all_deps += $(builddir)/binding.node -# Add target alias -.PHONY: binding -binding: $(builddir)/binding.node - -# Short alias for building this executable. -.PHONY: binding.node -binding.node: $(builddir)/binding.node - -# Add executable to "all" target. -.PHONY: all -all: $(builddir)/binding.node - diff --git a/test/addons-napi/5_function_factory/build/config.gypi b/test/addons-napi/5_function_factory/build/config.gypi deleted file mode 100644 index 6a35cfccbb772b..00000000000000 --- a/test/addons-napi/5_function_factory/build/config.gypi +++ /dev/null @@ -1,70 +0,0 @@ -# Do not edit. File was generated by node-gyp's "configure" step -{ - "target_defaults": { - "cflags": [], - "default_configuration": "Release", - "defines": [], - "include_dirs": [], - "libraries": [] - }, - "variables": { - "asan": 0, - "coverage": "false", - "debug_devtools": "node", - "debug_http2": "false", - "debug_nghttp2": "false", - "force_dynamic_crt": 0, - "host_arch": "x64", - "icu_data_file": "icudt59l.dat", - "icu_data_in": "../../deps/icu-small/source/data/in/icudt59l.dat", - "icu_endianness": "l", - "icu_gyp_path": "tools/icu/icu-generic.gyp", - "icu_locales": "en,root", - "icu_path": "deps/icu-small", - "icu_small": "true", - "icu_ver_major": "59", - "llvm_version": 0, - "node_byteorder": "little", - "node_enable_d8": "false", - "node_enable_v8_vtunejit": "false", - "node_install_npm": "true", - "node_module_version": 58, - "node_no_browser_globals": "false", - "node_prefix": "/usr/local", - "node_release_urlbase": "", - "node_shared": "false", - "node_shared_cares": "false", - "node_shared_http_parser": "false", - "node_shared_libuv": "false", - "node_shared_openssl": "false", - "node_shared_zlib": "false", - "node_tag": "", - "node_use_bundled_v8": "true", - "node_use_dtrace": "true", - "node_use_etw": "false", - "node_use_lttng": "false", - "node_use_openssl": "true", - "node_use_perfctr": "false", - "node_use_v8_platform": "true", - "node_without_node_options": "false", - "openssl_fips": "", - "openssl_no_asm": 0, - "shlib_suffix": "58.dylib", - "target_arch": "x64", - "uv_parent_path": "/deps/uv/", - "uv_use_dtrace": "true", - "v8_enable_gdbjit": 0, - "v8_enable_i18n_support": 1, - "v8_enable_inspector": 1, - "v8_no_strict_aliasing": 1, - "v8_optimized_debug": 0, - "v8_promise_internal_field_count": 1, - "v8_random_seed": 0, - "v8_trace_maps": 0, - "v8_use_snapshot": "true", - "want_separate_host_toolset": 0, - "xcode_version": "8.0", - "nodedir": "/Users/trott/io.js", - "standalone_static_library": 1 - } -} diff --git a/test/addons-napi/5_function_factory/build/gyp-mac-tool b/test/addons-napi/5_function_factory/build/gyp-mac-tool 
deleted file mode 100755 index 8ef02b0493a003..00000000000000 --- a/test/addons-napi/5_function_factory/build/gyp-mac-tool +++ /dev/null @@ -1,611 +0,0 @@ -#!/usr/bin/env python -# Generated by gyp. Do not edit. -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Utility functions to perform Xcode-style build steps. - -These functions are executed via gyp-mac-tool when using the Makefile generator. -""" - -import fcntl -import fnmatch -import glob -import json -import os -import plistlib -import re -import shutil -import string -import subprocess -import sys -import tempfile - - -def main(args): - executor = MacTool() - exit_code = executor.Dispatch(args) - if exit_code is not None: - sys.exit(exit_code) - - -class MacTool(object): - """This class performs all the Mac tooling steps. The methods can either be - executed directly, or dispatched from an argument list.""" - - def Dispatch(self, args): - """Dispatches a string command to a method.""" - if len(args) < 1: - raise Exception("Not enough arguments") - - method = "Exec%s" % self._CommandifyName(args[0]) - return getattr(self, method)(*args[1:]) - - def _CommandifyName(self, name_string): - """Transforms a tool name like copy-info-plist to CopyInfoPlist""" - return name_string.title().replace('-', '') - - def ExecCopyBundleResource(self, source, dest, convert_to_binary): - """Copies a resource file to the bundle/Resources directory, performing any - necessary compilation on each resource.""" - extension = os.path.splitext(source)[1].lower() - if os.path.isdir(source): - # Copy tree. - # TODO(thakis): This copies file attributes like mtime, while the - # single-file branch below doesn't. This should probably be changed to - # be consistent with the single-file branch. - if os.path.exists(dest): - shutil.rmtree(dest) - shutil.copytree(source, dest) - elif extension == '.xib': - return self._CopyXIBFile(source, dest) - elif extension == '.storyboard': - return self._CopyXIBFile(source, dest) - elif extension == '.strings': - self._CopyStringsFile(source, dest, convert_to_binary) - else: - shutil.copy(source, dest) - - def _CopyXIBFile(self, source, dest): - """Compiles a XIB file with ibtool into a binary plist in the bundle.""" - - # ibtool sometimes crashes with relative paths. See crbug.com/314728. 
- base = os.path.dirname(os.path.realpath(__file__)) - if os.path.relpath(source): - source = os.path.join(base, source) - if os.path.relpath(dest): - dest = os.path.join(base, dest) - - args = ['xcrun', 'ibtool', '--errors', '--warnings', '--notices', - '--output-format', 'human-readable-text', '--compile', dest, source] - ibtool_section_re = re.compile(r'/\*.*\*/') - ibtool_re = re.compile(r'.*note:.*is clipping its content') - ibtoolout = subprocess.Popen(args, stdout=subprocess.PIPE) - current_section_header = None - for line in ibtoolout.stdout: - if ibtool_section_re.match(line): - current_section_header = line - elif not ibtool_re.match(line): - if current_section_header: - sys.stdout.write(current_section_header) - current_section_header = None - sys.stdout.write(line) - return ibtoolout.returncode - - def _ConvertToBinary(self, dest): - subprocess.check_call([ - 'xcrun', 'plutil', '-convert', 'binary1', '-o', dest, dest]) - - def _CopyStringsFile(self, source, dest, convert_to_binary): - """Copies a .strings file using iconv to reconvert the input into UTF-16.""" - input_code = self._DetectInputEncoding(source) or "UTF-8" - - # Xcode's CpyCopyStringsFile / builtin-copyStrings seems to call - # CFPropertyListCreateFromXMLData() behind the scenes; at least it prints - # CFPropertyListCreateFromXMLData(): Old-style plist parser: missing - # semicolon in dictionary. - # on invalid files. Do the same kind of validation. - import CoreFoundation - s = open(source, 'rb').read() - d = CoreFoundation.CFDataCreate(None, s, len(s)) - _, error = CoreFoundation.CFPropertyListCreateFromXMLData(None, d, 0, None) - if error: - return - - fp = open(dest, 'wb') - fp.write(s.decode(input_code).encode('UTF-16')) - fp.close() - - if convert_to_binary == 'True': - self._ConvertToBinary(dest) - - def _DetectInputEncoding(self, file_name): - """Reads the first few bytes from file_name and tries to guess the text - encoding. Returns None as a guess if it can't detect it.""" - fp = open(file_name, 'rb') - try: - header = fp.read(3) - except e: - fp.close() - return None - fp.close() - if header.startswith("\xFE\xFF"): - return "UTF-16" - elif header.startswith("\xFF\xFE"): - return "UTF-16" - elif header.startswith("\xEF\xBB\xBF"): - return "UTF-8" - else: - return None - - def ExecCopyInfoPlist(self, source, dest, convert_to_binary, *keys): - """Copies the |source| Info.plist to the destination directory |dest|.""" - # Read the source Info.plist into memory. - fd = open(source, 'r') - lines = fd.read() - fd.close() - - # Insert synthesized key/value pairs (e.g. BuildMachineOSBuild). - plist = plistlib.readPlistFromString(lines) - if keys: - plist = dict(plist.items() + json.loads(keys[0]).items()) - lines = plistlib.writePlistToString(plist) - - # Go through all the environment variables and replace them as variables in - # the file. - IDENT_RE = re.compile(r'[/\s]') - for key in os.environ: - if key.startswith('_'): - continue - evar = '${%s}' % key - evalue = os.environ[key] - lines = string.replace(lines, evar, evalue) - - # Xcode supports various suffices on environment variables, which are - # all undocumented. :rfc1034identifier is used in the standard project - # template these days, and :identifier was used earlier. They are used to - # convert non-url characters into things that look like valid urls -- - # except that the replacement character for :identifier, '_' isn't valid - # in a URL either -- oops, hence :rfc1034identifier was born. 
- evar = '${%s:identifier}' % key - evalue = IDENT_RE.sub('_', os.environ[key]) - lines = string.replace(lines, evar, evalue) - - evar = '${%s:rfc1034identifier}' % key - evalue = IDENT_RE.sub('-', os.environ[key]) - lines = string.replace(lines, evar, evalue) - - # Remove any keys with values that haven't been replaced. - lines = lines.split('\n') - for i in range(len(lines)): - if lines[i].strip().startswith("${"): - lines[i] = None - lines[i - 1] = None - lines = '\n'.join(filter(lambda x: x is not None, lines)) - - # Write out the file with variables replaced. - fd = open(dest, 'w') - fd.write(lines) - fd.close() - - # Now write out PkgInfo file now that the Info.plist file has been - # "compiled". - self._WritePkgInfo(dest) - - if convert_to_binary == 'True': - self._ConvertToBinary(dest) - - def _WritePkgInfo(self, info_plist): - """This writes the PkgInfo file from the data stored in Info.plist.""" - plist = plistlib.readPlist(info_plist) - if not plist: - return - - # Only create PkgInfo for executable types. - package_type = plist['CFBundlePackageType'] - if package_type != 'APPL': - return - - # The format of PkgInfo is eight characters, representing the bundle type - # and bundle signature, each four characters. If that is missing, four - # '?' characters are used instead. - signature_code = plist.get('CFBundleSignature', '????') - if len(signature_code) != 4: # Wrong length resets everything, too. - signature_code = '?' * 4 - - dest = os.path.join(os.path.dirname(info_plist), 'PkgInfo') - fp = open(dest, 'w') - fp.write('%s%s' % (package_type, signature_code)) - fp.close() - - def ExecFlock(self, lockfile, *cmd_list): - """Emulates the most basic behavior of Linux's flock(1).""" - # Rely on exception handling to report errors. - fd = os.open(lockfile, os.O_RDONLY|os.O_NOCTTY|os.O_CREAT, 0o666) - fcntl.flock(fd, fcntl.LOCK_EX) - return subprocess.call(cmd_list) - - def ExecFilterLibtool(self, *cmd_list): - """Calls libtool and filters out '/path/to/libtool: file: foo.o has no - symbols'.""" - libtool_re = re.compile(r'^.*libtool: file: .* has no symbols$') - libtool_re5 = re.compile( - r'^.*libtool: warning for library: ' + - r'.* the table of contents is empty ' + - r'\(no object file members in the library define global symbols\)$') - env = os.environ.copy() - # Ref: - # http://www.opensource.apple.com/source/cctools/cctools-809/misc/libtool.c - # The problem with this flag is that it resets the file mtime on the file to - # epoch=0, e.g. 1970-1-1 or 1969-12-31 depending on timezone. - env['ZERO_AR_DATE'] = '1' - libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env) - _, err = libtoolout.communicate() - for line in err.splitlines(): - if not libtool_re.match(line) and not libtool_re5.match(line): - print >>sys.stderr, line - # Unconditionally touch the output .a file on the command line if present - # and the command succeeded. A bit hacky. - if not libtoolout.returncode: - for i in range(len(cmd_list) - 1): - if cmd_list[i] == "-o" and cmd_list[i+1].endswith('.a'): - os.utime(cmd_list[i+1], None) - break - return libtoolout.returncode - - def ExecPackageFramework(self, framework, version): - """Takes a path to Something.framework and the Current version of that and - sets up all the symlinks.""" - # Find the name of the binary based on the part before the ".framework". 
- binary = os.path.basename(framework).split('.')[0] - - CURRENT = 'Current' - RESOURCES = 'Resources' - VERSIONS = 'Versions' - - if not os.path.exists(os.path.join(framework, VERSIONS, version, binary)): - # Binary-less frameworks don't seem to contain symlinks (see e.g. - # chromium's out/Debug/org.chromium.Chromium.manifest/ bundle). - return - - # Move into the framework directory to set the symlinks correctly. - pwd = os.getcwd() - os.chdir(framework) - - # Set up the Current version. - self._Relink(version, os.path.join(VERSIONS, CURRENT)) - - # Set up the root symlinks. - self._Relink(os.path.join(VERSIONS, CURRENT, binary), binary) - self._Relink(os.path.join(VERSIONS, CURRENT, RESOURCES), RESOURCES) - - # Back to where we were before! - os.chdir(pwd) - - def _Relink(self, dest, link): - """Creates a symlink to |dest| named |link|. If |link| already exists, - it is overwritten.""" - if os.path.lexists(link): - os.remove(link) - os.symlink(dest, link) - - def ExecCompileXcassets(self, keys, *inputs): - """Compiles multiple .xcassets files into a single .car file. - - This invokes 'actool' to compile all the inputs .xcassets files. The - |keys| arguments is a json-encoded dictionary of extra arguments to - pass to 'actool' when the asset catalogs contains an application icon - or a launch image. - - Note that 'actool' does not create the Assets.car file if the asset - catalogs does not contains imageset. - """ - command_line = [ - 'xcrun', 'actool', '--output-format', 'human-readable-text', - '--compress-pngs', '--notices', '--warnings', '--errors', - ] - is_iphone_target = 'IPHONEOS_DEPLOYMENT_TARGET' in os.environ - if is_iphone_target: - platform = os.environ['CONFIGURATION'].split('-')[-1] - if platform not in ('iphoneos', 'iphonesimulator'): - platform = 'iphonesimulator' - command_line.extend([ - '--platform', platform, '--target-device', 'iphone', - '--target-device', 'ipad', '--minimum-deployment-target', - os.environ['IPHONEOS_DEPLOYMENT_TARGET'], '--compile', - os.path.abspath(os.environ['CONTENTS_FOLDER_PATH']), - ]) - else: - command_line.extend([ - '--platform', 'macosx', '--target-device', 'mac', - '--minimum-deployment-target', os.environ['MACOSX_DEPLOYMENT_TARGET'], - '--compile', - os.path.abspath(os.environ['UNLOCALIZED_RESOURCES_FOLDER_PATH']), - ]) - if keys: - keys = json.loads(keys) - for key, value in keys.iteritems(): - arg_name = '--' + key - if isinstance(value, bool): - if value: - command_line.append(arg_name) - elif isinstance(value, list): - for v in value: - command_line.append(arg_name) - command_line.append(str(v)) - else: - command_line.append(arg_name) - command_line.append(str(value)) - # Note: actool crashes if inputs path are relative, so use os.path.abspath - # to get absolute path name for inputs. - command_line.extend(map(os.path.abspath, inputs)) - subprocess.check_call(command_line) - - def ExecMergeInfoPlist(self, output, *inputs): - """Merge multiple .plist files into a single .plist file.""" - merged_plist = {} - for path in inputs: - plist = self._LoadPlistMaybeBinary(path) - self._MergePlist(merged_plist, plist) - plistlib.writePlist(merged_plist, output) - - def ExecCodeSignBundle(self, key, resource_rules, entitlements, provisioning): - """Code sign a bundle. - - This function tries to code sign an iOS bundle, following the same - algorithm as Xcode: - 1. copy ResourceRules.plist from the user or the SDK into the bundle, - 2. 
pick the provisioning profile that best match the bundle identifier, - and copy it into the bundle as embedded.mobileprovision, - 3. copy Entitlements.plist from user or SDK next to the bundle, - 4. code sign the bundle. - """ - resource_rules_path = self._InstallResourceRules(resource_rules) - substitutions, overrides = self._InstallProvisioningProfile( - provisioning, self._GetCFBundleIdentifier()) - entitlements_path = self._InstallEntitlements( - entitlements, substitutions, overrides) - subprocess.check_call([ - 'codesign', '--force', '--sign', key, '--resource-rules', - resource_rules_path, '--entitlements', entitlements_path, - os.path.join( - os.environ['TARGET_BUILD_DIR'], - os.environ['FULL_PRODUCT_NAME'])]) - - def _InstallResourceRules(self, resource_rules): - """Installs ResourceRules.plist from user or SDK into the bundle. - - Args: - resource_rules: string, optional, path to the ResourceRules.plist file - to use, default to "${SDKROOT}/ResourceRules.plist" - - Returns: - Path to the copy of ResourceRules.plist into the bundle. - """ - source_path = resource_rules - target_path = os.path.join( - os.environ['BUILT_PRODUCTS_DIR'], - os.environ['CONTENTS_FOLDER_PATH'], - 'ResourceRules.plist') - if not source_path: - source_path = os.path.join( - os.environ['SDKROOT'], 'ResourceRules.plist') - shutil.copy2(source_path, target_path) - return target_path - - def _InstallProvisioningProfile(self, profile, bundle_identifier): - """Installs embedded.mobileprovision into the bundle. - - Args: - profile: string, optional, short name of the .mobileprovision file - to use, if empty or the file is missing, the best file installed - will be used - bundle_identifier: string, value of CFBundleIdentifier from Info.plist - - Returns: - A tuple containing two dictionary: variables substitutions and values - to overrides when generating the entitlements file. - """ - source_path, provisioning_data, team_id = self._FindProvisioningProfile( - profile, bundle_identifier) - target_path = os.path.join( - os.environ['BUILT_PRODUCTS_DIR'], - os.environ['CONTENTS_FOLDER_PATH'], - 'embedded.mobileprovision') - shutil.copy2(source_path, target_path) - substitutions = self._GetSubstitutions(bundle_identifier, team_id + '.') - return substitutions, provisioning_data['Entitlements'] - - def _FindProvisioningProfile(self, profile, bundle_identifier): - """Finds the .mobileprovision file to use for signing the bundle. - - Checks all the installed provisioning profiles (or if the user specified - the PROVISIONING_PROFILE variable, only consult it) and select the most - specific that correspond to the bundle identifier. - - Args: - profile: string, optional, short name of the .mobileprovision file - to use, if empty or the file is missing, the best file installed - will be used - bundle_identifier: string, value of CFBundleIdentifier from Info.plist - - Returns: - A tuple of the path to the selected provisioning profile, the data of - the embedded plist in the provisioning profile and the team identifier - to use for code signing. - - Raises: - SystemExit: if no .mobileprovision can be used to sign the bundle. 
- """ - profiles_dir = os.path.join( - os.environ['HOME'], 'Library', 'MobileDevice', 'Provisioning Profiles') - if not os.path.isdir(profiles_dir): - print >>sys.stderr, ( - 'cannot find mobile provisioning for %s' % bundle_identifier) - sys.exit(1) - provisioning_profiles = None - if profile: - profile_path = os.path.join(profiles_dir, profile + '.mobileprovision') - if os.path.exists(profile_path): - provisioning_profiles = [profile_path] - if not provisioning_profiles: - provisioning_profiles = glob.glob( - os.path.join(profiles_dir, '*.mobileprovision')) - valid_provisioning_profiles = {} - for profile_path in provisioning_profiles: - profile_data = self._LoadProvisioningProfile(profile_path) - app_id_pattern = profile_data.get( - 'Entitlements', {}).get('application-identifier', '') - for team_identifier in profile_data.get('TeamIdentifier', []): - app_id = '%s.%s' % (team_identifier, bundle_identifier) - if fnmatch.fnmatch(app_id, app_id_pattern): - valid_provisioning_profiles[app_id_pattern] = ( - profile_path, profile_data, team_identifier) - if not valid_provisioning_profiles: - print >>sys.stderr, ( - 'cannot find mobile provisioning for %s' % bundle_identifier) - sys.exit(1) - # If the user has multiple provisioning profiles installed that can be - # used for ${bundle_identifier}, pick the most specific one (ie. the - # provisioning profile whose pattern is the longest). - selected_key = max(valid_provisioning_profiles, key=lambda v: len(v)) - return valid_provisioning_profiles[selected_key] - - def _LoadProvisioningProfile(self, profile_path): - """Extracts the plist embedded in a provisioning profile. - - Args: - profile_path: string, path to the .mobileprovision file - - Returns: - Content of the plist embedded in the provisioning profile as a dictionary. - """ - with tempfile.NamedTemporaryFile() as temp: - subprocess.check_call([ - 'security', 'cms', '-D', '-i', profile_path, '-o', temp.name]) - return self._LoadPlistMaybeBinary(temp.name) - - def _MergePlist(self, merged_plist, plist): - """Merge |plist| into |merged_plist|.""" - for key, value in plist.iteritems(): - if isinstance(value, dict): - merged_value = merged_plist.get(key, {}) - if isinstance(merged_value, dict): - self._MergePlist(merged_value, value) - merged_plist[key] = merged_value - else: - merged_plist[key] = value - else: - merged_plist[key] = value - - def _LoadPlistMaybeBinary(self, plist_path): - """Loads into a memory a plist possibly encoded in binary format. - - This is a wrapper around plistlib.readPlist that tries to convert the - plist to the XML format if it can't be parsed (assuming that it is in - the binary format). - - Args: - plist_path: string, path to a plist file, in XML or binary format - - Returns: - Content of the plist as a dictionary. - """ - try: - # First, try to read the file using plistlib that only supports XML, - # and if an exception is raised, convert a temporary copy to XML and - # load that copy. - return plistlib.readPlist(plist_path) - except: - pass - with tempfile.NamedTemporaryFile() as temp: - shutil.copy2(plist_path, temp.name) - subprocess.check_call(['plutil', '-convert', 'xml1', temp.name]) - return plistlib.readPlist(temp.name) - - def _GetSubstitutions(self, bundle_identifier, app_identifier_prefix): - """Constructs a dictionary of variable substitutions for Entitlements.plist. 
- - Args: - bundle_identifier: string, value of CFBundleIdentifier from Info.plist - app_identifier_prefix: string, value for AppIdentifierPrefix - - Returns: - Dictionary of substitutions to apply when generating Entitlements.plist. - """ - return { - 'CFBundleIdentifier': bundle_identifier, - 'AppIdentifierPrefix': app_identifier_prefix, - } - - def _GetCFBundleIdentifier(self): - """Extracts CFBundleIdentifier value from Info.plist in the bundle. - - Returns: - Value of CFBundleIdentifier in the Info.plist located in the bundle. - """ - info_plist_path = os.path.join( - os.environ['TARGET_BUILD_DIR'], - os.environ['INFOPLIST_PATH']) - info_plist_data = self._LoadPlistMaybeBinary(info_plist_path) - return info_plist_data['CFBundleIdentifier'] - - def _InstallEntitlements(self, entitlements, substitutions, overrides): - """Generates and install the ${BundleName}.xcent entitlements file. - - Expands variables "$(variable)" pattern in the source entitlements file, - add extra entitlements defined in the .mobileprovision file and the copy - the generated plist to "${BundlePath}.xcent". - - Args: - entitlements: string, optional, path to the Entitlements.plist template - to use, defaults to "${SDKROOT}/Entitlements.plist" - substitutions: dictionary, variable substitutions - overrides: dictionary, values to add to the entitlements - - Returns: - Path to the generated entitlements file. - """ - source_path = entitlements - target_path = os.path.join( - os.environ['BUILT_PRODUCTS_DIR'], - os.environ['PRODUCT_NAME'] + '.xcent') - if not source_path: - source_path = os.path.join( - os.environ['SDKROOT'], - 'Entitlements.plist') - shutil.copy2(source_path, target_path) - data = self._LoadPlistMaybeBinary(target_path) - data = self._ExpandVariables(data, substitutions) - if overrides: - for key in overrides: - if key not in data: - data[key] = overrides[key] - plistlib.writePlist(data, target_path) - return target_path - - def _ExpandVariables(self, data, substitutions): - """Expands variables "$(variable)" in data. - - Args: - data: object, can be either string, list or dictionary - substitutions: dictionary, variable substitutions to perform - - Returns: - Copy of data where each references to "$(variable)" has been replaced - by the corresponding value found in substitutions, or left intact if - the key was not found. - """ - if isinstance(data, str): - for key, value in substitutions.iteritems(): - data = data.replace('$(%s)' % key, value) - return data - if isinstance(data, list): - return [self._ExpandVariables(v, substitutions) for v in data] - if isinstance(data, dict): - return {k: self._ExpandVariables(data[k], substitutions) for k in data} - return data - -if __name__ == '__main__': - sys.exit(main(sys.argv[1:])) diff --git a/test/addons-napi/6_object_wrap/build/Makefile b/test/addons-napi/6_object_wrap/build/Makefile deleted file mode 100644 index 55e2154f05c0de..00000000000000 --- a/test/addons-napi/6_object_wrap/build/Makefile +++ /dev/null @@ -1,342 +0,0 @@ -# We borrow heavily from the kernel build setup, though we are simpler since -# we don't have Kconfig tweaking settings on us. - -# The implicit make rules have it looking for RCS files, among other things. -# We instead explicitly write all the rules we care about. -# It's even quicker (saves ~200ms) to pass -r on the command line. -MAKEFLAGS=-r - -# The source directory tree. -srcdir := .. -abs_srcdir := $(abspath $(srcdir)) - -# The name of the builddir. -builddir_name ?= . 
- -# The V=1 flag on command line makes us verbosely print command lines. -ifdef V - quiet= -else - quiet=quiet_ -endif - -# Specify BUILDTYPE=Release on the command line for a release build. -BUILDTYPE ?= Release - -# Directory all our build output goes into. -# Note that this must be two directories beneath src/ for unit tests to pass, -# as they reach into the src/ directory for data with relative paths. -builddir ?= $(builddir_name)/$(BUILDTYPE) -abs_builddir := $(abspath $(builddir)) -depsdir := $(builddir)/.deps - -# Object output directory. -obj := $(builddir)/obj -abs_obj := $(abspath $(obj)) - -# We build up a list of every single one of the targets so we can slurp in the -# generated dependency rule Makefiles in one pass. -all_deps := - - - -CC.target ?= $(CC) -CFLAGS.target ?= $(CPPFLAGS) $(CFLAGS) -CXX.target ?= $(CXX) -CXXFLAGS.target ?= $(CPPFLAGS) $(CXXFLAGS) -LINK.target ?= $(LINK) -LDFLAGS.target ?= $(LDFLAGS) -AR.target ?= $(AR) - -# C++ apps need to be linked with g++. -LINK ?= $(CXX.target) - -# TODO(evan): move all cross-compilation logic to gyp-time so we don't need -# to replicate this environment fallback in make as well. -CC.host ?= gcc -CFLAGS.host ?= $(CPPFLAGS_host) $(CFLAGS_host) -CXX.host ?= g++ -CXXFLAGS.host ?= $(CPPFLAGS_host) $(CXXFLAGS_host) -LINK.host ?= $(CXX.host) -LDFLAGS.host ?= -AR.host ?= ar - -# Define a dir function that can handle spaces. -# http://www.gnu.org/software/make/manual/make.html#Syntax-of-Functions -# "leading spaces cannot appear in the text of the first argument as written. -# These characters can be put into the argument value by variable substitution." -empty := -space := $(empty) $(empty) - -# http://stackoverflow.com/questions/1189781/using-make-dir-or-notdir-on-a-path-with-spaces -replace_spaces = $(subst $(space),?,$1) -unreplace_spaces = $(subst ?,$(space),$1) -dirx = $(call unreplace_spaces,$(dir $(call replace_spaces,$1))) - -# Flags to make gcc output dependency info. Note that you need to be -# careful here to use the flags that ccache and distcc can understand. -# We write to a dep file on the side first and then rename at the end -# so we can't end up with a broken dep file. -depfile = $(depsdir)/$(call replace_spaces,$@).d -DEPFLAGS = -MMD -MF $(depfile).raw - -# We have to fixup the deps output in a few ways. -# (1) the file output should mention the proper .o file. -# ccache or distcc lose the path to the target, so we convert a rule of -# the form: -# foobar.o: DEP1 DEP2 -# into -# path/to/foobar.o: DEP1 DEP2 -# (2) we want missing files not to cause us to fail to build. -# We want to rewrite -# foobar.o: DEP1 DEP2 \ -# DEP3 -# to -# DEP1: -# DEP2: -# DEP3: -# so if the files are missing, they're just considered phony rules. -# We have to do some pretty insane escaping to get those backslashes -# and dollar signs past make, the shell, and sed at the same time. -# Doesn't work with spaces, but that's fine: .d files have spaces in -# their names replaced with other characters. -define fixup_dep -# The depfile may not exist if the input file didn't have any #includes. -touch $(depfile).raw -# Fixup path as in (1). -sed -e "s|^$(notdir $@)|$@|" $(depfile).raw >> $(depfile) -# Add extra rules as in (2). -# We remove slashes and replace spaces with new lines; -# remove blank lines; -# delete the first line and append a colon to the remaining lines. 
-sed -e 's|\\||' -e 'y| |\n|' $(depfile).raw |\ - grep -v '^$$' |\ - sed -e 1d -e 's|$$|:|' \ - >> $(depfile) -rm $(depfile).raw -endef - -# Command definitions: -# - cmd_foo is the actual command to run; -# - quiet_cmd_foo is the brief-output summary of the command. - -quiet_cmd_cc = CC($(TOOLSET)) $@ -cmd_cc = $(CC.$(TOOLSET)) $(GYP_CFLAGS) $(DEPFLAGS) $(CFLAGS.$(TOOLSET)) -c -o $@ $< - -quiet_cmd_cxx = CXX($(TOOLSET)) $@ -cmd_cxx = $(CXX.$(TOOLSET)) $(GYP_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< - -quiet_cmd_objc = CXX($(TOOLSET)) $@ -cmd_objc = $(CC.$(TOOLSET)) $(GYP_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $< - -quiet_cmd_objcxx = CXX($(TOOLSET)) $@ -cmd_objcxx = $(CXX.$(TOOLSET)) $(GYP_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $< - -# Commands for precompiled header files. -quiet_cmd_pch_c = CXX($(TOOLSET)) $@ -cmd_pch_c = $(CC.$(TOOLSET)) $(GYP_PCH_CFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< -quiet_cmd_pch_cc = CXX($(TOOLSET)) $@ -cmd_pch_cc = $(CC.$(TOOLSET)) $(GYP_PCH_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< -quiet_cmd_pch_m = CXX($(TOOLSET)) $@ -cmd_pch_m = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $< -quiet_cmd_pch_mm = CXX($(TOOLSET)) $@ -cmd_pch_mm = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $< - -# gyp-mac-tool is written next to the root Makefile by gyp. -# Use $(4) for the command, since $(2) and $(3) are used as flag by do_cmd -# already. -quiet_cmd_mac_tool = MACTOOL $(4) $< -cmd_mac_tool = ./gyp-mac-tool $(4) $< "$@" - -quiet_cmd_mac_package_framework = PACKAGE FRAMEWORK $@ -cmd_mac_package_framework = ./gyp-mac-tool package-framework "$@" $(4) - -quiet_cmd_infoplist = INFOPLIST $@ -cmd_infoplist = $(CC.$(TOOLSET)) -E -P -Wno-trigraphs -x c $(INFOPLIST_DEFINES) "$<" -o "$@" - -quiet_cmd_touch = TOUCH $@ -cmd_touch = touch $@ - -quiet_cmd_copy = COPY $@ -# send stderr to /dev/null to ignore messages when linking directories. -cmd_copy = rm -rf "$@" && cp -af "$<" "$@" - -quiet_cmd_alink = LIBTOOL-STATIC $@ -cmd_alink = rm -f $@ && ./gyp-mac-tool filter-libtool libtool $(GYP_LIBTOOLFLAGS) -static -o $@ $(filter %.o,$^) - -quiet_cmd_link = LINK($(TOOLSET)) $@ -cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS) - -quiet_cmd_solink = SOLINK($(TOOLSET)) $@ -cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS) - -quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@ -cmd_solink_module = $(LINK.$(TOOLSET)) -bundle $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS) - - -# Define an escape_quotes function to escape single quotes. -# This allows us to handle quotes properly as long as we always use -# use single quotes and escape_quotes. -escape_quotes = $(subst ','\'',$(1)) -# This comment is here just to include a ' to unconfuse syntax highlighting. -# Define an escape_vars function to escape '$' variable syntax. -# This allows us to read/write command lines with shell variables (e.g. -# $LD_LIBRARY_PATH), without triggering make substitution. -escape_vars = $(subst $$,$$$$,$(1)) -# Helper that expands to a shell command to echo a string exactly as it is in -# make. This uses printf instead of echo because printf's behaviour with respect -# to escape sequences is more portable than echo's across different shells -# (e.g., dash, bash). 
-exact_echo = printf '%s\n' '$(call escape_quotes,$(1))' - -# Helper to compare the command we're about to run against the command -# we logged the last time we ran the command. Produces an empty -# string (false) when the commands match. -# Tricky point: Make has no string-equality test function. -# The kernel uses the following, but it seems like it would have false -# positives, where one string reordered its arguments. -# arg_check = $(strip $(filter-out $(cmd_$(1)), $(cmd_$@)) \ -# $(filter-out $(cmd_$@), $(cmd_$(1)))) -# We instead substitute each for the empty string into the other, and -# say they're equal if both substitutions produce the empty string. -# .d files contain ? instead of spaces, take that into account. -command_changed = $(or $(subst $(cmd_$(1)),,$(cmd_$(call replace_spaces,$@))),\ - $(subst $(cmd_$(call replace_spaces,$@)),,$(cmd_$(1)))) - -# Helper that is non-empty when a prerequisite changes. -# Normally make does this implicitly, but we force rules to always run -# so we can check their command lines. -# $? -- new prerequisites -# $| -- order-only dependencies -prereq_changed = $(filter-out FORCE_DO_CMD,$(filter-out $|,$?)) - -# Helper that executes all postbuilds until one fails. -define do_postbuilds - @E=0;\ - for p in $(POSTBUILDS); do\ - eval $$p;\ - E=$$?;\ - if [ $$E -ne 0 ]; then\ - break;\ - fi;\ - done;\ - if [ $$E -ne 0 ]; then\ - rm -rf "$@";\ - exit $$E;\ - fi -endef - -# do_cmd: run a command via the above cmd_foo names, if necessary. -# Should always run for a given target to handle command-line changes. -# Second argument, if non-zero, makes it do asm/C/C++ dependency munging. -# Third argument, if non-zero, makes it do POSTBUILDS processing. -# Note: We intentionally do NOT call dirx for depfile, since it contains ? for -# spaces already and dirx strips the ? characters. -define do_cmd -$(if $(or $(command_changed),$(prereq_changed)), - @$(call exact_echo, $($(quiet)cmd_$(1))) - @mkdir -p "$(call dirx,$@)" "$(dir $(depfile))" - $(if $(findstring flock,$(word 2,$(cmd_$1))), - @$(cmd_$(1)) - @echo " $(quiet_cmd_$(1)): Finished", - @$(cmd_$(1)) - ) - @$(call exact_echo,$(call escape_vars,cmd_$(call replace_spaces,$@) := $(cmd_$(1)))) > $(depfile) - @$(if $(2),$(fixup_dep)) - $(if $(and $(3), $(POSTBUILDS)), - $(call do_postbuilds) - ) -) -endef - -# Declare the "all" target first so it is the default, -# even though we don't have the deps yet. -.PHONY: all -all: - -# make looks for ways to re-generate included makefiles, but in our case, we -# don't have a direct way. Explicitly telling make that it has nothing to do -# for them makes it go faster. -%.d: ; - -# Use FORCE_DO_CMD to force a target to run. Should be coupled with -# do_cmd. -.PHONY: FORCE_DO_CMD -FORCE_DO_CMD: - -TOOLSET := target -# Suffix rules, putting all outputs into $(obj). -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.m FORCE_DO_CMD - @$(call do_cmd,objc,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.mm FORCE_DO_CMD - @$(call do_cmd,objcxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.S FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.s FORCE_DO_CMD - @$(call do_cmd,cc,1) - -# Try building from generated source, too. 
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.m FORCE_DO_CMD - @$(call do_cmd,objc,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.mm FORCE_DO_CMD - @$(call do_cmd,objcxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.S FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.s FORCE_DO_CMD - @$(call do_cmd,cc,1) - -$(obj).$(TOOLSET)/%.o: $(obj)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.m FORCE_DO_CMD - @$(call do_cmd,objc,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.mm FORCE_DO_CMD - @$(call do_cmd,objcxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.S FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.s FORCE_DO_CMD - @$(call do_cmd,cc,1) - - -ifeq ($(strip $(foreach prefix,$(NO_LOAD),\ - $(findstring $(join ^,$(prefix)),\ - $(join ^,binding.target.mk)))),) - include binding.target.mk -endif - -quiet_cmd_regen_makefile = ACTION Regenerating $@ -cmd_regen_makefile = cd $(srcdir); /Users/trott/io.js/deps/npm/node_modules/node-gyp/gyp/gyp_main.py -fmake --ignore-environment "--toplevel-dir=." -I/Users/trott/io.js/test/addons-napi/6_object_wrap/build/config.gypi -I/Users/trott/io.js/deps/npm/node_modules/node-gyp/addon.gypi -I/Users/trott/io.js/common.gypi "--depth=." "-Goutput_dir=." "--generator-output=build" "-Dlibrary=shared_library" "-Dvisibility=default" "-Dnode_root_dir=/Users/trott/io.js" "-Dnode_gyp_dir=/Users/trott/io.js/deps/npm/node_modules/node-gyp" "-Dnode_lib_file=/Users/trott/io.js/$(Configuration)/node.lib" "-Dmodule_root_dir=/Users/trott/io.js/test/addons-napi/6_object_wrap" "-Dnode_engine=v8" binding.gyp -Makefile: $(srcdir)/../../../deps/npm/node_modules/node-gyp/addon.gypi $(srcdir)/build/config.gypi $(srcdir)/binding.gyp $(srcdir)/../../../common.gypi - $(call do_cmd,regen_makefile) - -# "all" is a concatenation of the "all" targets from all the included -# sub-makefiles. This is just here to clarify. -all: - -# Add in dependency-tracking rules. $(all_deps) is the list of every single -# target in our tree. Only consider the ones with .d (dependency) info: -d_files := $(wildcard $(foreach f,$(all_deps),$(depsdir)/$(f).d)) -ifneq ($(d_files),) - include $(d_files) -endif diff --git a/test/addons-napi/6_object_wrap/build/binding.Makefile b/test/addons-napi/6_object_wrap/build/binding.Makefile deleted file mode 100644 index 0556c70f61905f..00000000000000 --- a/test/addons-napi/6_object_wrap/build/binding.Makefile +++ /dev/null @@ -1,6 +0,0 @@ -# This file is generated by gyp; do not edit. - -export builddir_name ?= ./build/. -.PHONY: all -all: - $(MAKE) binding diff --git a/test/addons-napi/6_object_wrap/build/binding.target.mk b/test/addons-napi/6_object_wrap/build/binding.target.mk deleted file mode 100644 index 6bca2c1f4f6abd..00000000000000 --- a/test/addons-napi/6_object_wrap/build/binding.target.mk +++ /dev/null @@ -1,179 +0,0 @@ -# This file is generated by gyp; do not edit. 
- -TOOLSET := target -TARGET := binding -DEFS_Debug := \ - '-DNODE_GYP_MODULE_NAME=binding' \ - '-DUSING_UV_SHARED=1' \ - '-DUSING_V8_SHARED=1' \ - '-DV8_DEPRECATION_WARNINGS=1' \ - '-D_DARWIN_USE_64_BIT_INODE=1' \ - '-D_LARGEFILE_SOURCE' \ - '-D_FILE_OFFSET_BITS=64' \ - '-DBUILDING_NODE_EXTENSION' \ - '-DDEBUG' \ - '-D_DEBUG' \ - '-DV8_ENABLE_CHECKS' - -# Flags passed to all source files. -CFLAGS_Debug := \ - -O0 \ - -gdwarf-2 \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -Wall \ - -Wendif-labels \ - -W \ - -Wno-unused-parameter - -# Flags passed to only C files. -CFLAGS_C_Debug := \ - -fno-strict-aliasing - -# Flags passed to only C++ files. -CFLAGS_CC_Debug := \ - -std=gnu++0x \ - -stdlib=libc++ \ - -fno-rtti \ - -fno-exceptions \ - -fno-threadsafe-statics \ - -fno-strict-aliasing - -# Flags passed to only ObjC files. -CFLAGS_OBJC_Debug := - -# Flags passed to only ObjC++ files. -CFLAGS_OBJCC_Debug := - -INCS_Debug := \ - -I/Users/trott/io.js/include/node \ - -I/Users/trott/io.js/src \ - -I/Users/trott/io.js/deps/uv/include \ - -I/Users/trott/io.js/deps/v8/include - -DEFS_Release := \ - '-DNODE_GYP_MODULE_NAME=binding' \ - '-DUSING_UV_SHARED=1' \ - '-DUSING_V8_SHARED=1' \ - '-DV8_DEPRECATION_WARNINGS=1' \ - '-D_DARWIN_USE_64_BIT_INODE=1' \ - '-D_LARGEFILE_SOURCE' \ - '-D_FILE_OFFSET_BITS=64' \ - '-DBUILDING_NODE_EXTENSION' - -# Flags passed to all source files. -CFLAGS_Release := \ - -Os \ - -gdwarf-2 \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -Wall \ - -Wendif-labels \ - -W \ - -Wno-unused-parameter - -# Flags passed to only C files. -CFLAGS_C_Release := \ - -fno-strict-aliasing - -# Flags passed to only C++ files. -CFLAGS_CC_Release := \ - -std=gnu++0x \ - -stdlib=libc++ \ - -fno-rtti \ - -fno-exceptions \ - -fno-threadsafe-statics \ - -fno-strict-aliasing - -# Flags passed to only ObjC files. -CFLAGS_OBJC_Release := - -# Flags passed to only ObjC++ files. -CFLAGS_OBJCC_Release := - -INCS_Release := \ - -I/Users/trott/io.js/include/node \ - -I/Users/trott/io.js/src \ - -I/Users/trott/io.js/deps/uv/include \ - -I/Users/trott/io.js/deps/v8/include - -OBJS := \ - $(obj).target/$(TARGET)/binding.o \ - $(obj).target/$(TARGET)/myobject.o - -# Add to the list of files we specially track dependencies for. -all_deps += $(OBJS) - -# CFLAGS et al overrides must be target-local. -# See "Target-specific Variable Values" in the GNU Make manual. -$(OBJS): TOOLSET := $(TOOLSET) -$(OBJS): GYP_CFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) -$(OBJS): GYP_CXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) -$(OBJS): GYP_OBJCFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) $(CFLAGS_OBJC_$(BUILDTYPE)) -$(OBJS): GYP_OBJCXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) $(CFLAGS_OBJCC_$(BUILDTYPE)) - -# Suffix rules, putting all outputs into $(obj). - -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) - -# Try building from generated source, too. - -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) - -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) - -# End of this set of suffix rules -### Rules for final target. 
-LDFLAGS_Debug := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -L$(builddir) \ - -stdlib=libc++ - -LIBTOOLFLAGS_Debug := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first - -LDFLAGS_Release := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -L$(builddir) \ - -stdlib=libc++ - -LIBTOOLFLAGS_Release := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first - -LIBS := - -$(builddir)/binding.node: GYP_LDFLAGS := $(LDFLAGS_$(BUILDTYPE)) -$(builddir)/binding.node: LIBS := $(LIBS) -$(builddir)/binding.node: GYP_LIBTOOLFLAGS := $(LIBTOOLFLAGS_$(BUILDTYPE)) -$(builddir)/binding.node: TOOLSET := $(TOOLSET) -$(builddir)/binding.node: $(OBJS) FORCE_DO_CMD - $(call do_cmd,solink_module) - -all_deps += $(builddir)/binding.node -# Add target alias -.PHONY: binding -binding: $(builddir)/binding.node - -# Short alias for building this executable. -.PHONY: binding.node -binding.node: $(builddir)/binding.node - -# Add executable to "all" target. -.PHONY: all -all: $(builddir)/binding.node - diff --git a/test/addons-napi/6_object_wrap/build/config.gypi b/test/addons-napi/6_object_wrap/build/config.gypi deleted file mode 100644 index 6a35cfccbb772b..00000000000000 --- a/test/addons-napi/6_object_wrap/build/config.gypi +++ /dev/null @@ -1,70 +0,0 @@ -# Do not edit. File was generated by node-gyp's "configure" step -{ - "target_defaults": { - "cflags": [], - "default_configuration": "Release", - "defines": [], - "include_dirs": [], - "libraries": [] - }, - "variables": { - "asan": 0, - "coverage": "false", - "debug_devtools": "node", - "debug_http2": "false", - "debug_nghttp2": "false", - "force_dynamic_crt": 0, - "host_arch": "x64", - "icu_data_file": "icudt59l.dat", - "icu_data_in": "../../deps/icu-small/source/data/in/icudt59l.dat", - "icu_endianness": "l", - "icu_gyp_path": "tools/icu/icu-generic.gyp", - "icu_locales": "en,root", - "icu_path": "deps/icu-small", - "icu_small": "true", - "icu_ver_major": "59", - "llvm_version": 0, - "node_byteorder": "little", - "node_enable_d8": "false", - "node_enable_v8_vtunejit": "false", - "node_install_npm": "true", - "node_module_version": 58, - "node_no_browser_globals": "false", - "node_prefix": "/usr/local", - "node_release_urlbase": "", - "node_shared": "false", - "node_shared_cares": "false", - "node_shared_http_parser": "false", - "node_shared_libuv": "false", - "node_shared_openssl": "false", - "node_shared_zlib": "false", - "node_tag": "", - "node_use_bundled_v8": "true", - "node_use_dtrace": "true", - "node_use_etw": "false", - "node_use_lttng": "false", - "node_use_openssl": "true", - "node_use_perfctr": "false", - "node_use_v8_platform": "true", - "node_without_node_options": "false", - "openssl_fips": "", - "openssl_no_asm": 0, - "shlib_suffix": "58.dylib", - "target_arch": "x64", - "uv_parent_path": "/deps/uv/", - "uv_use_dtrace": "true", - "v8_enable_gdbjit": 0, - "v8_enable_i18n_support": 1, - "v8_enable_inspector": 1, - "v8_no_strict_aliasing": 1, - "v8_optimized_debug": 0, - "v8_promise_internal_field_count": 1, - "v8_random_seed": 0, - "v8_trace_maps": 0, - "v8_use_snapshot": "true", - "want_separate_host_toolset": 0, - "xcode_version": "8.0", - "nodedir": "/Users/trott/io.js", - "standalone_static_library": 1 - } -} diff --git a/test/addons-napi/6_object_wrap/build/gyp-mac-tool b/test/addons-napi/6_object_wrap/build/gyp-mac-tool deleted file mode 100755 
index 8ef02b0493a003..00000000000000 --- a/test/addons-napi/6_object_wrap/build/gyp-mac-tool +++ /dev/null @@ -1,611 +0,0 @@ -#!/usr/bin/env python -# Generated by gyp. Do not edit. -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Utility functions to perform Xcode-style build steps. - -These functions are executed via gyp-mac-tool when using the Makefile generator. -""" - -import fcntl -import fnmatch -import glob -import json -import os -import plistlib -import re -import shutil -import string -import subprocess -import sys -import tempfile - - -def main(args): - executor = MacTool() - exit_code = executor.Dispatch(args) - if exit_code is not None: - sys.exit(exit_code) - - -class MacTool(object): - """This class performs all the Mac tooling steps. The methods can either be - executed directly, or dispatched from an argument list.""" - - def Dispatch(self, args): - """Dispatches a string command to a method.""" - if len(args) < 1: - raise Exception("Not enough arguments") - - method = "Exec%s" % self._CommandifyName(args[0]) - return getattr(self, method)(*args[1:]) - - def _CommandifyName(self, name_string): - """Transforms a tool name like copy-info-plist to CopyInfoPlist""" - return name_string.title().replace('-', '') - - def ExecCopyBundleResource(self, source, dest, convert_to_binary): - """Copies a resource file to the bundle/Resources directory, performing any - necessary compilation on each resource.""" - extension = os.path.splitext(source)[1].lower() - if os.path.isdir(source): - # Copy tree. - # TODO(thakis): This copies file attributes like mtime, while the - # single-file branch below doesn't. This should probably be changed to - # be consistent with the single-file branch. - if os.path.exists(dest): - shutil.rmtree(dest) - shutil.copytree(source, dest) - elif extension == '.xib': - return self._CopyXIBFile(source, dest) - elif extension == '.storyboard': - return self._CopyXIBFile(source, dest) - elif extension == '.strings': - self._CopyStringsFile(source, dest, convert_to_binary) - else: - shutil.copy(source, dest) - - def _CopyXIBFile(self, source, dest): - """Compiles a XIB file with ibtool into a binary plist in the bundle.""" - - # ibtool sometimes crashes with relative paths. See crbug.com/314728. 
- base = os.path.dirname(os.path.realpath(__file__)) - if os.path.relpath(source): - source = os.path.join(base, source) - if os.path.relpath(dest): - dest = os.path.join(base, dest) - - args = ['xcrun', 'ibtool', '--errors', '--warnings', '--notices', - '--output-format', 'human-readable-text', '--compile', dest, source] - ibtool_section_re = re.compile(r'/\*.*\*/') - ibtool_re = re.compile(r'.*note:.*is clipping its content') - ibtoolout = subprocess.Popen(args, stdout=subprocess.PIPE) - current_section_header = None - for line in ibtoolout.stdout: - if ibtool_section_re.match(line): - current_section_header = line - elif not ibtool_re.match(line): - if current_section_header: - sys.stdout.write(current_section_header) - current_section_header = None - sys.stdout.write(line) - return ibtoolout.returncode - - def _ConvertToBinary(self, dest): - subprocess.check_call([ - 'xcrun', 'plutil', '-convert', 'binary1', '-o', dest, dest]) - - def _CopyStringsFile(self, source, dest, convert_to_binary): - """Copies a .strings file using iconv to reconvert the input into UTF-16.""" - input_code = self._DetectInputEncoding(source) or "UTF-8" - - # Xcode's CpyCopyStringsFile / builtin-copyStrings seems to call - # CFPropertyListCreateFromXMLData() behind the scenes; at least it prints - # CFPropertyListCreateFromXMLData(): Old-style plist parser: missing - # semicolon in dictionary. - # on invalid files. Do the same kind of validation. - import CoreFoundation - s = open(source, 'rb').read() - d = CoreFoundation.CFDataCreate(None, s, len(s)) - _, error = CoreFoundation.CFPropertyListCreateFromXMLData(None, d, 0, None) - if error: - return - - fp = open(dest, 'wb') - fp.write(s.decode(input_code).encode('UTF-16')) - fp.close() - - if convert_to_binary == 'True': - self._ConvertToBinary(dest) - - def _DetectInputEncoding(self, file_name): - """Reads the first few bytes from file_name and tries to guess the text - encoding. Returns None as a guess if it can't detect it.""" - fp = open(file_name, 'rb') - try: - header = fp.read(3) - except e: - fp.close() - return None - fp.close() - if header.startswith("\xFE\xFF"): - return "UTF-16" - elif header.startswith("\xFF\xFE"): - return "UTF-16" - elif header.startswith("\xEF\xBB\xBF"): - return "UTF-8" - else: - return None - - def ExecCopyInfoPlist(self, source, dest, convert_to_binary, *keys): - """Copies the |source| Info.plist to the destination directory |dest|.""" - # Read the source Info.plist into memory. - fd = open(source, 'r') - lines = fd.read() - fd.close() - - # Insert synthesized key/value pairs (e.g. BuildMachineOSBuild). - plist = plistlib.readPlistFromString(lines) - if keys: - plist = dict(plist.items() + json.loads(keys[0]).items()) - lines = plistlib.writePlistToString(plist) - - # Go through all the environment variables and replace them as variables in - # the file. - IDENT_RE = re.compile(r'[/\s]') - for key in os.environ: - if key.startswith('_'): - continue - evar = '${%s}' % key - evalue = os.environ[key] - lines = string.replace(lines, evar, evalue) - - # Xcode supports various suffices on environment variables, which are - # all undocumented. :rfc1034identifier is used in the standard project - # template these days, and :identifier was used earlier. They are used to - # convert non-url characters into things that look like valid urls -- - # except that the replacement character for :identifier, '_' isn't valid - # in a URL either -- oops, hence :rfc1034identifier was born. 
- evar = '${%s:identifier}' % key - evalue = IDENT_RE.sub('_', os.environ[key]) - lines = string.replace(lines, evar, evalue) - - evar = '${%s:rfc1034identifier}' % key - evalue = IDENT_RE.sub('-', os.environ[key]) - lines = string.replace(lines, evar, evalue) - - # Remove any keys with values that haven't been replaced. - lines = lines.split('\n') - for i in range(len(lines)): - if lines[i].strip().startswith("${"): - lines[i] = None - lines[i - 1] = None - lines = '\n'.join(filter(lambda x: x is not None, lines)) - - # Write out the file with variables replaced. - fd = open(dest, 'w') - fd.write(lines) - fd.close() - - # Now write out PkgInfo file now that the Info.plist file has been - # "compiled". - self._WritePkgInfo(dest) - - if convert_to_binary == 'True': - self._ConvertToBinary(dest) - - def _WritePkgInfo(self, info_plist): - """This writes the PkgInfo file from the data stored in Info.plist.""" - plist = plistlib.readPlist(info_plist) - if not plist: - return - - # Only create PkgInfo for executable types. - package_type = plist['CFBundlePackageType'] - if package_type != 'APPL': - return - - # The format of PkgInfo is eight characters, representing the bundle type - # and bundle signature, each four characters. If that is missing, four - # '?' characters are used instead. - signature_code = plist.get('CFBundleSignature', '????') - if len(signature_code) != 4: # Wrong length resets everything, too. - signature_code = '?' * 4 - - dest = os.path.join(os.path.dirname(info_plist), 'PkgInfo') - fp = open(dest, 'w') - fp.write('%s%s' % (package_type, signature_code)) - fp.close() - - def ExecFlock(self, lockfile, *cmd_list): - """Emulates the most basic behavior of Linux's flock(1).""" - # Rely on exception handling to report errors. - fd = os.open(lockfile, os.O_RDONLY|os.O_NOCTTY|os.O_CREAT, 0o666) - fcntl.flock(fd, fcntl.LOCK_EX) - return subprocess.call(cmd_list) - - def ExecFilterLibtool(self, *cmd_list): - """Calls libtool and filters out '/path/to/libtool: file: foo.o has no - symbols'.""" - libtool_re = re.compile(r'^.*libtool: file: .* has no symbols$') - libtool_re5 = re.compile( - r'^.*libtool: warning for library: ' + - r'.* the table of contents is empty ' + - r'\(no object file members in the library define global symbols\)$') - env = os.environ.copy() - # Ref: - # http://www.opensource.apple.com/source/cctools/cctools-809/misc/libtool.c - # The problem with this flag is that it resets the file mtime on the file to - # epoch=0, e.g. 1970-1-1 or 1969-12-31 depending on timezone. - env['ZERO_AR_DATE'] = '1' - libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env) - _, err = libtoolout.communicate() - for line in err.splitlines(): - if not libtool_re.match(line) and not libtool_re5.match(line): - print >>sys.stderr, line - # Unconditionally touch the output .a file on the command line if present - # and the command succeeded. A bit hacky. - if not libtoolout.returncode: - for i in range(len(cmd_list) - 1): - if cmd_list[i] == "-o" and cmd_list[i+1].endswith('.a'): - os.utime(cmd_list[i+1], None) - break - return libtoolout.returncode - - def ExecPackageFramework(self, framework, version): - """Takes a path to Something.framework and the Current version of that and - sets up all the symlinks.""" - # Find the name of the binary based on the part before the ".framework". 
- binary = os.path.basename(framework).split('.')[0] - - CURRENT = 'Current' - RESOURCES = 'Resources' - VERSIONS = 'Versions' - - if not os.path.exists(os.path.join(framework, VERSIONS, version, binary)): - # Binary-less frameworks don't seem to contain symlinks (see e.g. - # chromium's out/Debug/org.chromium.Chromium.manifest/ bundle). - return - - # Move into the framework directory to set the symlinks correctly. - pwd = os.getcwd() - os.chdir(framework) - - # Set up the Current version. - self._Relink(version, os.path.join(VERSIONS, CURRENT)) - - # Set up the root symlinks. - self._Relink(os.path.join(VERSIONS, CURRENT, binary), binary) - self._Relink(os.path.join(VERSIONS, CURRENT, RESOURCES), RESOURCES) - - # Back to where we were before! - os.chdir(pwd) - - def _Relink(self, dest, link): - """Creates a symlink to |dest| named |link|. If |link| already exists, - it is overwritten.""" - if os.path.lexists(link): - os.remove(link) - os.symlink(dest, link) - - def ExecCompileXcassets(self, keys, *inputs): - """Compiles multiple .xcassets files into a single .car file. - - This invokes 'actool' to compile all the inputs .xcassets files. The - |keys| arguments is a json-encoded dictionary of extra arguments to - pass to 'actool' when the asset catalogs contains an application icon - or a launch image. - - Note that 'actool' does not create the Assets.car file if the asset - catalogs does not contains imageset. - """ - command_line = [ - 'xcrun', 'actool', '--output-format', 'human-readable-text', - '--compress-pngs', '--notices', '--warnings', '--errors', - ] - is_iphone_target = 'IPHONEOS_DEPLOYMENT_TARGET' in os.environ - if is_iphone_target: - platform = os.environ['CONFIGURATION'].split('-')[-1] - if platform not in ('iphoneos', 'iphonesimulator'): - platform = 'iphonesimulator' - command_line.extend([ - '--platform', platform, '--target-device', 'iphone', - '--target-device', 'ipad', '--minimum-deployment-target', - os.environ['IPHONEOS_DEPLOYMENT_TARGET'], '--compile', - os.path.abspath(os.environ['CONTENTS_FOLDER_PATH']), - ]) - else: - command_line.extend([ - '--platform', 'macosx', '--target-device', 'mac', - '--minimum-deployment-target', os.environ['MACOSX_DEPLOYMENT_TARGET'], - '--compile', - os.path.abspath(os.environ['UNLOCALIZED_RESOURCES_FOLDER_PATH']), - ]) - if keys: - keys = json.loads(keys) - for key, value in keys.iteritems(): - arg_name = '--' + key - if isinstance(value, bool): - if value: - command_line.append(arg_name) - elif isinstance(value, list): - for v in value: - command_line.append(arg_name) - command_line.append(str(v)) - else: - command_line.append(arg_name) - command_line.append(str(value)) - # Note: actool crashes if inputs path are relative, so use os.path.abspath - # to get absolute path name for inputs. - command_line.extend(map(os.path.abspath, inputs)) - subprocess.check_call(command_line) - - def ExecMergeInfoPlist(self, output, *inputs): - """Merge multiple .plist files into a single .plist file.""" - merged_plist = {} - for path in inputs: - plist = self._LoadPlistMaybeBinary(path) - self._MergePlist(merged_plist, plist) - plistlib.writePlist(merged_plist, output) - - def ExecCodeSignBundle(self, key, resource_rules, entitlements, provisioning): - """Code sign a bundle. - - This function tries to code sign an iOS bundle, following the same - algorithm as Xcode: - 1. copy ResourceRules.plist from the user or the SDK into the bundle, - 2. 
pick the provisioning profile that best match the bundle identifier, - and copy it into the bundle as embedded.mobileprovision, - 3. copy Entitlements.plist from user or SDK next to the bundle, - 4. code sign the bundle. - """ - resource_rules_path = self._InstallResourceRules(resource_rules) - substitutions, overrides = self._InstallProvisioningProfile( - provisioning, self._GetCFBundleIdentifier()) - entitlements_path = self._InstallEntitlements( - entitlements, substitutions, overrides) - subprocess.check_call([ - 'codesign', '--force', '--sign', key, '--resource-rules', - resource_rules_path, '--entitlements', entitlements_path, - os.path.join( - os.environ['TARGET_BUILD_DIR'], - os.environ['FULL_PRODUCT_NAME'])]) - - def _InstallResourceRules(self, resource_rules): - """Installs ResourceRules.plist from user or SDK into the bundle. - - Args: - resource_rules: string, optional, path to the ResourceRules.plist file - to use, default to "${SDKROOT}/ResourceRules.plist" - - Returns: - Path to the copy of ResourceRules.plist into the bundle. - """ - source_path = resource_rules - target_path = os.path.join( - os.environ['BUILT_PRODUCTS_DIR'], - os.environ['CONTENTS_FOLDER_PATH'], - 'ResourceRules.plist') - if not source_path: - source_path = os.path.join( - os.environ['SDKROOT'], 'ResourceRules.plist') - shutil.copy2(source_path, target_path) - return target_path - - def _InstallProvisioningProfile(self, profile, bundle_identifier): - """Installs embedded.mobileprovision into the bundle. - - Args: - profile: string, optional, short name of the .mobileprovision file - to use, if empty or the file is missing, the best file installed - will be used - bundle_identifier: string, value of CFBundleIdentifier from Info.plist - - Returns: - A tuple containing two dictionary: variables substitutions and values - to overrides when generating the entitlements file. - """ - source_path, provisioning_data, team_id = self._FindProvisioningProfile( - profile, bundle_identifier) - target_path = os.path.join( - os.environ['BUILT_PRODUCTS_DIR'], - os.environ['CONTENTS_FOLDER_PATH'], - 'embedded.mobileprovision') - shutil.copy2(source_path, target_path) - substitutions = self._GetSubstitutions(bundle_identifier, team_id + '.') - return substitutions, provisioning_data['Entitlements'] - - def _FindProvisioningProfile(self, profile, bundle_identifier): - """Finds the .mobileprovision file to use for signing the bundle. - - Checks all the installed provisioning profiles (or if the user specified - the PROVISIONING_PROFILE variable, only consult it) and select the most - specific that correspond to the bundle identifier. - - Args: - profile: string, optional, short name of the .mobileprovision file - to use, if empty or the file is missing, the best file installed - will be used - bundle_identifier: string, value of CFBundleIdentifier from Info.plist - - Returns: - A tuple of the path to the selected provisioning profile, the data of - the embedded plist in the provisioning profile and the team identifier - to use for code signing. - - Raises: - SystemExit: if no .mobileprovision can be used to sign the bundle. 
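The selection rule described in the docstring above (match the team-qualified bundle identifier against each profile's application-identifier pattern, then keep the longest matching pattern) can be sketched roughly as follows; the profile names and identifiers are invented for illustration:

    import fnmatch

    profiles = {                              # pattern -> profile file (made up)
        'ABCDE12345.*':               'wildcard.mobileprovision',
        'ABCDE12345.com.example.*':   'team-apps.mobileprovision',
        'ABCDE12345.com.example.app': 'exact.mobileprovision',
    }
    app_id = 'ABCDE12345.com.example.app'     # team identifier + bundle identifier

    matching = {p: f for p, f in profiles.items() if fnmatch.fnmatch(app_id, p)}
    best = max(matching, key=len)             # longest pattern wins (most specific)
    print(matching[best])                     # -> exact.mobileprovision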
- """ - profiles_dir = os.path.join( - os.environ['HOME'], 'Library', 'MobileDevice', 'Provisioning Profiles') - if not os.path.isdir(profiles_dir): - print >>sys.stderr, ( - 'cannot find mobile provisioning for %s' % bundle_identifier) - sys.exit(1) - provisioning_profiles = None - if profile: - profile_path = os.path.join(profiles_dir, profile + '.mobileprovision') - if os.path.exists(profile_path): - provisioning_profiles = [profile_path] - if not provisioning_profiles: - provisioning_profiles = glob.glob( - os.path.join(profiles_dir, '*.mobileprovision')) - valid_provisioning_profiles = {} - for profile_path in provisioning_profiles: - profile_data = self._LoadProvisioningProfile(profile_path) - app_id_pattern = profile_data.get( - 'Entitlements', {}).get('application-identifier', '') - for team_identifier in profile_data.get('TeamIdentifier', []): - app_id = '%s.%s' % (team_identifier, bundle_identifier) - if fnmatch.fnmatch(app_id, app_id_pattern): - valid_provisioning_profiles[app_id_pattern] = ( - profile_path, profile_data, team_identifier) - if not valid_provisioning_profiles: - print >>sys.stderr, ( - 'cannot find mobile provisioning for %s' % bundle_identifier) - sys.exit(1) - # If the user has multiple provisioning profiles installed that can be - # used for ${bundle_identifier}, pick the most specific one (ie. the - # provisioning profile whose pattern is the longest). - selected_key = max(valid_provisioning_profiles, key=lambda v: len(v)) - return valid_provisioning_profiles[selected_key] - - def _LoadProvisioningProfile(self, profile_path): - """Extracts the plist embedded in a provisioning profile. - - Args: - profile_path: string, path to the .mobileprovision file - - Returns: - Content of the plist embedded in the provisioning profile as a dictionary. - """ - with tempfile.NamedTemporaryFile() as temp: - subprocess.check_call([ - 'security', 'cms', '-D', '-i', profile_path, '-o', temp.name]) - return self._LoadPlistMaybeBinary(temp.name) - - def _MergePlist(self, merged_plist, plist): - """Merge |plist| into |merged_plist|.""" - for key, value in plist.iteritems(): - if isinstance(value, dict): - merged_value = merged_plist.get(key, {}) - if isinstance(merged_value, dict): - self._MergePlist(merged_value, value) - merged_plist[key] = merged_value - else: - merged_plist[key] = value - else: - merged_plist[key] = value - - def _LoadPlistMaybeBinary(self, plist_path): - """Loads into a memory a plist possibly encoded in binary format. - - This is a wrapper around plistlib.readPlist that tries to convert the - plist to the XML format if it can't be parsed (assuming that it is in - the binary format). - - Args: - plist_path: string, path to a plist file, in XML or binary format - - Returns: - Content of the plist as a dictionary. - """ - try: - # First, try to read the file using plistlib that only supports XML, - # and if an exception is raised, convert a temporary copy to XML and - # load that copy. - return plistlib.readPlist(plist_path) - except: - pass - with tempfile.NamedTemporaryFile() as temp: - shutil.copy2(plist_path, temp.name) - subprocess.check_call(['plutil', '-convert', 'xml1', temp.name]) - return plistlib.readPlist(temp.name) - - def _GetSubstitutions(self, bundle_identifier, app_identifier_prefix): - """Constructs a dictionary of variable substitutions for Entitlements.plist. 
- - Args: - bundle_identifier: string, value of CFBundleIdentifier from Info.plist - app_identifier_prefix: string, value for AppIdentifierPrefix - - Returns: - Dictionary of substitutions to apply when generating Entitlements.plist. - """ - return { - 'CFBundleIdentifier': bundle_identifier, - 'AppIdentifierPrefix': app_identifier_prefix, - } - - def _GetCFBundleIdentifier(self): - """Extracts CFBundleIdentifier value from Info.plist in the bundle. - - Returns: - Value of CFBundleIdentifier in the Info.plist located in the bundle. - """ - info_plist_path = os.path.join( - os.environ['TARGET_BUILD_DIR'], - os.environ['INFOPLIST_PATH']) - info_plist_data = self._LoadPlistMaybeBinary(info_plist_path) - return info_plist_data['CFBundleIdentifier'] - - def _InstallEntitlements(self, entitlements, substitutions, overrides): - """Generates and install the ${BundleName}.xcent entitlements file. - - Expands variables "$(variable)" pattern in the source entitlements file, - add extra entitlements defined in the .mobileprovision file and the copy - the generated plist to "${BundlePath}.xcent". - - Args: - entitlements: string, optional, path to the Entitlements.plist template - to use, defaults to "${SDKROOT}/Entitlements.plist" - substitutions: dictionary, variable substitutions - overrides: dictionary, values to add to the entitlements - - Returns: - Path to the generated entitlements file. - """ - source_path = entitlements - target_path = os.path.join( - os.environ['BUILT_PRODUCTS_DIR'], - os.environ['PRODUCT_NAME'] + '.xcent') - if not source_path: - source_path = os.path.join( - os.environ['SDKROOT'], - 'Entitlements.plist') - shutil.copy2(source_path, target_path) - data = self._LoadPlistMaybeBinary(target_path) - data = self._ExpandVariables(data, substitutions) - if overrides: - for key in overrides: - if key not in data: - data[key] = overrides[key] - plistlib.writePlist(data, target_path) - return target_path - - def _ExpandVariables(self, data, substitutions): - """Expands variables "$(variable)" in data. - - Args: - data: object, can be either string, list or dictionary - substitutions: dictionary, variable substitutions to perform - - Returns: - Copy of data where each references to "$(variable)" has been replaced - by the corresponding value found in substitutions, or left intact if - the key was not found. - """ - if isinstance(data, str): - for key, value in substitutions.iteritems(): - data = data.replace('$(%s)' % key, value) - return data - if isinstance(data, list): - return [self._ExpandVariables(v, substitutions) for v in data] - if isinstance(data, dict): - return {k: self._ExpandVariables(data[k], substitutions) for k in data} - return data - -if __name__ == '__main__': - sys.exit(main(sys.argv[1:])) diff --git a/test/addons-napi/7_factory_wrap/build/Makefile b/test/addons-napi/7_factory_wrap/build/Makefile deleted file mode 100644 index 24479d1ac381a2..00000000000000 --- a/test/addons-napi/7_factory_wrap/build/Makefile +++ /dev/null @@ -1,342 +0,0 @@ -# We borrow heavily from the kernel build setup, though we are simpler since -# we don't have Kconfig tweaking settings on us. - -# The implicit make rules have it looking for RCS files, among other things. -# We instead explicitly write all the rules we care about. -# It's even quicker (saves ~200ms) to pass -r on the command line. -MAKEFLAGS=-r - -# The source directory tree. -srcdir := .. -abs_srcdir := $(abspath $(srcdir)) - -# The name of the builddir. -builddir_name ?= . 
- -# The V=1 flag on command line makes us verbosely print command lines. -ifdef V - quiet= -else - quiet=quiet_ -endif - -# Specify BUILDTYPE=Release on the command line for a release build. -BUILDTYPE ?= Release - -# Directory all our build output goes into. -# Note that this must be two directories beneath src/ for unit tests to pass, -# as they reach into the src/ directory for data with relative paths. -builddir ?= $(builddir_name)/$(BUILDTYPE) -abs_builddir := $(abspath $(builddir)) -depsdir := $(builddir)/.deps - -# Object output directory. -obj := $(builddir)/obj -abs_obj := $(abspath $(obj)) - -# We build up a list of every single one of the targets so we can slurp in the -# generated dependency rule Makefiles in one pass. -all_deps := - - - -CC.target ?= $(CC) -CFLAGS.target ?= $(CPPFLAGS) $(CFLAGS) -CXX.target ?= $(CXX) -CXXFLAGS.target ?= $(CPPFLAGS) $(CXXFLAGS) -LINK.target ?= $(LINK) -LDFLAGS.target ?= $(LDFLAGS) -AR.target ?= $(AR) - -# C++ apps need to be linked with g++. -LINK ?= $(CXX.target) - -# TODO(evan): move all cross-compilation logic to gyp-time so we don't need -# to replicate this environment fallback in make as well. -CC.host ?= gcc -CFLAGS.host ?= $(CPPFLAGS_host) $(CFLAGS_host) -CXX.host ?= g++ -CXXFLAGS.host ?= $(CPPFLAGS_host) $(CXXFLAGS_host) -LINK.host ?= $(CXX.host) -LDFLAGS.host ?= -AR.host ?= ar - -# Define a dir function that can handle spaces. -# http://www.gnu.org/software/make/manual/make.html#Syntax-of-Functions -# "leading spaces cannot appear in the text of the first argument as written. -# These characters can be put into the argument value by variable substitution." -empty := -space := $(empty) $(empty) - -# http://stackoverflow.com/questions/1189781/using-make-dir-or-notdir-on-a-path-with-spaces -replace_spaces = $(subst $(space),?,$1) -unreplace_spaces = $(subst ?,$(space),$1) -dirx = $(call unreplace_spaces,$(dir $(call replace_spaces,$1))) - -# Flags to make gcc output dependency info. Note that you need to be -# careful here to use the flags that ccache and distcc can understand. -# We write to a dep file on the side first and then rename at the end -# so we can't end up with a broken dep file. -depfile = $(depsdir)/$(call replace_spaces,$@).d -DEPFLAGS = -MMD -MF $(depfile).raw - -# We have to fixup the deps output in a few ways. -# (1) the file output should mention the proper .o file. -# ccache or distcc lose the path to the target, so we convert a rule of -# the form: -# foobar.o: DEP1 DEP2 -# into -# path/to/foobar.o: DEP1 DEP2 -# (2) we want missing files not to cause us to fail to build. -# We want to rewrite -# foobar.o: DEP1 DEP2 \ -# DEP3 -# to -# DEP1: -# DEP2: -# DEP3: -# so if the files are missing, they're just considered phony rules. -# We have to do some pretty insane escaping to get those backslashes -# and dollar signs past make, the shell, and sed at the same time. -# Doesn't work with spaces, but that's fine: .d files have spaces in -# their names replaced with other characters. -define fixup_dep -# The depfile may not exist if the input file didn't have any #includes. -touch $(depfile).raw -# Fixup path as in (1). -sed -e "s|^$(notdir $@)|$@|" $(depfile).raw >> $(depfile) -# Add extra rules as in (2). -# We remove slashes and replace spaces with new lines; -# remove blank lines; -# delete the first line and append a colon to the remaining lines. 
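As a rough illustration of the two rewrites described above, here is what they do to a small made-up dependency file, sketched in Python rather than sed:

    raw = 'foobar.o: DEP1 DEP2 \\\n DEP3\n'       # made-up raw depfile contents

    # (1) restore the full path to the object file that ccache/distcc dropped.
    fixed = raw.replace('foobar.o:', 'path/to/foobar.o:', 1)

    # (2) add an empty (phony) rule per dependency so deleted headers do not
    #     break the next build: drop backslashes, drop the target, add colons.
    deps = raw.replace('\\', ' ').split()[1:]
    phony = ''.join(dep + ':\n' for dep in deps)

    print(fixed + phony)
    # path/to/foobar.o: DEP1 DEP2 \
    #  DEP3
    # DEP1:
    # DEP2:
    # DEP3: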
-sed -e 's|\\||' -e 'y| |\n|' $(depfile).raw |\ - grep -v '^$$' |\ - sed -e 1d -e 's|$$|:|' \ - >> $(depfile) -rm $(depfile).raw -endef - -# Command definitions: -# - cmd_foo is the actual command to run; -# - quiet_cmd_foo is the brief-output summary of the command. - -quiet_cmd_cc = CC($(TOOLSET)) $@ -cmd_cc = $(CC.$(TOOLSET)) $(GYP_CFLAGS) $(DEPFLAGS) $(CFLAGS.$(TOOLSET)) -c -o $@ $< - -quiet_cmd_cxx = CXX($(TOOLSET)) $@ -cmd_cxx = $(CXX.$(TOOLSET)) $(GYP_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< - -quiet_cmd_objc = CXX($(TOOLSET)) $@ -cmd_objc = $(CC.$(TOOLSET)) $(GYP_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $< - -quiet_cmd_objcxx = CXX($(TOOLSET)) $@ -cmd_objcxx = $(CXX.$(TOOLSET)) $(GYP_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $< - -# Commands for precompiled header files. -quiet_cmd_pch_c = CXX($(TOOLSET)) $@ -cmd_pch_c = $(CC.$(TOOLSET)) $(GYP_PCH_CFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< -quiet_cmd_pch_cc = CXX($(TOOLSET)) $@ -cmd_pch_cc = $(CC.$(TOOLSET)) $(GYP_PCH_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< -quiet_cmd_pch_m = CXX($(TOOLSET)) $@ -cmd_pch_m = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $< -quiet_cmd_pch_mm = CXX($(TOOLSET)) $@ -cmd_pch_mm = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $< - -# gyp-mac-tool is written next to the root Makefile by gyp. -# Use $(4) for the command, since $(2) and $(3) are used as flag by do_cmd -# already. -quiet_cmd_mac_tool = MACTOOL $(4) $< -cmd_mac_tool = ./gyp-mac-tool $(4) $< "$@" - -quiet_cmd_mac_package_framework = PACKAGE FRAMEWORK $@ -cmd_mac_package_framework = ./gyp-mac-tool package-framework "$@" $(4) - -quiet_cmd_infoplist = INFOPLIST $@ -cmd_infoplist = $(CC.$(TOOLSET)) -E -P -Wno-trigraphs -x c $(INFOPLIST_DEFINES) "$<" -o "$@" - -quiet_cmd_touch = TOUCH $@ -cmd_touch = touch $@ - -quiet_cmd_copy = COPY $@ -# send stderr to /dev/null to ignore messages when linking directories. -cmd_copy = rm -rf "$@" && cp -af "$<" "$@" - -quiet_cmd_alink = LIBTOOL-STATIC $@ -cmd_alink = rm -f $@ && ./gyp-mac-tool filter-libtool libtool $(GYP_LIBTOOLFLAGS) -static -o $@ $(filter %.o,$^) - -quiet_cmd_link = LINK($(TOOLSET)) $@ -cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS) - -quiet_cmd_solink = SOLINK($(TOOLSET)) $@ -cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS) - -quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@ -cmd_solink_module = $(LINK.$(TOOLSET)) -bundle $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS) - - -# Define an escape_quotes function to escape single quotes. -# This allows us to handle quotes properly as long as we always use -# use single quotes and escape_quotes. -escape_quotes = $(subst ','\'',$(1)) -# This comment is here just to include a ' to unconfuse syntax highlighting. -# Define an escape_vars function to escape '$' variable syntax. -# This allows us to read/write command lines with shell variables (e.g. -# $LD_LIBRARY_PATH), without triggering make substitution. -escape_vars = $(subst $$,$$$$,$(1)) -# Helper that expands to a shell command to echo a string exactly as it is in -# make. This uses printf instead of echo because printf's behaviour with respect -# to escape sequences is more portable than echo's across different shells -# (e.g., dash, bash). 
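The quoting trick described above (wrap the whole string in single quotes and turn each embedded single quote into '\'' so the shell closes, escapes, and reopens the quoting) looks roughly like this in Python, with a made-up command string:

    cmd = "echo 'hello'"                       # made-up command to be logged
    escaped = cmd.replace("'", "'\\''")        # same substitution as escape_quotes
    print("printf '%s\\n' '" + escaped + "'")
    # -> printf '%s\n' 'echo '\''hello'\'''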
-exact_echo = printf '%s\n' '$(call escape_quotes,$(1))' - -# Helper to compare the command we're about to run against the command -# we logged the last time we ran the command. Produces an empty -# string (false) when the commands match. -# Tricky point: Make has no string-equality test function. -# The kernel uses the following, but it seems like it would have false -# positives, where one string reordered its arguments. -# arg_check = $(strip $(filter-out $(cmd_$(1)), $(cmd_$@)) \ -# $(filter-out $(cmd_$@), $(cmd_$(1)))) -# We instead substitute each for the empty string into the other, and -# say they're equal if both substitutions produce the empty string. -# .d files contain ? instead of spaces, take that into account. -command_changed = $(or $(subst $(cmd_$(1)),,$(cmd_$(call replace_spaces,$@))),\ - $(subst $(cmd_$(call replace_spaces,$@)),,$(cmd_$(1)))) - -# Helper that is non-empty when a prerequisite changes. -# Normally make does this implicitly, but we force rules to always run -# so we can check their command lines. -# $? -- new prerequisites -# $| -- order-only dependencies -prereq_changed = $(filter-out FORCE_DO_CMD,$(filter-out $|,$?)) - -# Helper that executes all postbuilds until one fails. -define do_postbuilds - @E=0;\ - for p in $(POSTBUILDS); do\ - eval $$p;\ - E=$$?;\ - if [ $$E -ne 0 ]; then\ - break;\ - fi;\ - done;\ - if [ $$E -ne 0 ]; then\ - rm -rf "$@";\ - exit $$E;\ - fi -endef - -# do_cmd: run a command via the above cmd_foo names, if necessary. -# Should always run for a given target to handle command-line changes. -# Second argument, if non-zero, makes it do asm/C/C++ dependency munging. -# Third argument, if non-zero, makes it do POSTBUILDS processing. -# Note: We intentionally do NOT call dirx for depfile, since it contains ? for -# spaces already and dirx strips the ? characters. -define do_cmd -$(if $(or $(command_changed),$(prereq_changed)), - @$(call exact_echo, $($(quiet)cmd_$(1))) - @mkdir -p "$(call dirx,$@)" "$(dir $(depfile))" - $(if $(findstring flock,$(word 2,$(cmd_$1))), - @$(cmd_$(1)) - @echo " $(quiet_cmd_$(1)): Finished", - @$(cmd_$(1)) - ) - @$(call exact_echo,$(call escape_vars,cmd_$(call replace_spaces,$@) := $(cmd_$(1)))) > $(depfile) - @$(if $(2),$(fixup_dep)) - $(if $(and $(3), $(POSTBUILDS)), - $(call do_postbuilds) - ) -) -endef - -# Declare the "all" target first so it is the default, -# even though we don't have the deps yet. -.PHONY: all -all: - -# make looks for ways to re-generate included makefiles, but in our case, we -# don't have a direct way. Explicitly telling make that it has nothing to do -# for them makes it go faster. -%.d: ; - -# Use FORCE_DO_CMD to force a target to run. Should be coupled with -# do_cmd. -.PHONY: FORCE_DO_CMD -FORCE_DO_CMD: - -TOOLSET := target -# Suffix rules, putting all outputs into $(obj). -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.m FORCE_DO_CMD - @$(call do_cmd,objc,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.mm FORCE_DO_CMD - @$(call do_cmd,objcxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.S FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.s FORCE_DO_CMD - @$(call do_cmd,cc,1) - -# Try building from generated source, too. 
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.m FORCE_DO_CMD - @$(call do_cmd,objc,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.mm FORCE_DO_CMD - @$(call do_cmd,objcxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.S FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.s FORCE_DO_CMD - @$(call do_cmd,cc,1) - -$(obj).$(TOOLSET)/%.o: $(obj)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.m FORCE_DO_CMD - @$(call do_cmd,objc,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.mm FORCE_DO_CMD - @$(call do_cmd,objcxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.S FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.s FORCE_DO_CMD - @$(call do_cmd,cc,1) - - -ifeq ($(strip $(foreach prefix,$(NO_LOAD),\ - $(findstring $(join ^,$(prefix)),\ - $(join ^,binding.target.mk)))),) - include binding.target.mk -endif - -quiet_cmd_regen_makefile = ACTION Regenerating $@ -cmd_regen_makefile = cd $(srcdir); /Users/trott/io.js/deps/npm/node_modules/node-gyp/gyp/gyp_main.py -fmake --ignore-environment "--toplevel-dir=." -I/Users/trott/io.js/test/addons-napi/7_factory_wrap/build/config.gypi -I/Users/trott/io.js/deps/npm/node_modules/node-gyp/addon.gypi -I/Users/trott/io.js/common.gypi "--depth=." "-Goutput_dir=." "--generator-output=build" "-Dlibrary=shared_library" "-Dvisibility=default" "-Dnode_root_dir=/Users/trott/io.js" "-Dnode_gyp_dir=/Users/trott/io.js/deps/npm/node_modules/node-gyp" "-Dnode_lib_file=/Users/trott/io.js/$(Configuration)/node.lib" "-Dmodule_root_dir=/Users/trott/io.js/test/addons-napi/7_factory_wrap" "-Dnode_engine=v8" binding.gyp -Makefile: $(srcdir)/../../../deps/npm/node_modules/node-gyp/addon.gypi $(srcdir)/build/config.gypi $(srcdir)/binding.gyp $(srcdir)/../../../common.gypi - $(call do_cmd,regen_makefile) - -# "all" is a concatenation of the "all" targets from all the included -# sub-makefiles. This is just here to clarify. -all: - -# Add in dependency-tracking rules. $(all_deps) is the list of every single -# target in our tree. Only consider the ones with .d (dependency) info: -d_files := $(wildcard $(foreach f,$(all_deps),$(depsdir)/$(f).d)) -ifneq ($(d_files),) - include $(d_files) -endif diff --git a/test/addons-napi/7_factory_wrap/build/binding.Makefile b/test/addons-napi/7_factory_wrap/build/binding.Makefile deleted file mode 100644 index 0556c70f61905f..00000000000000 --- a/test/addons-napi/7_factory_wrap/build/binding.Makefile +++ /dev/null @@ -1,6 +0,0 @@ -# This file is generated by gyp; do not edit. - -export builddir_name ?= ./build/. -.PHONY: all -all: - $(MAKE) binding diff --git a/test/addons-napi/7_factory_wrap/build/binding.target.mk b/test/addons-napi/7_factory_wrap/build/binding.target.mk deleted file mode 100644 index 6bca2c1f4f6abd..00000000000000 --- a/test/addons-napi/7_factory_wrap/build/binding.target.mk +++ /dev/null @@ -1,179 +0,0 @@ -# This file is generated by gyp; do not edit. 
- -TOOLSET := target -TARGET := binding -DEFS_Debug := \ - '-DNODE_GYP_MODULE_NAME=binding' \ - '-DUSING_UV_SHARED=1' \ - '-DUSING_V8_SHARED=1' \ - '-DV8_DEPRECATION_WARNINGS=1' \ - '-D_DARWIN_USE_64_BIT_INODE=1' \ - '-D_LARGEFILE_SOURCE' \ - '-D_FILE_OFFSET_BITS=64' \ - '-DBUILDING_NODE_EXTENSION' \ - '-DDEBUG' \ - '-D_DEBUG' \ - '-DV8_ENABLE_CHECKS' - -# Flags passed to all source files. -CFLAGS_Debug := \ - -O0 \ - -gdwarf-2 \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -Wall \ - -Wendif-labels \ - -W \ - -Wno-unused-parameter - -# Flags passed to only C files. -CFLAGS_C_Debug := \ - -fno-strict-aliasing - -# Flags passed to only C++ files. -CFLAGS_CC_Debug := \ - -std=gnu++0x \ - -stdlib=libc++ \ - -fno-rtti \ - -fno-exceptions \ - -fno-threadsafe-statics \ - -fno-strict-aliasing - -# Flags passed to only ObjC files. -CFLAGS_OBJC_Debug := - -# Flags passed to only ObjC++ files. -CFLAGS_OBJCC_Debug := - -INCS_Debug := \ - -I/Users/trott/io.js/include/node \ - -I/Users/trott/io.js/src \ - -I/Users/trott/io.js/deps/uv/include \ - -I/Users/trott/io.js/deps/v8/include - -DEFS_Release := \ - '-DNODE_GYP_MODULE_NAME=binding' \ - '-DUSING_UV_SHARED=1' \ - '-DUSING_V8_SHARED=1' \ - '-DV8_DEPRECATION_WARNINGS=1' \ - '-D_DARWIN_USE_64_BIT_INODE=1' \ - '-D_LARGEFILE_SOURCE' \ - '-D_FILE_OFFSET_BITS=64' \ - '-DBUILDING_NODE_EXTENSION' - -# Flags passed to all source files. -CFLAGS_Release := \ - -Os \ - -gdwarf-2 \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -Wall \ - -Wendif-labels \ - -W \ - -Wno-unused-parameter - -# Flags passed to only C files. -CFLAGS_C_Release := \ - -fno-strict-aliasing - -# Flags passed to only C++ files. -CFLAGS_CC_Release := \ - -std=gnu++0x \ - -stdlib=libc++ \ - -fno-rtti \ - -fno-exceptions \ - -fno-threadsafe-statics \ - -fno-strict-aliasing - -# Flags passed to only ObjC files. -CFLAGS_OBJC_Release := - -# Flags passed to only ObjC++ files. -CFLAGS_OBJCC_Release := - -INCS_Release := \ - -I/Users/trott/io.js/include/node \ - -I/Users/trott/io.js/src \ - -I/Users/trott/io.js/deps/uv/include \ - -I/Users/trott/io.js/deps/v8/include - -OBJS := \ - $(obj).target/$(TARGET)/binding.o \ - $(obj).target/$(TARGET)/myobject.o - -# Add to the list of files we specially track dependencies for. -all_deps += $(OBJS) - -# CFLAGS et al overrides must be target-local. -# See "Target-specific Variable Values" in the GNU Make manual. -$(OBJS): TOOLSET := $(TOOLSET) -$(OBJS): GYP_CFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) -$(OBJS): GYP_CXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) -$(OBJS): GYP_OBJCFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) $(CFLAGS_OBJC_$(BUILDTYPE)) -$(OBJS): GYP_OBJCXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) $(CFLAGS_OBJCC_$(BUILDTYPE)) - -# Suffix rules, putting all outputs into $(obj). - -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) - -# Try building from generated source, too. - -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) - -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) - -# End of this set of suffix rules -### Rules for final target. 
-LDFLAGS_Debug := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -L$(builddir) \ - -stdlib=libc++ - -LIBTOOLFLAGS_Debug := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first - -LDFLAGS_Release := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -L$(builddir) \ - -stdlib=libc++ - -LIBTOOLFLAGS_Release := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first - -LIBS := - -$(builddir)/binding.node: GYP_LDFLAGS := $(LDFLAGS_$(BUILDTYPE)) -$(builddir)/binding.node: LIBS := $(LIBS) -$(builddir)/binding.node: GYP_LIBTOOLFLAGS := $(LIBTOOLFLAGS_$(BUILDTYPE)) -$(builddir)/binding.node: TOOLSET := $(TOOLSET) -$(builddir)/binding.node: $(OBJS) FORCE_DO_CMD - $(call do_cmd,solink_module) - -all_deps += $(builddir)/binding.node -# Add target alias -.PHONY: binding -binding: $(builddir)/binding.node - -# Short alias for building this executable. -.PHONY: binding.node -binding.node: $(builddir)/binding.node - -# Add executable to "all" target. -.PHONY: all -all: $(builddir)/binding.node - diff --git a/test/addons-napi/7_factory_wrap/build/config.gypi b/test/addons-napi/7_factory_wrap/build/config.gypi deleted file mode 100644 index 6a35cfccbb772b..00000000000000 --- a/test/addons-napi/7_factory_wrap/build/config.gypi +++ /dev/null @@ -1,70 +0,0 @@ -# Do not edit. File was generated by node-gyp's "configure" step -{ - "target_defaults": { - "cflags": [], - "default_configuration": "Release", - "defines": [], - "include_dirs": [], - "libraries": [] - }, - "variables": { - "asan": 0, - "coverage": "false", - "debug_devtools": "node", - "debug_http2": "false", - "debug_nghttp2": "false", - "force_dynamic_crt": 0, - "host_arch": "x64", - "icu_data_file": "icudt59l.dat", - "icu_data_in": "../../deps/icu-small/source/data/in/icudt59l.dat", - "icu_endianness": "l", - "icu_gyp_path": "tools/icu/icu-generic.gyp", - "icu_locales": "en,root", - "icu_path": "deps/icu-small", - "icu_small": "true", - "icu_ver_major": "59", - "llvm_version": 0, - "node_byteorder": "little", - "node_enable_d8": "false", - "node_enable_v8_vtunejit": "false", - "node_install_npm": "true", - "node_module_version": 58, - "node_no_browser_globals": "false", - "node_prefix": "/usr/local", - "node_release_urlbase": "", - "node_shared": "false", - "node_shared_cares": "false", - "node_shared_http_parser": "false", - "node_shared_libuv": "false", - "node_shared_openssl": "false", - "node_shared_zlib": "false", - "node_tag": "", - "node_use_bundled_v8": "true", - "node_use_dtrace": "true", - "node_use_etw": "false", - "node_use_lttng": "false", - "node_use_openssl": "true", - "node_use_perfctr": "false", - "node_use_v8_platform": "true", - "node_without_node_options": "false", - "openssl_fips": "", - "openssl_no_asm": 0, - "shlib_suffix": "58.dylib", - "target_arch": "x64", - "uv_parent_path": "/deps/uv/", - "uv_use_dtrace": "true", - "v8_enable_gdbjit": 0, - "v8_enable_i18n_support": 1, - "v8_enable_inspector": 1, - "v8_no_strict_aliasing": 1, - "v8_optimized_debug": 0, - "v8_promise_internal_field_count": 1, - "v8_random_seed": 0, - "v8_trace_maps": 0, - "v8_use_snapshot": "true", - "want_separate_host_toolset": 0, - "xcode_version": "8.0", - "nodedir": "/Users/trott/io.js", - "standalone_static_library": 1 - } -} diff --git a/test/addons-napi/7_factory_wrap/build/gyp-mac-tool b/test/addons-napi/7_factory_wrap/build/gyp-mac-tool deleted file mode 
100755 index 8ef02b0493a003..00000000000000 --- a/test/addons-napi/7_factory_wrap/build/gyp-mac-tool +++ /dev/null @@ -1,611 +0,0 @@ -#!/usr/bin/env python -# Generated by gyp. Do not edit. -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Utility functions to perform Xcode-style build steps. - -These functions are executed via gyp-mac-tool when using the Makefile generator. -""" - -import fcntl -import fnmatch -import glob -import json -import os -import plistlib -import re -import shutil -import string -import subprocess -import sys -import tempfile - - -def main(args): - executor = MacTool() - exit_code = executor.Dispatch(args) - if exit_code is not None: - sys.exit(exit_code) - - -class MacTool(object): - """This class performs all the Mac tooling steps. The methods can either be - executed directly, or dispatched from an argument list.""" - - def Dispatch(self, args): - """Dispatches a string command to a method.""" - if len(args) < 1: - raise Exception("Not enough arguments") - - method = "Exec%s" % self._CommandifyName(args[0]) - return getattr(self, method)(*args[1:]) - - def _CommandifyName(self, name_string): - """Transforms a tool name like copy-info-plist to CopyInfoPlist""" - return name_string.title().replace('-', '') - - def ExecCopyBundleResource(self, source, dest, convert_to_binary): - """Copies a resource file to the bundle/Resources directory, performing any - necessary compilation on each resource.""" - extension = os.path.splitext(source)[1].lower() - if os.path.isdir(source): - # Copy tree. - # TODO(thakis): This copies file attributes like mtime, while the - # single-file branch below doesn't. This should probably be changed to - # be consistent with the single-file branch. - if os.path.exists(dest): - shutil.rmtree(dest) - shutil.copytree(source, dest) - elif extension == '.xib': - return self._CopyXIBFile(source, dest) - elif extension == '.storyboard': - return self._CopyXIBFile(source, dest) - elif extension == '.strings': - self._CopyStringsFile(source, dest, convert_to_binary) - else: - shutil.copy(source, dest) - - def _CopyXIBFile(self, source, dest): - """Compiles a XIB file with ibtool into a binary plist in the bundle.""" - - # ibtool sometimes crashes with relative paths. See crbug.com/314728. 
- base = os.path.dirname(os.path.realpath(__file__)) - if os.path.relpath(source): - source = os.path.join(base, source) - if os.path.relpath(dest): - dest = os.path.join(base, dest) - - args = ['xcrun', 'ibtool', '--errors', '--warnings', '--notices', - '--output-format', 'human-readable-text', '--compile', dest, source] - ibtool_section_re = re.compile(r'/\*.*\*/') - ibtool_re = re.compile(r'.*note:.*is clipping its content') - ibtoolout = subprocess.Popen(args, stdout=subprocess.PIPE) - current_section_header = None - for line in ibtoolout.stdout: - if ibtool_section_re.match(line): - current_section_header = line - elif not ibtool_re.match(line): - if current_section_header: - sys.stdout.write(current_section_header) - current_section_header = None - sys.stdout.write(line) - return ibtoolout.returncode - - def _ConvertToBinary(self, dest): - subprocess.check_call([ - 'xcrun', 'plutil', '-convert', 'binary1', '-o', dest, dest]) - - def _CopyStringsFile(self, source, dest, convert_to_binary): - """Copies a .strings file using iconv to reconvert the input into UTF-16.""" - input_code = self._DetectInputEncoding(source) or "UTF-8" - - # Xcode's CpyCopyStringsFile / builtin-copyStrings seems to call - # CFPropertyListCreateFromXMLData() behind the scenes; at least it prints - # CFPropertyListCreateFromXMLData(): Old-style plist parser: missing - # semicolon in dictionary. - # on invalid files. Do the same kind of validation. - import CoreFoundation - s = open(source, 'rb').read() - d = CoreFoundation.CFDataCreate(None, s, len(s)) - _, error = CoreFoundation.CFPropertyListCreateFromXMLData(None, d, 0, None) - if error: - return - - fp = open(dest, 'wb') - fp.write(s.decode(input_code).encode('UTF-16')) - fp.close() - - if convert_to_binary == 'True': - self._ConvertToBinary(dest) - - def _DetectInputEncoding(self, file_name): - """Reads the first few bytes from file_name and tries to guess the text - encoding. Returns None as a guess if it can't detect it.""" - fp = open(file_name, 'rb') - try: - header = fp.read(3) - except e: - fp.close() - return None - fp.close() - if header.startswith("\xFE\xFF"): - return "UTF-16" - elif header.startswith("\xFF\xFE"): - return "UTF-16" - elif header.startswith("\xEF\xBB\xBF"): - return "UTF-8" - else: - return None - - def ExecCopyInfoPlist(self, source, dest, convert_to_binary, *keys): - """Copies the |source| Info.plist to the destination directory |dest|.""" - # Read the source Info.plist into memory. - fd = open(source, 'r') - lines = fd.read() - fd.close() - - # Insert synthesized key/value pairs (e.g. BuildMachineOSBuild). - plist = plistlib.readPlistFromString(lines) - if keys: - plist = dict(plist.items() + json.loads(keys[0]).items()) - lines = plistlib.writePlistToString(plist) - - # Go through all the environment variables and replace them as variables in - # the file. - IDENT_RE = re.compile(r'[/\s]') - for key in os.environ: - if key.startswith('_'): - continue - evar = '${%s}' % key - evalue = os.environ[key] - lines = string.replace(lines, evar, evalue) - - # Xcode supports various suffices on environment variables, which are - # all undocumented. :rfc1034identifier is used in the standard project - # template these days, and :identifier was used earlier. They are used to - # convert non-url characters into things that look like valid urls -- - # except that the replacement character for :identifier, '_' isn't valid - # in a URL either -- oops, hence :rfc1034identifier was born. 
- evar = '${%s:identifier}' % key - evalue = IDENT_RE.sub('_', os.environ[key]) - lines = string.replace(lines, evar, evalue) - - evar = '${%s:rfc1034identifier}' % key - evalue = IDENT_RE.sub('-', os.environ[key]) - lines = string.replace(lines, evar, evalue) - - # Remove any keys with values that haven't been replaced. - lines = lines.split('\n') - for i in range(len(lines)): - if lines[i].strip().startswith("${"): - lines[i] = None - lines[i - 1] = None - lines = '\n'.join(filter(lambda x: x is not None, lines)) - - # Write out the file with variables replaced. - fd = open(dest, 'w') - fd.write(lines) - fd.close() - - # Now write out PkgInfo file now that the Info.plist file has been - # "compiled". - self._WritePkgInfo(dest) - - if convert_to_binary == 'True': - self._ConvertToBinary(dest) - - def _WritePkgInfo(self, info_plist): - """This writes the PkgInfo file from the data stored in Info.plist.""" - plist = plistlib.readPlist(info_plist) - if not plist: - return - - # Only create PkgInfo for executable types. - package_type = plist['CFBundlePackageType'] - if package_type != 'APPL': - return - - # The format of PkgInfo is eight characters, representing the bundle type - # and bundle signature, each four characters. If that is missing, four - # '?' characters are used instead. - signature_code = plist.get('CFBundleSignature', '????') - if len(signature_code) != 4: # Wrong length resets everything, too. - signature_code = '?' * 4 - - dest = os.path.join(os.path.dirname(info_plist), 'PkgInfo') - fp = open(dest, 'w') - fp.write('%s%s' % (package_type, signature_code)) - fp.close() - - def ExecFlock(self, lockfile, *cmd_list): - """Emulates the most basic behavior of Linux's flock(1).""" - # Rely on exception handling to report errors. - fd = os.open(lockfile, os.O_RDONLY|os.O_NOCTTY|os.O_CREAT, 0o666) - fcntl.flock(fd, fcntl.LOCK_EX) - return subprocess.call(cmd_list) - - def ExecFilterLibtool(self, *cmd_list): - """Calls libtool and filters out '/path/to/libtool: file: foo.o has no - symbols'.""" - libtool_re = re.compile(r'^.*libtool: file: .* has no symbols$') - libtool_re5 = re.compile( - r'^.*libtool: warning for library: ' + - r'.* the table of contents is empty ' + - r'\(no object file members in the library define global symbols\)$') - env = os.environ.copy() - # Ref: - # http://www.opensource.apple.com/source/cctools/cctools-809/misc/libtool.c - # The problem with this flag is that it resets the file mtime on the file to - # epoch=0, e.g. 1970-1-1 or 1969-12-31 depending on timezone. - env['ZERO_AR_DATE'] = '1' - libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env) - _, err = libtoolout.communicate() - for line in err.splitlines(): - if not libtool_re.match(line) and not libtool_re5.match(line): - print >>sys.stderr, line - # Unconditionally touch the output .a file on the command line if present - # and the command succeeded. A bit hacky. - if not libtoolout.returncode: - for i in range(len(cmd_list) - 1): - if cmd_list[i] == "-o" and cmd_list[i+1].endswith('.a'): - os.utime(cmd_list[i+1], None) - break - return libtoolout.returncode - - def ExecPackageFramework(self, framework, version): - """Takes a path to Something.framework and the Current version of that and - sets up all the symlinks.""" - # Find the name of the binary based on the part before the ".framework". 
- binary = os.path.basename(framework).split('.')[0] - - CURRENT = 'Current' - RESOURCES = 'Resources' - VERSIONS = 'Versions' - - if not os.path.exists(os.path.join(framework, VERSIONS, version, binary)): - # Binary-less frameworks don't seem to contain symlinks (see e.g. - # chromium's out/Debug/org.chromium.Chromium.manifest/ bundle). - return - - # Move into the framework directory to set the symlinks correctly. - pwd = os.getcwd() - os.chdir(framework) - - # Set up the Current version. - self._Relink(version, os.path.join(VERSIONS, CURRENT)) - - # Set up the root symlinks. - self._Relink(os.path.join(VERSIONS, CURRENT, binary), binary) - self._Relink(os.path.join(VERSIONS, CURRENT, RESOURCES), RESOURCES) - - # Back to where we were before! - os.chdir(pwd) - - def _Relink(self, dest, link): - """Creates a symlink to |dest| named |link|. If |link| already exists, - it is overwritten.""" - if os.path.lexists(link): - os.remove(link) - os.symlink(dest, link) - - def ExecCompileXcassets(self, keys, *inputs): - """Compiles multiple .xcassets files into a single .car file. - - This invokes 'actool' to compile all the inputs .xcassets files. The - |keys| arguments is a json-encoded dictionary of extra arguments to - pass to 'actool' when the asset catalogs contains an application icon - or a launch image. - - Note that 'actool' does not create the Assets.car file if the asset - catalogs does not contains imageset. - """ - command_line = [ - 'xcrun', 'actool', '--output-format', 'human-readable-text', - '--compress-pngs', '--notices', '--warnings', '--errors', - ] - is_iphone_target = 'IPHONEOS_DEPLOYMENT_TARGET' in os.environ - if is_iphone_target: - platform = os.environ['CONFIGURATION'].split('-')[-1] - if platform not in ('iphoneos', 'iphonesimulator'): - platform = 'iphonesimulator' - command_line.extend([ - '--platform', platform, '--target-device', 'iphone', - '--target-device', 'ipad', '--minimum-deployment-target', - os.environ['IPHONEOS_DEPLOYMENT_TARGET'], '--compile', - os.path.abspath(os.environ['CONTENTS_FOLDER_PATH']), - ]) - else: - command_line.extend([ - '--platform', 'macosx', '--target-device', 'mac', - '--minimum-deployment-target', os.environ['MACOSX_DEPLOYMENT_TARGET'], - '--compile', - os.path.abspath(os.environ['UNLOCALIZED_RESOURCES_FOLDER_PATH']), - ]) - if keys: - keys = json.loads(keys) - for key, value in keys.iteritems(): - arg_name = '--' + key - if isinstance(value, bool): - if value: - command_line.append(arg_name) - elif isinstance(value, list): - for v in value: - command_line.append(arg_name) - command_line.append(str(v)) - else: - command_line.append(arg_name) - command_line.append(str(value)) - # Note: actool crashes if inputs path are relative, so use os.path.abspath - # to get absolute path name for inputs. - command_line.extend(map(os.path.abspath, inputs)) - subprocess.check_call(command_line) - - def ExecMergeInfoPlist(self, output, *inputs): - """Merge multiple .plist files into a single .plist file.""" - merged_plist = {} - for path in inputs: - plist = self._LoadPlistMaybeBinary(path) - self._MergePlist(merged_plist, plist) - plistlib.writePlist(merged_plist, output) - - def ExecCodeSignBundle(self, key, resource_rules, entitlements, provisioning): - """Code sign a bundle. - - This function tries to code sign an iOS bundle, following the same - algorithm as Xcode: - 1. copy ResourceRules.plist from the user or the SDK into the bundle, - 2. 
pick the provisioning profile that best match the bundle identifier, - and copy it into the bundle as embedded.mobileprovision, - 3. copy Entitlements.plist from user or SDK next to the bundle, - 4. code sign the bundle. - """ - resource_rules_path = self._InstallResourceRules(resource_rules) - substitutions, overrides = self._InstallProvisioningProfile( - provisioning, self._GetCFBundleIdentifier()) - entitlements_path = self._InstallEntitlements( - entitlements, substitutions, overrides) - subprocess.check_call([ - 'codesign', '--force', '--sign', key, '--resource-rules', - resource_rules_path, '--entitlements', entitlements_path, - os.path.join( - os.environ['TARGET_BUILD_DIR'], - os.environ['FULL_PRODUCT_NAME'])]) - - def _InstallResourceRules(self, resource_rules): - """Installs ResourceRules.plist from user or SDK into the bundle. - - Args: - resource_rules: string, optional, path to the ResourceRules.plist file - to use, default to "${SDKROOT}/ResourceRules.plist" - - Returns: - Path to the copy of ResourceRules.plist into the bundle. - """ - source_path = resource_rules - target_path = os.path.join( - os.environ['BUILT_PRODUCTS_DIR'], - os.environ['CONTENTS_FOLDER_PATH'], - 'ResourceRules.plist') - if not source_path: - source_path = os.path.join( - os.environ['SDKROOT'], 'ResourceRules.plist') - shutil.copy2(source_path, target_path) - return target_path - - def _InstallProvisioningProfile(self, profile, bundle_identifier): - """Installs embedded.mobileprovision into the bundle. - - Args: - profile: string, optional, short name of the .mobileprovision file - to use, if empty or the file is missing, the best file installed - will be used - bundle_identifier: string, value of CFBundleIdentifier from Info.plist - - Returns: - A tuple containing two dictionary: variables substitutions and values - to overrides when generating the entitlements file. - """ - source_path, provisioning_data, team_id = self._FindProvisioningProfile( - profile, bundle_identifier) - target_path = os.path.join( - os.environ['BUILT_PRODUCTS_DIR'], - os.environ['CONTENTS_FOLDER_PATH'], - 'embedded.mobileprovision') - shutil.copy2(source_path, target_path) - substitutions = self._GetSubstitutions(bundle_identifier, team_id + '.') - return substitutions, provisioning_data['Entitlements'] - - def _FindProvisioningProfile(self, profile, bundle_identifier): - """Finds the .mobileprovision file to use for signing the bundle. - - Checks all the installed provisioning profiles (or if the user specified - the PROVISIONING_PROFILE variable, only consult it) and select the most - specific that correspond to the bundle identifier. - - Args: - profile: string, optional, short name of the .mobileprovision file - to use, if empty or the file is missing, the best file installed - will be used - bundle_identifier: string, value of CFBundleIdentifier from Info.plist - - Returns: - A tuple of the path to the selected provisioning profile, the data of - the embedded plist in the provisioning profile and the team identifier - to use for code signing. - - Raises: - SystemExit: if no .mobileprovision can be used to sign the bundle. 
- """ - profiles_dir = os.path.join( - os.environ['HOME'], 'Library', 'MobileDevice', 'Provisioning Profiles') - if not os.path.isdir(profiles_dir): - print >>sys.stderr, ( - 'cannot find mobile provisioning for %s' % bundle_identifier) - sys.exit(1) - provisioning_profiles = None - if profile: - profile_path = os.path.join(profiles_dir, profile + '.mobileprovision') - if os.path.exists(profile_path): - provisioning_profiles = [profile_path] - if not provisioning_profiles: - provisioning_profiles = glob.glob( - os.path.join(profiles_dir, '*.mobileprovision')) - valid_provisioning_profiles = {} - for profile_path in provisioning_profiles: - profile_data = self._LoadProvisioningProfile(profile_path) - app_id_pattern = profile_data.get( - 'Entitlements', {}).get('application-identifier', '') - for team_identifier in profile_data.get('TeamIdentifier', []): - app_id = '%s.%s' % (team_identifier, bundle_identifier) - if fnmatch.fnmatch(app_id, app_id_pattern): - valid_provisioning_profiles[app_id_pattern] = ( - profile_path, profile_data, team_identifier) - if not valid_provisioning_profiles: - print >>sys.stderr, ( - 'cannot find mobile provisioning for %s' % bundle_identifier) - sys.exit(1) - # If the user has multiple provisioning profiles installed that can be - # used for ${bundle_identifier}, pick the most specific one (ie. the - # provisioning profile whose pattern is the longest). - selected_key = max(valid_provisioning_profiles, key=lambda v: len(v)) - return valid_provisioning_profiles[selected_key] - - def _LoadProvisioningProfile(self, profile_path): - """Extracts the plist embedded in a provisioning profile. - - Args: - profile_path: string, path to the .mobileprovision file - - Returns: - Content of the plist embedded in the provisioning profile as a dictionary. - """ - with tempfile.NamedTemporaryFile() as temp: - subprocess.check_call([ - 'security', 'cms', '-D', '-i', profile_path, '-o', temp.name]) - return self._LoadPlistMaybeBinary(temp.name) - - def _MergePlist(self, merged_plist, plist): - """Merge |plist| into |merged_plist|.""" - for key, value in plist.iteritems(): - if isinstance(value, dict): - merged_value = merged_plist.get(key, {}) - if isinstance(merged_value, dict): - self._MergePlist(merged_value, value) - merged_plist[key] = merged_value - else: - merged_plist[key] = value - else: - merged_plist[key] = value - - def _LoadPlistMaybeBinary(self, plist_path): - """Loads into a memory a plist possibly encoded in binary format. - - This is a wrapper around plistlib.readPlist that tries to convert the - plist to the XML format if it can't be parsed (assuming that it is in - the binary format). - - Args: - plist_path: string, path to a plist file, in XML or binary format - - Returns: - Content of the plist as a dictionary. - """ - try: - # First, try to read the file using plistlib that only supports XML, - # and if an exception is raised, convert a temporary copy to XML and - # load that copy. - return plistlib.readPlist(plist_path) - except: - pass - with tempfile.NamedTemporaryFile() as temp: - shutil.copy2(plist_path, temp.name) - subprocess.check_call(['plutil', '-convert', 'xml1', temp.name]) - return plistlib.readPlist(temp.name) - - def _GetSubstitutions(self, bundle_identifier, app_identifier_prefix): - """Constructs a dictionary of variable substitutions for Entitlements.plist. 
- - Args: - bundle_identifier: string, value of CFBundleIdentifier from Info.plist - app_identifier_prefix: string, value for AppIdentifierPrefix - - Returns: - Dictionary of substitutions to apply when generating Entitlements.plist. - """ - return { - 'CFBundleIdentifier': bundle_identifier, - 'AppIdentifierPrefix': app_identifier_prefix, - } - - def _GetCFBundleIdentifier(self): - """Extracts CFBundleIdentifier value from Info.plist in the bundle. - - Returns: - Value of CFBundleIdentifier in the Info.plist located in the bundle. - """ - info_plist_path = os.path.join( - os.environ['TARGET_BUILD_DIR'], - os.environ['INFOPLIST_PATH']) - info_plist_data = self._LoadPlistMaybeBinary(info_plist_path) - return info_plist_data['CFBundleIdentifier'] - - def _InstallEntitlements(self, entitlements, substitutions, overrides): - """Generates and install the ${BundleName}.xcent entitlements file. - - Expands variables "$(variable)" pattern in the source entitlements file, - add extra entitlements defined in the .mobileprovision file and the copy - the generated plist to "${BundlePath}.xcent". - - Args: - entitlements: string, optional, path to the Entitlements.plist template - to use, defaults to "${SDKROOT}/Entitlements.plist" - substitutions: dictionary, variable substitutions - overrides: dictionary, values to add to the entitlements - - Returns: - Path to the generated entitlements file. - """ - source_path = entitlements - target_path = os.path.join( - os.environ['BUILT_PRODUCTS_DIR'], - os.environ['PRODUCT_NAME'] + '.xcent') - if not source_path: - source_path = os.path.join( - os.environ['SDKROOT'], - 'Entitlements.plist') - shutil.copy2(source_path, target_path) - data = self._LoadPlistMaybeBinary(target_path) - data = self._ExpandVariables(data, substitutions) - if overrides: - for key in overrides: - if key not in data: - data[key] = overrides[key] - plistlib.writePlist(data, target_path) - return target_path - - def _ExpandVariables(self, data, substitutions): - """Expands variables "$(variable)" in data. - - Args: - data: object, can be either string, list or dictionary - substitutions: dictionary, variable substitutions to perform - - Returns: - Copy of data where each references to "$(variable)" has been replaced - by the corresponding value found in substitutions, or left intact if - the key was not found. - """ - if isinstance(data, str): - for key, value in substitutions.iteritems(): - data = data.replace('$(%s)' % key, value) - return data - if isinstance(data, list): - return [self._ExpandVariables(v, substitutions) for v in data] - if isinstance(data, dict): - return {k: self._ExpandVariables(data[k], substitutions) for k in data} - return data - -if __name__ == '__main__': - sys.exit(main(sys.argv[1:])) diff --git a/test/addons-napi/8_passing_wrapped/build/Makefile b/test/addons-napi/8_passing_wrapped/build/Makefile deleted file mode 100644 index 260978b733a982..00000000000000 --- a/test/addons-napi/8_passing_wrapped/build/Makefile +++ /dev/null @@ -1,342 +0,0 @@ -# We borrow heavily from the kernel build setup, though we are simpler since -# we don't have Kconfig tweaking settings on us. - -# The implicit make rules have it looking for RCS files, among other things. -# We instead explicitly write all the rules we care about. -# It's even quicker (saves ~200ms) to pass -r on the command line. -MAKEFLAGS=-r - -# The source directory tree. -srcdir := .. -abs_srcdir := $(abspath $(srcdir)) - -# The name of the builddir. -builddir_name ?= . 
- -# The V=1 flag on command line makes us verbosely print command lines. -ifdef V - quiet= -else - quiet=quiet_ -endif - -# Specify BUILDTYPE=Release on the command line for a release build. -BUILDTYPE ?= Release - -# Directory all our build output goes into. -# Note that this must be two directories beneath src/ for unit tests to pass, -# as they reach into the src/ directory for data with relative paths. -builddir ?= $(builddir_name)/$(BUILDTYPE) -abs_builddir := $(abspath $(builddir)) -depsdir := $(builddir)/.deps - -# Object output directory. -obj := $(builddir)/obj -abs_obj := $(abspath $(obj)) - -# We build up a list of every single one of the targets so we can slurp in the -# generated dependency rule Makefiles in one pass. -all_deps := - - - -CC.target ?= $(CC) -CFLAGS.target ?= $(CPPFLAGS) $(CFLAGS) -CXX.target ?= $(CXX) -CXXFLAGS.target ?= $(CPPFLAGS) $(CXXFLAGS) -LINK.target ?= $(LINK) -LDFLAGS.target ?= $(LDFLAGS) -AR.target ?= $(AR) - -# C++ apps need to be linked with g++. -LINK ?= $(CXX.target) - -# TODO(evan): move all cross-compilation logic to gyp-time so we don't need -# to replicate this environment fallback in make as well. -CC.host ?= gcc -CFLAGS.host ?= $(CPPFLAGS_host) $(CFLAGS_host) -CXX.host ?= g++ -CXXFLAGS.host ?= $(CPPFLAGS_host) $(CXXFLAGS_host) -LINK.host ?= $(CXX.host) -LDFLAGS.host ?= -AR.host ?= ar - -# Define a dir function that can handle spaces. -# http://www.gnu.org/software/make/manual/make.html#Syntax-of-Functions -# "leading spaces cannot appear in the text of the first argument as written. -# These characters can be put into the argument value by variable substitution." -empty := -space := $(empty) $(empty) - -# http://stackoverflow.com/questions/1189781/using-make-dir-or-notdir-on-a-path-with-spaces -replace_spaces = $(subst $(space),?,$1) -unreplace_spaces = $(subst ?,$(space),$1) -dirx = $(call unreplace_spaces,$(dir $(call replace_spaces,$1))) - -# Flags to make gcc output dependency info. Note that you need to be -# careful here to use the flags that ccache and distcc can understand. -# We write to a dep file on the side first and then rename at the end -# so we can't end up with a broken dep file. -depfile = $(depsdir)/$(call replace_spaces,$@).d -DEPFLAGS = -MMD -MF $(depfile).raw - -# We have to fixup the deps output in a few ways. -# (1) the file output should mention the proper .o file. -# ccache or distcc lose the path to the target, so we convert a rule of -# the form: -# foobar.o: DEP1 DEP2 -# into -# path/to/foobar.o: DEP1 DEP2 -# (2) we want missing files not to cause us to fail to build. -# We want to rewrite -# foobar.o: DEP1 DEP2 \ -# DEP3 -# to -# DEP1: -# DEP2: -# DEP3: -# so if the files are missing, they're just considered phony rules. -# We have to do some pretty insane escaping to get those backslashes -# and dollar signs past make, the shell, and sed at the same time. -# Doesn't work with spaces, but that's fine: .d files have spaces in -# their names replaced with other characters. -define fixup_dep -# The depfile may not exist if the input file didn't have any #includes. -touch $(depfile).raw -# Fixup path as in (1). -sed -e "s|^$(notdir $@)|$@|" $(depfile).raw >> $(depfile) -# Add extra rules as in (2). -# We remove slashes and replace spaces with new lines; -# remove blank lines; -# delete the first line and append a colon to the remaining lines. 
-sed -e 's|\\||' -e 'y| |\n|' $(depfile).raw |\ - grep -v '^$$' |\ - sed -e 1d -e 's|$$|:|' \ - >> $(depfile) -rm $(depfile).raw -endef - -# Command definitions: -# - cmd_foo is the actual command to run; -# - quiet_cmd_foo is the brief-output summary of the command. - -quiet_cmd_cc = CC($(TOOLSET)) $@ -cmd_cc = $(CC.$(TOOLSET)) $(GYP_CFLAGS) $(DEPFLAGS) $(CFLAGS.$(TOOLSET)) -c -o $@ $< - -quiet_cmd_cxx = CXX($(TOOLSET)) $@ -cmd_cxx = $(CXX.$(TOOLSET)) $(GYP_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< - -quiet_cmd_objc = CXX($(TOOLSET)) $@ -cmd_objc = $(CC.$(TOOLSET)) $(GYP_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $< - -quiet_cmd_objcxx = CXX($(TOOLSET)) $@ -cmd_objcxx = $(CXX.$(TOOLSET)) $(GYP_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $< - -# Commands for precompiled header files. -quiet_cmd_pch_c = CXX($(TOOLSET)) $@ -cmd_pch_c = $(CC.$(TOOLSET)) $(GYP_PCH_CFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< -quiet_cmd_pch_cc = CXX($(TOOLSET)) $@ -cmd_pch_cc = $(CC.$(TOOLSET)) $(GYP_PCH_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< -quiet_cmd_pch_m = CXX($(TOOLSET)) $@ -cmd_pch_m = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $< -quiet_cmd_pch_mm = CXX($(TOOLSET)) $@ -cmd_pch_mm = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $< - -# gyp-mac-tool is written next to the root Makefile by gyp. -# Use $(4) for the command, since $(2) and $(3) are used as flag by do_cmd -# already. -quiet_cmd_mac_tool = MACTOOL $(4) $< -cmd_mac_tool = ./gyp-mac-tool $(4) $< "$@" - -quiet_cmd_mac_package_framework = PACKAGE FRAMEWORK $@ -cmd_mac_package_framework = ./gyp-mac-tool package-framework "$@" $(4) - -quiet_cmd_infoplist = INFOPLIST $@ -cmd_infoplist = $(CC.$(TOOLSET)) -E -P -Wno-trigraphs -x c $(INFOPLIST_DEFINES) "$<" -o "$@" - -quiet_cmd_touch = TOUCH $@ -cmd_touch = touch $@ - -quiet_cmd_copy = COPY $@ -# send stderr to /dev/null to ignore messages when linking directories. -cmd_copy = rm -rf "$@" && cp -af "$<" "$@" - -quiet_cmd_alink = LIBTOOL-STATIC $@ -cmd_alink = rm -f $@ && ./gyp-mac-tool filter-libtool libtool $(GYP_LIBTOOLFLAGS) -static -o $@ $(filter %.o,$^) - -quiet_cmd_link = LINK($(TOOLSET)) $@ -cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS) - -quiet_cmd_solink = SOLINK($(TOOLSET)) $@ -cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS) - -quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@ -cmd_solink_module = $(LINK.$(TOOLSET)) -bundle $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS) - - -# Define an escape_quotes function to escape single quotes. -# This allows us to handle quotes properly as long as we always use -# use single quotes and escape_quotes. -escape_quotes = $(subst ','\'',$(1)) -# This comment is here just to include a ' to unconfuse syntax highlighting. -# Define an escape_vars function to escape '$' variable syntax. -# This allows us to read/write command lines with shell variables (e.g. -# $LD_LIBRARY_PATH), without triggering make substitution. -escape_vars = $(subst $$,$$$$,$(1)) -# Helper that expands to a shell command to echo a string exactly as it is in -# make. This uses printf instead of echo because printf's behaviour with respect -# to escape sequences is more portable than echo's across different shells -# (e.g., dash, bash). 
-exact_echo = printf '%s\n' '$(call escape_quotes,$(1))' - -# Helper to compare the command we're about to run against the command -# we logged the last time we ran the command. Produces an empty -# string (false) when the commands match. -# Tricky point: Make has no string-equality test function. -# The kernel uses the following, but it seems like it would have false -# positives, where one string reordered its arguments. -# arg_check = $(strip $(filter-out $(cmd_$(1)), $(cmd_$@)) \ -# $(filter-out $(cmd_$@), $(cmd_$(1)))) -# We instead substitute each for the empty string into the other, and -# say they're equal if both substitutions produce the empty string. -# .d files contain ? instead of spaces, take that into account. -command_changed = $(or $(subst $(cmd_$(1)),,$(cmd_$(call replace_spaces,$@))),\ - $(subst $(cmd_$(call replace_spaces,$@)),,$(cmd_$(1)))) - -# Helper that is non-empty when a prerequisite changes. -# Normally make does this implicitly, but we force rules to always run -# so we can check their command lines. -# $? -- new prerequisites -# $| -- order-only dependencies -prereq_changed = $(filter-out FORCE_DO_CMD,$(filter-out $|,$?)) - -# Helper that executes all postbuilds until one fails. -define do_postbuilds - @E=0;\ - for p in $(POSTBUILDS); do\ - eval $$p;\ - E=$$?;\ - if [ $$E -ne 0 ]; then\ - break;\ - fi;\ - done;\ - if [ $$E -ne 0 ]; then\ - rm -rf "$@";\ - exit $$E;\ - fi -endef - -# do_cmd: run a command via the above cmd_foo names, if necessary. -# Should always run for a given target to handle command-line changes. -# Second argument, if non-zero, makes it do asm/C/C++ dependency munging. -# Third argument, if non-zero, makes it do POSTBUILDS processing. -# Note: We intentionally do NOT call dirx for depfile, since it contains ? for -# spaces already and dirx strips the ? characters. -define do_cmd -$(if $(or $(command_changed),$(prereq_changed)), - @$(call exact_echo, $($(quiet)cmd_$(1))) - @mkdir -p "$(call dirx,$@)" "$(dir $(depfile))" - $(if $(findstring flock,$(word 2,$(cmd_$1))), - @$(cmd_$(1)) - @echo " $(quiet_cmd_$(1)): Finished", - @$(cmd_$(1)) - ) - @$(call exact_echo,$(call escape_vars,cmd_$(call replace_spaces,$@) := $(cmd_$(1)))) > $(depfile) - @$(if $(2),$(fixup_dep)) - $(if $(and $(3), $(POSTBUILDS)), - $(call do_postbuilds) - ) -) -endef - -# Declare the "all" target first so it is the default, -# even though we don't have the deps yet. -.PHONY: all -all: - -# make looks for ways to re-generate included makefiles, but in our case, we -# don't have a direct way. Explicitly telling make that it has nothing to do -# for them makes it go faster. -%.d: ; - -# Use FORCE_DO_CMD to force a target to run. Should be coupled with -# do_cmd. -.PHONY: FORCE_DO_CMD -FORCE_DO_CMD: - -TOOLSET := target -# Suffix rules, putting all outputs into $(obj). -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.m FORCE_DO_CMD - @$(call do_cmd,objc,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.mm FORCE_DO_CMD - @$(call do_cmd,objcxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.S FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.s FORCE_DO_CMD - @$(call do_cmd,cc,1) - -# Try building from generated source, too. 
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.m FORCE_DO_CMD - @$(call do_cmd,objc,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.mm FORCE_DO_CMD - @$(call do_cmd,objcxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.S FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.s FORCE_DO_CMD - @$(call do_cmd,cc,1) - -$(obj).$(TOOLSET)/%.o: $(obj)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.m FORCE_DO_CMD - @$(call do_cmd,objc,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.mm FORCE_DO_CMD - @$(call do_cmd,objcxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.S FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.s FORCE_DO_CMD - @$(call do_cmd,cc,1) - - -ifeq ($(strip $(foreach prefix,$(NO_LOAD),\ - $(findstring $(join ^,$(prefix)),\ - $(join ^,binding.target.mk)))),) - include binding.target.mk -endif - -quiet_cmd_regen_makefile = ACTION Regenerating $@ -cmd_regen_makefile = cd $(srcdir); /Users/trott/io.js/deps/npm/node_modules/node-gyp/gyp/gyp_main.py -fmake --ignore-environment "--toplevel-dir=." -I/Users/trott/io.js/test/addons-napi/8_passing_wrapped/build/config.gypi -I/Users/trott/io.js/deps/npm/node_modules/node-gyp/addon.gypi -I/Users/trott/io.js/common.gypi "--depth=." "-Goutput_dir=." "--generator-output=build" "-Dlibrary=shared_library" "-Dvisibility=default" "-Dnode_root_dir=/Users/trott/io.js" "-Dnode_gyp_dir=/Users/trott/io.js/deps/npm/node_modules/node-gyp" "-Dnode_lib_file=/Users/trott/io.js/$(Configuration)/node.lib" "-Dmodule_root_dir=/Users/trott/io.js/test/addons-napi/8_passing_wrapped" "-Dnode_engine=v8" binding.gyp -Makefile: $(srcdir)/../../../deps/npm/node_modules/node-gyp/addon.gypi $(srcdir)/build/config.gypi $(srcdir)/binding.gyp $(srcdir)/../../../common.gypi - $(call do_cmd,regen_makefile) - -# "all" is a concatenation of the "all" targets from all the included -# sub-makefiles. This is just here to clarify. -all: - -# Add in dependency-tracking rules. $(all_deps) is the list of every single -# target in our tree. Only consider the ones with .d (dependency) info: -d_files := $(wildcard $(foreach f,$(all_deps),$(depsdir)/$(f).d)) -ifneq ($(d_files),) - include $(d_files) -endif diff --git a/test/addons-napi/8_passing_wrapped/build/binding.Makefile b/test/addons-napi/8_passing_wrapped/build/binding.Makefile deleted file mode 100644 index 0556c70f61905f..00000000000000 --- a/test/addons-napi/8_passing_wrapped/build/binding.Makefile +++ /dev/null @@ -1,6 +0,0 @@ -# This file is generated by gyp; do not edit. - -export builddir_name ?= ./build/. -.PHONY: all -all: - $(MAKE) binding diff --git a/test/addons-napi/8_passing_wrapped/build/binding.target.mk b/test/addons-napi/8_passing_wrapped/build/binding.target.mk deleted file mode 100644 index 6bca2c1f4f6abd..00000000000000 --- a/test/addons-napi/8_passing_wrapped/build/binding.target.mk +++ /dev/null @@ -1,179 +0,0 @@ -# This file is generated by gyp; do not edit. 
- -TOOLSET := target -TARGET := binding -DEFS_Debug := \ - '-DNODE_GYP_MODULE_NAME=binding' \ - '-DUSING_UV_SHARED=1' \ - '-DUSING_V8_SHARED=1' \ - '-DV8_DEPRECATION_WARNINGS=1' \ - '-D_DARWIN_USE_64_BIT_INODE=1' \ - '-D_LARGEFILE_SOURCE' \ - '-D_FILE_OFFSET_BITS=64' \ - '-DBUILDING_NODE_EXTENSION' \ - '-DDEBUG' \ - '-D_DEBUG' \ - '-DV8_ENABLE_CHECKS' - -# Flags passed to all source files. -CFLAGS_Debug := \ - -O0 \ - -gdwarf-2 \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -Wall \ - -Wendif-labels \ - -W \ - -Wno-unused-parameter - -# Flags passed to only C files. -CFLAGS_C_Debug := \ - -fno-strict-aliasing - -# Flags passed to only C++ files. -CFLAGS_CC_Debug := \ - -std=gnu++0x \ - -stdlib=libc++ \ - -fno-rtti \ - -fno-exceptions \ - -fno-threadsafe-statics \ - -fno-strict-aliasing - -# Flags passed to only ObjC files. -CFLAGS_OBJC_Debug := - -# Flags passed to only ObjC++ files. -CFLAGS_OBJCC_Debug := - -INCS_Debug := \ - -I/Users/trott/io.js/include/node \ - -I/Users/trott/io.js/src \ - -I/Users/trott/io.js/deps/uv/include \ - -I/Users/trott/io.js/deps/v8/include - -DEFS_Release := \ - '-DNODE_GYP_MODULE_NAME=binding' \ - '-DUSING_UV_SHARED=1' \ - '-DUSING_V8_SHARED=1' \ - '-DV8_DEPRECATION_WARNINGS=1' \ - '-D_DARWIN_USE_64_BIT_INODE=1' \ - '-D_LARGEFILE_SOURCE' \ - '-D_FILE_OFFSET_BITS=64' \ - '-DBUILDING_NODE_EXTENSION' - -# Flags passed to all source files. -CFLAGS_Release := \ - -Os \ - -gdwarf-2 \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -Wall \ - -Wendif-labels \ - -W \ - -Wno-unused-parameter - -# Flags passed to only C files. -CFLAGS_C_Release := \ - -fno-strict-aliasing - -# Flags passed to only C++ files. -CFLAGS_CC_Release := \ - -std=gnu++0x \ - -stdlib=libc++ \ - -fno-rtti \ - -fno-exceptions \ - -fno-threadsafe-statics \ - -fno-strict-aliasing - -# Flags passed to only ObjC files. -CFLAGS_OBJC_Release := - -# Flags passed to only ObjC++ files. -CFLAGS_OBJCC_Release := - -INCS_Release := \ - -I/Users/trott/io.js/include/node \ - -I/Users/trott/io.js/src \ - -I/Users/trott/io.js/deps/uv/include \ - -I/Users/trott/io.js/deps/v8/include - -OBJS := \ - $(obj).target/$(TARGET)/binding.o \ - $(obj).target/$(TARGET)/myobject.o - -# Add to the list of files we specially track dependencies for. -all_deps += $(OBJS) - -# CFLAGS et al overrides must be target-local. -# See "Target-specific Variable Values" in the GNU Make manual. -$(OBJS): TOOLSET := $(TOOLSET) -$(OBJS): GYP_CFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) -$(OBJS): GYP_CXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) -$(OBJS): GYP_OBJCFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) $(CFLAGS_OBJC_$(BUILDTYPE)) -$(OBJS): GYP_OBJCXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) $(CFLAGS_OBJCC_$(BUILDTYPE)) - -# Suffix rules, putting all outputs into $(obj). - -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) - -# Try building from generated source, too. - -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) - -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) - -# End of this set of suffix rules -### Rules for final target. 
-LDFLAGS_Debug := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -L$(builddir) \ - -stdlib=libc++ - -LIBTOOLFLAGS_Debug := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first - -LDFLAGS_Release := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -L$(builddir) \ - -stdlib=libc++ - -LIBTOOLFLAGS_Release := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first - -LIBS := - -$(builddir)/binding.node: GYP_LDFLAGS := $(LDFLAGS_$(BUILDTYPE)) -$(builddir)/binding.node: LIBS := $(LIBS) -$(builddir)/binding.node: GYP_LIBTOOLFLAGS := $(LIBTOOLFLAGS_$(BUILDTYPE)) -$(builddir)/binding.node: TOOLSET := $(TOOLSET) -$(builddir)/binding.node: $(OBJS) FORCE_DO_CMD - $(call do_cmd,solink_module) - -all_deps += $(builddir)/binding.node -# Add target alias -.PHONY: binding -binding: $(builddir)/binding.node - -# Short alias for building this executable. -.PHONY: binding.node -binding.node: $(builddir)/binding.node - -# Add executable to "all" target. -.PHONY: all -all: $(builddir)/binding.node - diff --git a/test/addons-napi/8_passing_wrapped/build/config.gypi b/test/addons-napi/8_passing_wrapped/build/config.gypi deleted file mode 100644 index 6a35cfccbb772b..00000000000000 --- a/test/addons-napi/8_passing_wrapped/build/config.gypi +++ /dev/null @@ -1,70 +0,0 @@ -# Do not edit. File was generated by node-gyp's "configure" step -{ - "target_defaults": { - "cflags": [], - "default_configuration": "Release", - "defines": [], - "include_dirs": [], - "libraries": [] - }, - "variables": { - "asan": 0, - "coverage": "false", - "debug_devtools": "node", - "debug_http2": "false", - "debug_nghttp2": "false", - "force_dynamic_crt": 0, - "host_arch": "x64", - "icu_data_file": "icudt59l.dat", - "icu_data_in": "../../deps/icu-small/source/data/in/icudt59l.dat", - "icu_endianness": "l", - "icu_gyp_path": "tools/icu/icu-generic.gyp", - "icu_locales": "en,root", - "icu_path": "deps/icu-small", - "icu_small": "true", - "icu_ver_major": "59", - "llvm_version": 0, - "node_byteorder": "little", - "node_enable_d8": "false", - "node_enable_v8_vtunejit": "false", - "node_install_npm": "true", - "node_module_version": 58, - "node_no_browser_globals": "false", - "node_prefix": "/usr/local", - "node_release_urlbase": "", - "node_shared": "false", - "node_shared_cares": "false", - "node_shared_http_parser": "false", - "node_shared_libuv": "false", - "node_shared_openssl": "false", - "node_shared_zlib": "false", - "node_tag": "", - "node_use_bundled_v8": "true", - "node_use_dtrace": "true", - "node_use_etw": "false", - "node_use_lttng": "false", - "node_use_openssl": "true", - "node_use_perfctr": "false", - "node_use_v8_platform": "true", - "node_without_node_options": "false", - "openssl_fips": "", - "openssl_no_asm": 0, - "shlib_suffix": "58.dylib", - "target_arch": "x64", - "uv_parent_path": "/deps/uv/", - "uv_use_dtrace": "true", - "v8_enable_gdbjit": 0, - "v8_enable_i18n_support": 1, - "v8_enable_inspector": 1, - "v8_no_strict_aliasing": 1, - "v8_optimized_debug": 0, - "v8_promise_internal_field_count": 1, - "v8_random_seed": 0, - "v8_trace_maps": 0, - "v8_use_snapshot": "true", - "want_separate_host_toolset": 0, - "xcode_version": "8.0", - "nodedir": "/Users/trott/io.js", - "standalone_static_library": 1 - } -} diff --git a/test/addons-napi/8_passing_wrapped/build/gyp-mac-tool b/test/addons-napi/8_passing_wrapped/build/gyp-mac-tool deleted 
file mode 100755 index 8ef02b0493a003..00000000000000 --- a/test/addons-napi/8_passing_wrapped/build/gyp-mac-tool +++ /dev/null @@ -1,611 +0,0 @@ -#!/usr/bin/env python -# Generated by gyp. Do not edit. -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Utility functions to perform Xcode-style build steps. - -These functions are executed via gyp-mac-tool when using the Makefile generator. -""" - -import fcntl -import fnmatch -import glob -import json -import os -import plistlib -import re -import shutil -import string -import subprocess -import sys -import tempfile - - -def main(args): - executor = MacTool() - exit_code = executor.Dispatch(args) - if exit_code is not None: - sys.exit(exit_code) - - -class MacTool(object): - """This class performs all the Mac tooling steps. The methods can either be - executed directly, or dispatched from an argument list.""" - - def Dispatch(self, args): - """Dispatches a string command to a method.""" - if len(args) < 1: - raise Exception("Not enough arguments") - - method = "Exec%s" % self._CommandifyName(args[0]) - return getattr(self, method)(*args[1:]) - - def _CommandifyName(self, name_string): - """Transforms a tool name like copy-info-plist to CopyInfoPlist""" - return name_string.title().replace('-', '') - - def ExecCopyBundleResource(self, source, dest, convert_to_binary): - """Copies a resource file to the bundle/Resources directory, performing any - necessary compilation on each resource.""" - extension = os.path.splitext(source)[1].lower() - if os.path.isdir(source): - # Copy tree. - # TODO(thakis): This copies file attributes like mtime, while the - # single-file branch below doesn't. This should probably be changed to - # be consistent with the single-file branch. - if os.path.exists(dest): - shutil.rmtree(dest) - shutil.copytree(source, dest) - elif extension == '.xib': - return self._CopyXIBFile(source, dest) - elif extension == '.storyboard': - return self._CopyXIBFile(source, dest) - elif extension == '.strings': - self._CopyStringsFile(source, dest, convert_to_binary) - else: - shutil.copy(source, dest) - - def _CopyXIBFile(self, source, dest): - """Compiles a XIB file with ibtool into a binary plist in the bundle.""" - - # ibtool sometimes crashes with relative paths. See crbug.com/314728. 
- base = os.path.dirname(os.path.realpath(__file__)) - if os.path.relpath(source): - source = os.path.join(base, source) - if os.path.relpath(dest): - dest = os.path.join(base, dest) - - args = ['xcrun', 'ibtool', '--errors', '--warnings', '--notices', - '--output-format', 'human-readable-text', '--compile', dest, source] - ibtool_section_re = re.compile(r'/\*.*\*/') - ibtool_re = re.compile(r'.*note:.*is clipping its content') - ibtoolout = subprocess.Popen(args, stdout=subprocess.PIPE) - current_section_header = None - for line in ibtoolout.stdout: - if ibtool_section_re.match(line): - current_section_header = line - elif not ibtool_re.match(line): - if current_section_header: - sys.stdout.write(current_section_header) - current_section_header = None - sys.stdout.write(line) - return ibtoolout.returncode - - def _ConvertToBinary(self, dest): - subprocess.check_call([ - 'xcrun', 'plutil', '-convert', 'binary1', '-o', dest, dest]) - - def _CopyStringsFile(self, source, dest, convert_to_binary): - """Copies a .strings file using iconv to reconvert the input into UTF-16.""" - input_code = self._DetectInputEncoding(source) or "UTF-8" - - # Xcode's CpyCopyStringsFile / builtin-copyStrings seems to call - # CFPropertyListCreateFromXMLData() behind the scenes; at least it prints - # CFPropertyListCreateFromXMLData(): Old-style plist parser: missing - # semicolon in dictionary. - # on invalid files. Do the same kind of validation. - import CoreFoundation - s = open(source, 'rb').read() - d = CoreFoundation.CFDataCreate(None, s, len(s)) - _, error = CoreFoundation.CFPropertyListCreateFromXMLData(None, d, 0, None) - if error: - return - - fp = open(dest, 'wb') - fp.write(s.decode(input_code).encode('UTF-16')) - fp.close() - - if convert_to_binary == 'True': - self._ConvertToBinary(dest) - - def _DetectInputEncoding(self, file_name): - """Reads the first few bytes from file_name and tries to guess the text - encoding. Returns None as a guess if it can't detect it.""" - fp = open(file_name, 'rb') - try: - header = fp.read(3) - except e: - fp.close() - return None - fp.close() - if header.startswith("\xFE\xFF"): - return "UTF-16" - elif header.startswith("\xFF\xFE"): - return "UTF-16" - elif header.startswith("\xEF\xBB\xBF"): - return "UTF-8" - else: - return None - - def ExecCopyInfoPlist(self, source, dest, convert_to_binary, *keys): - """Copies the |source| Info.plist to the destination directory |dest|.""" - # Read the source Info.plist into memory. - fd = open(source, 'r') - lines = fd.read() - fd.close() - - # Insert synthesized key/value pairs (e.g. BuildMachineOSBuild). - plist = plistlib.readPlistFromString(lines) - if keys: - plist = dict(plist.items() + json.loads(keys[0]).items()) - lines = plistlib.writePlistToString(plist) - - # Go through all the environment variables and replace them as variables in - # the file. - IDENT_RE = re.compile(r'[/\s]') - for key in os.environ: - if key.startswith('_'): - continue - evar = '${%s}' % key - evalue = os.environ[key] - lines = string.replace(lines, evar, evalue) - - # Xcode supports various suffices on environment variables, which are - # all undocumented. :rfc1034identifier is used in the standard project - # template these days, and :identifier was used earlier. They are used to - # convert non-url characters into things that look like valid urls -- - # except that the replacement character for :identifier, '_' isn't valid - # in a URL either -- oops, hence :rfc1034identifier was born. 
- evar = '${%s:identifier}' % key - evalue = IDENT_RE.sub('_', os.environ[key]) - lines = string.replace(lines, evar, evalue) - - evar = '${%s:rfc1034identifier}' % key - evalue = IDENT_RE.sub('-', os.environ[key]) - lines = string.replace(lines, evar, evalue) - - # Remove any keys with values that haven't been replaced. - lines = lines.split('\n') - for i in range(len(lines)): - if lines[i].strip().startswith("${"): - lines[i] = None - lines[i - 1] = None - lines = '\n'.join(filter(lambda x: x is not None, lines)) - - # Write out the file with variables replaced. - fd = open(dest, 'w') - fd.write(lines) - fd.close() - - # Now write out PkgInfo file now that the Info.plist file has been - # "compiled". - self._WritePkgInfo(dest) - - if convert_to_binary == 'True': - self._ConvertToBinary(dest) - - def _WritePkgInfo(self, info_plist): - """This writes the PkgInfo file from the data stored in Info.plist.""" - plist = plistlib.readPlist(info_plist) - if not plist: - return - - # Only create PkgInfo for executable types. - package_type = plist['CFBundlePackageType'] - if package_type != 'APPL': - return - - # The format of PkgInfo is eight characters, representing the bundle type - # and bundle signature, each four characters. If that is missing, four - # '?' characters are used instead. - signature_code = plist.get('CFBundleSignature', '????') - if len(signature_code) != 4: # Wrong length resets everything, too. - signature_code = '?' * 4 - - dest = os.path.join(os.path.dirname(info_plist), 'PkgInfo') - fp = open(dest, 'w') - fp.write('%s%s' % (package_type, signature_code)) - fp.close() - - def ExecFlock(self, lockfile, *cmd_list): - """Emulates the most basic behavior of Linux's flock(1).""" - # Rely on exception handling to report errors. - fd = os.open(lockfile, os.O_RDONLY|os.O_NOCTTY|os.O_CREAT, 0o666) - fcntl.flock(fd, fcntl.LOCK_EX) - return subprocess.call(cmd_list) - - def ExecFilterLibtool(self, *cmd_list): - """Calls libtool and filters out '/path/to/libtool: file: foo.o has no - symbols'.""" - libtool_re = re.compile(r'^.*libtool: file: .* has no symbols$') - libtool_re5 = re.compile( - r'^.*libtool: warning for library: ' + - r'.* the table of contents is empty ' + - r'\(no object file members in the library define global symbols\)$') - env = os.environ.copy() - # Ref: - # http://www.opensource.apple.com/source/cctools/cctools-809/misc/libtool.c - # The problem with this flag is that it resets the file mtime on the file to - # epoch=0, e.g. 1970-1-1 or 1969-12-31 depending on timezone. - env['ZERO_AR_DATE'] = '1' - libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env) - _, err = libtoolout.communicate() - for line in err.splitlines(): - if not libtool_re.match(line) and not libtool_re5.match(line): - print >>sys.stderr, line - # Unconditionally touch the output .a file on the command line if present - # and the command succeeded. A bit hacky. - if not libtoolout.returncode: - for i in range(len(cmd_list) - 1): - if cmd_list[i] == "-o" and cmd_list[i+1].endswith('.a'): - os.utime(cmd_list[i+1], None) - break - return libtoolout.returncode - - def ExecPackageFramework(self, framework, version): - """Takes a path to Something.framework and the Current version of that and - sets up all the symlinks.""" - # Find the name of the binary based on the part before the ".framework". 
- binary = os.path.basename(framework).split('.')[0] - - CURRENT = 'Current' - RESOURCES = 'Resources' - VERSIONS = 'Versions' - - if not os.path.exists(os.path.join(framework, VERSIONS, version, binary)): - # Binary-less frameworks don't seem to contain symlinks (see e.g. - # chromium's out/Debug/org.chromium.Chromium.manifest/ bundle). - return - - # Move into the framework directory to set the symlinks correctly. - pwd = os.getcwd() - os.chdir(framework) - - # Set up the Current version. - self._Relink(version, os.path.join(VERSIONS, CURRENT)) - - # Set up the root symlinks. - self._Relink(os.path.join(VERSIONS, CURRENT, binary), binary) - self._Relink(os.path.join(VERSIONS, CURRENT, RESOURCES), RESOURCES) - - # Back to where we were before! - os.chdir(pwd) - - def _Relink(self, dest, link): - """Creates a symlink to |dest| named |link|. If |link| already exists, - it is overwritten.""" - if os.path.lexists(link): - os.remove(link) - os.symlink(dest, link) - - def ExecCompileXcassets(self, keys, *inputs): - """Compiles multiple .xcassets files into a single .car file. - - This invokes 'actool' to compile all the inputs .xcassets files. The - |keys| arguments is a json-encoded dictionary of extra arguments to - pass to 'actool' when the asset catalogs contains an application icon - or a launch image. - - Note that 'actool' does not create the Assets.car file if the asset - catalogs does not contains imageset. - """ - command_line = [ - 'xcrun', 'actool', '--output-format', 'human-readable-text', - '--compress-pngs', '--notices', '--warnings', '--errors', - ] - is_iphone_target = 'IPHONEOS_DEPLOYMENT_TARGET' in os.environ - if is_iphone_target: - platform = os.environ['CONFIGURATION'].split('-')[-1] - if platform not in ('iphoneos', 'iphonesimulator'): - platform = 'iphonesimulator' - command_line.extend([ - '--platform', platform, '--target-device', 'iphone', - '--target-device', 'ipad', '--minimum-deployment-target', - os.environ['IPHONEOS_DEPLOYMENT_TARGET'], '--compile', - os.path.abspath(os.environ['CONTENTS_FOLDER_PATH']), - ]) - else: - command_line.extend([ - '--platform', 'macosx', '--target-device', 'mac', - '--minimum-deployment-target', os.environ['MACOSX_DEPLOYMENT_TARGET'], - '--compile', - os.path.abspath(os.environ['UNLOCALIZED_RESOURCES_FOLDER_PATH']), - ]) - if keys: - keys = json.loads(keys) - for key, value in keys.iteritems(): - arg_name = '--' + key - if isinstance(value, bool): - if value: - command_line.append(arg_name) - elif isinstance(value, list): - for v in value: - command_line.append(arg_name) - command_line.append(str(v)) - else: - command_line.append(arg_name) - command_line.append(str(value)) - # Note: actool crashes if inputs path are relative, so use os.path.abspath - # to get absolute path name for inputs. - command_line.extend(map(os.path.abspath, inputs)) - subprocess.check_call(command_line) - - def ExecMergeInfoPlist(self, output, *inputs): - """Merge multiple .plist files into a single .plist file.""" - merged_plist = {} - for path in inputs: - plist = self._LoadPlistMaybeBinary(path) - self._MergePlist(merged_plist, plist) - plistlib.writePlist(merged_plist, output) - - def ExecCodeSignBundle(self, key, resource_rules, entitlements, provisioning): - """Code sign a bundle. - - This function tries to code sign an iOS bundle, following the same - algorithm as Xcode: - 1. copy ResourceRules.plist from the user or the SDK into the bundle, - 2. 
pick the provisioning profile that best match the bundle identifier, - and copy it into the bundle as embedded.mobileprovision, - 3. copy Entitlements.plist from user or SDK next to the bundle, - 4. code sign the bundle. - """ - resource_rules_path = self._InstallResourceRules(resource_rules) - substitutions, overrides = self._InstallProvisioningProfile( - provisioning, self._GetCFBundleIdentifier()) - entitlements_path = self._InstallEntitlements( - entitlements, substitutions, overrides) - subprocess.check_call([ - 'codesign', '--force', '--sign', key, '--resource-rules', - resource_rules_path, '--entitlements', entitlements_path, - os.path.join( - os.environ['TARGET_BUILD_DIR'], - os.environ['FULL_PRODUCT_NAME'])]) - - def _InstallResourceRules(self, resource_rules): - """Installs ResourceRules.plist from user or SDK into the bundle. - - Args: - resource_rules: string, optional, path to the ResourceRules.plist file - to use, default to "${SDKROOT}/ResourceRules.plist" - - Returns: - Path to the copy of ResourceRules.plist into the bundle. - """ - source_path = resource_rules - target_path = os.path.join( - os.environ['BUILT_PRODUCTS_DIR'], - os.environ['CONTENTS_FOLDER_PATH'], - 'ResourceRules.plist') - if not source_path: - source_path = os.path.join( - os.environ['SDKROOT'], 'ResourceRules.plist') - shutil.copy2(source_path, target_path) - return target_path - - def _InstallProvisioningProfile(self, profile, bundle_identifier): - """Installs embedded.mobileprovision into the bundle. - - Args: - profile: string, optional, short name of the .mobileprovision file - to use, if empty or the file is missing, the best file installed - will be used - bundle_identifier: string, value of CFBundleIdentifier from Info.plist - - Returns: - A tuple containing two dictionary: variables substitutions and values - to overrides when generating the entitlements file. - """ - source_path, provisioning_data, team_id = self._FindProvisioningProfile( - profile, bundle_identifier) - target_path = os.path.join( - os.environ['BUILT_PRODUCTS_DIR'], - os.environ['CONTENTS_FOLDER_PATH'], - 'embedded.mobileprovision') - shutil.copy2(source_path, target_path) - substitutions = self._GetSubstitutions(bundle_identifier, team_id + '.') - return substitutions, provisioning_data['Entitlements'] - - def _FindProvisioningProfile(self, profile, bundle_identifier): - """Finds the .mobileprovision file to use for signing the bundle. - - Checks all the installed provisioning profiles (or if the user specified - the PROVISIONING_PROFILE variable, only consult it) and select the most - specific that correspond to the bundle identifier. - - Args: - profile: string, optional, short name of the .mobileprovision file - to use, if empty or the file is missing, the best file installed - will be used - bundle_identifier: string, value of CFBundleIdentifier from Info.plist - - Returns: - A tuple of the path to the selected provisioning profile, the data of - the embedded plist in the provisioning profile and the team identifier - to use for code signing. - - Raises: - SystemExit: if no .mobileprovision can be used to sign the bundle. 
- """ - profiles_dir = os.path.join( - os.environ['HOME'], 'Library', 'MobileDevice', 'Provisioning Profiles') - if not os.path.isdir(profiles_dir): - print >>sys.stderr, ( - 'cannot find mobile provisioning for %s' % bundle_identifier) - sys.exit(1) - provisioning_profiles = None - if profile: - profile_path = os.path.join(profiles_dir, profile + '.mobileprovision') - if os.path.exists(profile_path): - provisioning_profiles = [profile_path] - if not provisioning_profiles: - provisioning_profiles = glob.glob( - os.path.join(profiles_dir, '*.mobileprovision')) - valid_provisioning_profiles = {} - for profile_path in provisioning_profiles: - profile_data = self._LoadProvisioningProfile(profile_path) - app_id_pattern = profile_data.get( - 'Entitlements', {}).get('application-identifier', '') - for team_identifier in profile_data.get('TeamIdentifier', []): - app_id = '%s.%s' % (team_identifier, bundle_identifier) - if fnmatch.fnmatch(app_id, app_id_pattern): - valid_provisioning_profiles[app_id_pattern] = ( - profile_path, profile_data, team_identifier) - if not valid_provisioning_profiles: - print >>sys.stderr, ( - 'cannot find mobile provisioning for %s' % bundle_identifier) - sys.exit(1) - # If the user has multiple provisioning profiles installed that can be - # used for ${bundle_identifier}, pick the most specific one (ie. the - # provisioning profile whose pattern is the longest). - selected_key = max(valid_provisioning_profiles, key=lambda v: len(v)) - return valid_provisioning_profiles[selected_key] - - def _LoadProvisioningProfile(self, profile_path): - """Extracts the plist embedded in a provisioning profile. - - Args: - profile_path: string, path to the .mobileprovision file - - Returns: - Content of the plist embedded in the provisioning profile as a dictionary. - """ - with tempfile.NamedTemporaryFile() as temp: - subprocess.check_call([ - 'security', 'cms', '-D', '-i', profile_path, '-o', temp.name]) - return self._LoadPlistMaybeBinary(temp.name) - - def _MergePlist(self, merged_plist, plist): - """Merge |plist| into |merged_plist|.""" - for key, value in plist.iteritems(): - if isinstance(value, dict): - merged_value = merged_plist.get(key, {}) - if isinstance(merged_value, dict): - self._MergePlist(merged_value, value) - merged_plist[key] = merged_value - else: - merged_plist[key] = value - else: - merged_plist[key] = value - - def _LoadPlistMaybeBinary(self, plist_path): - """Loads into a memory a plist possibly encoded in binary format. - - This is a wrapper around plistlib.readPlist that tries to convert the - plist to the XML format if it can't be parsed (assuming that it is in - the binary format). - - Args: - plist_path: string, path to a plist file, in XML or binary format - - Returns: - Content of the plist as a dictionary. - """ - try: - # First, try to read the file using plistlib that only supports XML, - # and if an exception is raised, convert a temporary copy to XML and - # load that copy. - return plistlib.readPlist(plist_path) - except: - pass - with tempfile.NamedTemporaryFile() as temp: - shutil.copy2(plist_path, temp.name) - subprocess.check_call(['plutil', '-convert', 'xml1', temp.name]) - return plistlib.readPlist(temp.name) - - def _GetSubstitutions(self, bundle_identifier, app_identifier_prefix): - """Constructs a dictionary of variable substitutions for Entitlements.plist. 
- - Args: - bundle_identifier: string, value of CFBundleIdentifier from Info.plist - app_identifier_prefix: string, value for AppIdentifierPrefix - - Returns: - Dictionary of substitutions to apply when generating Entitlements.plist. - """ - return { - 'CFBundleIdentifier': bundle_identifier, - 'AppIdentifierPrefix': app_identifier_prefix, - } - - def _GetCFBundleIdentifier(self): - """Extracts CFBundleIdentifier value from Info.plist in the bundle. - - Returns: - Value of CFBundleIdentifier in the Info.plist located in the bundle. - """ - info_plist_path = os.path.join( - os.environ['TARGET_BUILD_DIR'], - os.environ['INFOPLIST_PATH']) - info_plist_data = self._LoadPlistMaybeBinary(info_plist_path) - return info_plist_data['CFBundleIdentifier'] - - def _InstallEntitlements(self, entitlements, substitutions, overrides): - """Generates and install the ${BundleName}.xcent entitlements file. - - Expands variables "$(variable)" pattern in the source entitlements file, - add extra entitlements defined in the .mobileprovision file and the copy - the generated plist to "${BundlePath}.xcent". - - Args: - entitlements: string, optional, path to the Entitlements.plist template - to use, defaults to "${SDKROOT}/Entitlements.plist" - substitutions: dictionary, variable substitutions - overrides: dictionary, values to add to the entitlements - - Returns: - Path to the generated entitlements file. - """ - source_path = entitlements - target_path = os.path.join( - os.environ['BUILT_PRODUCTS_DIR'], - os.environ['PRODUCT_NAME'] + '.xcent') - if not source_path: - source_path = os.path.join( - os.environ['SDKROOT'], - 'Entitlements.plist') - shutil.copy2(source_path, target_path) - data = self._LoadPlistMaybeBinary(target_path) - data = self._ExpandVariables(data, substitutions) - if overrides: - for key in overrides: - if key not in data: - data[key] = overrides[key] - plistlib.writePlist(data, target_path) - return target_path - - def _ExpandVariables(self, data, substitutions): - """Expands variables "$(variable)" in data. - - Args: - data: object, can be either string, list or dictionary - substitutions: dictionary, variable substitutions to perform - - Returns: - Copy of data where each references to "$(variable)" has been replaced - by the corresponding value found in substitutions, or left intact if - the key was not found. - """ - if isinstance(data, str): - for key, value in substitutions.iteritems(): - data = data.replace('$(%s)' % key, value) - return data - if isinstance(data, list): - return [self._ExpandVariables(v, substitutions) for v in data] - if isinstance(data, dict): - return {k: self._ExpandVariables(data[k], substitutions) for k in data} - return data - -if __name__ == '__main__': - sys.exit(main(sys.argv[1:])) diff --git a/test/addons-napi/test_array/build/Makefile b/test/addons-napi/test_array/build/Makefile deleted file mode 100644 index 0293bded3c6164..00000000000000 --- a/test/addons-napi/test_array/build/Makefile +++ /dev/null @@ -1,342 +0,0 @@ -# We borrow heavily from the kernel build setup, though we are simpler since -# we don't have Kconfig tweaking settings on us. - -# The implicit make rules have it looking for RCS files, among other things. -# We instead explicitly write all the rules we care about. -# It's even quicker (saves ~200ms) to pass -r on the command line. -MAKEFLAGS=-r - -# The source directory tree. -srcdir := .. -abs_srcdir := $(abspath $(srcdir)) - -# The name of the builddir. -builddir_name ?= . 
- -# The V=1 flag on command line makes us verbosely print command lines. -ifdef V - quiet= -else - quiet=quiet_ -endif - -# Specify BUILDTYPE=Release on the command line for a release build. -BUILDTYPE ?= Release - -# Directory all our build output goes into. -# Note that this must be two directories beneath src/ for unit tests to pass, -# as they reach into the src/ directory for data with relative paths. -builddir ?= $(builddir_name)/$(BUILDTYPE) -abs_builddir := $(abspath $(builddir)) -depsdir := $(builddir)/.deps - -# Object output directory. -obj := $(builddir)/obj -abs_obj := $(abspath $(obj)) - -# We build up a list of every single one of the targets so we can slurp in the -# generated dependency rule Makefiles in one pass. -all_deps := - - - -CC.target ?= $(CC) -CFLAGS.target ?= $(CPPFLAGS) $(CFLAGS) -CXX.target ?= $(CXX) -CXXFLAGS.target ?= $(CPPFLAGS) $(CXXFLAGS) -LINK.target ?= $(LINK) -LDFLAGS.target ?= $(LDFLAGS) -AR.target ?= $(AR) - -# C++ apps need to be linked with g++. -LINK ?= $(CXX.target) - -# TODO(evan): move all cross-compilation logic to gyp-time so we don't need -# to replicate this environment fallback in make as well. -CC.host ?= gcc -CFLAGS.host ?= $(CPPFLAGS_host) $(CFLAGS_host) -CXX.host ?= g++ -CXXFLAGS.host ?= $(CPPFLAGS_host) $(CXXFLAGS_host) -LINK.host ?= $(CXX.host) -LDFLAGS.host ?= -AR.host ?= ar - -# Define a dir function that can handle spaces. -# http://www.gnu.org/software/make/manual/make.html#Syntax-of-Functions -# "leading spaces cannot appear in the text of the first argument as written. -# These characters can be put into the argument value by variable substitution." -empty := -space := $(empty) $(empty) - -# http://stackoverflow.com/questions/1189781/using-make-dir-or-notdir-on-a-path-with-spaces -replace_spaces = $(subst $(space),?,$1) -unreplace_spaces = $(subst ?,$(space),$1) -dirx = $(call unreplace_spaces,$(dir $(call replace_spaces,$1))) - -# Flags to make gcc output dependency info. Note that you need to be -# careful here to use the flags that ccache and distcc can understand. -# We write to a dep file on the side first and then rename at the end -# so we can't end up with a broken dep file. -depfile = $(depsdir)/$(call replace_spaces,$@).d -DEPFLAGS = -MMD -MF $(depfile).raw - -# We have to fixup the deps output in a few ways. -# (1) the file output should mention the proper .o file. -# ccache or distcc lose the path to the target, so we convert a rule of -# the form: -# foobar.o: DEP1 DEP2 -# into -# path/to/foobar.o: DEP1 DEP2 -# (2) we want missing files not to cause us to fail to build. -# We want to rewrite -# foobar.o: DEP1 DEP2 \ -# DEP3 -# to -# DEP1: -# DEP2: -# DEP3: -# so if the files are missing, they're just considered phony rules. -# We have to do some pretty insane escaping to get those backslashes -# and dollar signs past make, the shell, and sed at the same time. -# Doesn't work with spaces, but that's fine: .d files have spaces in -# their names replaced with other characters. -define fixup_dep -# The depfile may not exist if the input file didn't have any #includes. -touch $(depfile).raw -# Fixup path as in (1). -sed -e "s|^$(notdir $@)|$@|" $(depfile).raw >> $(depfile) -# Add extra rules as in (2). -# We remove slashes and replace spaces with new lines; -# remove blank lines; -# delete the first line and append a colon to the remaining lines. 
-sed -e 's|\\||' -e 'y| |\n|' $(depfile).raw |\ - grep -v '^$$' |\ - sed -e 1d -e 's|$$|:|' \ - >> $(depfile) -rm $(depfile).raw -endef - -# Command definitions: -# - cmd_foo is the actual command to run; -# - quiet_cmd_foo is the brief-output summary of the command. - -quiet_cmd_cc = CC($(TOOLSET)) $@ -cmd_cc = $(CC.$(TOOLSET)) $(GYP_CFLAGS) $(DEPFLAGS) $(CFLAGS.$(TOOLSET)) -c -o $@ $< - -quiet_cmd_cxx = CXX($(TOOLSET)) $@ -cmd_cxx = $(CXX.$(TOOLSET)) $(GYP_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< - -quiet_cmd_objc = CXX($(TOOLSET)) $@ -cmd_objc = $(CC.$(TOOLSET)) $(GYP_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $< - -quiet_cmd_objcxx = CXX($(TOOLSET)) $@ -cmd_objcxx = $(CXX.$(TOOLSET)) $(GYP_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $< - -# Commands for precompiled header files. -quiet_cmd_pch_c = CXX($(TOOLSET)) $@ -cmd_pch_c = $(CC.$(TOOLSET)) $(GYP_PCH_CFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< -quiet_cmd_pch_cc = CXX($(TOOLSET)) $@ -cmd_pch_cc = $(CC.$(TOOLSET)) $(GYP_PCH_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< -quiet_cmd_pch_m = CXX($(TOOLSET)) $@ -cmd_pch_m = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $< -quiet_cmd_pch_mm = CXX($(TOOLSET)) $@ -cmd_pch_mm = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $< - -# gyp-mac-tool is written next to the root Makefile by gyp. -# Use $(4) for the command, since $(2) and $(3) are used as flag by do_cmd -# already. -quiet_cmd_mac_tool = MACTOOL $(4) $< -cmd_mac_tool = ./gyp-mac-tool $(4) $< "$@" - -quiet_cmd_mac_package_framework = PACKAGE FRAMEWORK $@ -cmd_mac_package_framework = ./gyp-mac-tool package-framework "$@" $(4) - -quiet_cmd_infoplist = INFOPLIST $@ -cmd_infoplist = $(CC.$(TOOLSET)) -E -P -Wno-trigraphs -x c $(INFOPLIST_DEFINES) "$<" -o "$@" - -quiet_cmd_touch = TOUCH $@ -cmd_touch = touch $@ - -quiet_cmd_copy = COPY $@ -# send stderr to /dev/null to ignore messages when linking directories. -cmd_copy = rm -rf "$@" && cp -af "$<" "$@" - -quiet_cmd_alink = LIBTOOL-STATIC $@ -cmd_alink = rm -f $@ && ./gyp-mac-tool filter-libtool libtool $(GYP_LIBTOOLFLAGS) -static -o $@ $(filter %.o,$^) - -quiet_cmd_link = LINK($(TOOLSET)) $@ -cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS) - -quiet_cmd_solink = SOLINK($(TOOLSET)) $@ -cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS) - -quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@ -cmd_solink_module = $(LINK.$(TOOLSET)) -bundle $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS) - - -# Define an escape_quotes function to escape single quotes. -# This allows us to handle quotes properly as long as we always use -# use single quotes and escape_quotes. -escape_quotes = $(subst ','\'',$(1)) -# This comment is here just to include a ' to unconfuse syntax highlighting. -# Define an escape_vars function to escape '$' variable syntax. -# This allows us to read/write command lines with shell variables (e.g. -# $LD_LIBRARY_PATH), without triggering make substitution. -escape_vars = $(subst $$,$$$$,$(1)) -# Helper that expands to a shell command to echo a string exactly as it is in -# make. This uses printf instead of echo because printf's behaviour with respect -# to escape sequences is more portable than echo's across different shells -# (e.g., dash, bash). 
-exact_echo = printf '%s\n' '$(call escape_quotes,$(1))' - -# Helper to compare the command we're about to run against the command -# we logged the last time we ran the command. Produces an empty -# string (false) when the commands match. -# Tricky point: Make has no string-equality test function. -# The kernel uses the following, but it seems like it would have false -# positives, where one string reordered its arguments. -# arg_check = $(strip $(filter-out $(cmd_$(1)), $(cmd_$@)) \ -# $(filter-out $(cmd_$@), $(cmd_$(1)))) -# We instead substitute each for the empty string into the other, and -# say they're equal if both substitutions produce the empty string. -# .d files contain ? instead of spaces, take that into account. -command_changed = $(or $(subst $(cmd_$(1)),,$(cmd_$(call replace_spaces,$@))),\ - $(subst $(cmd_$(call replace_spaces,$@)),,$(cmd_$(1)))) - -# Helper that is non-empty when a prerequisite changes. -# Normally make does this implicitly, but we force rules to always run -# so we can check their command lines. -# $? -- new prerequisites -# $| -- order-only dependencies -prereq_changed = $(filter-out FORCE_DO_CMD,$(filter-out $|,$?)) - -# Helper that executes all postbuilds until one fails. -define do_postbuilds - @E=0;\ - for p in $(POSTBUILDS); do\ - eval $$p;\ - E=$$?;\ - if [ $$E -ne 0 ]; then\ - break;\ - fi;\ - done;\ - if [ $$E -ne 0 ]; then\ - rm -rf "$@";\ - exit $$E;\ - fi -endef - -# do_cmd: run a command via the above cmd_foo names, if necessary. -# Should always run for a given target to handle command-line changes. -# Second argument, if non-zero, makes it do asm/C/C++ dependency munging. -# Third argument, if non-zero, makes it do POSTBUILDS processing. -# Note: We intentionally do NOT call dirx for depfile, since it contains ? for -# spaces already and dirx strips the ? characters. -define do_cmd -$(if $(or $(command_changed),$(prereq_changed)), - @$(call exact_echo, $($(quiet)cmd_$(1))) - @mkdir -p "$(call dirx,$@)" "$(dir $(depfile))" - $(if $(findstring flock,$(word 2,$(cmd_$1))), - @$(cmd_$(1)) - @echo " $(quiet_cmd_$(1)): Finished", - @$(cmd_$(1)) - ) - @$(call exact_echo,$(call escape_vars,cmd_$(call replace_spaces,$@) := $(cmd_$(1)))) > $(depfile) - @$(if $(2),$(fixup_dep)) - $(if $(and $(3), $(POSTBUILDS)), - $(call do_postbuilds) - ) -) -endef - -# Declare the "all" target first so it is the default, -# even though we don't have the deps yet. -.PHONY: all -all: - -# make looks for ways to re-generate included makefiles, but in our case, we -# don't have a direct way. Explicitly telling make that it has nothing to do -# for them makes it go faster. -%.d: ; - -# Use FORCE_DO_CMD to force a target to run. Should be coupled with -# do_cmd. -.PHONY: FORCE_DO_CMD -FORCE_DO_CMD: - -TOOLSET := target -# Suffix rules, putting all outputs into $(obj). -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.m FORCE_DO_CMD - @$(call do_cmd,objc,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.mm FORCE_DO_CMD - @$(call do_cmd,objcxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.S FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.s FORCE_DO_CMD - @$(call do_cmd,cc,1) - -# Try building from generated source, too. 
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.m FORCE_DO_CMD - @$(call do_cmd,objc,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.mm FORCE_DO_CMD - @$(call do_cmd,objcxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.S FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.s FORCE_DO_CMD - @$(call do_cmd,cc,1) - -$(obj).$(TOOLSET)/%.o: $(obj)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.m FORCE_DO_CMD - @$(call do_cmd,objc,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.mm FORCE_DO_CMD - @$(call do_cmd,objcxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.S FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.s FORCE_DO_CMD - @$(call do_cmd,cc,1) - - -ifeq ($(strip $(foreach prefix,$(NO_LOAD),\ - $(findstring $(join ^,$(prefix)),\ - $(join ^,test_array.target.mk)))),) - include test_array.target.mk -endif - -quiet_cmd_regen_makefile = ACTION Regenerating $@ -cmd_regen_makefile = cd $(srcdir); /Users/trott/io.js/deps/npm/node_modules/node-gyp/gyp/gyp_main.py -fmake --ignore-environment "--toplevel-dir=." -I/Users/trott/io.js/test/addons-napi/test_array/build/config.gypi -I/Users/trott/io.js/deps/npm/node_modules/node-gyp/addon.gypi -I/Users/trott/io.js/common.gypi "--depth=." "-Goutput_dir=." "--generator-output=build" "-Dlibrary=shared_library" "-Dvisibility=default" "-Dnode_root_dir=/Users/trott/io.js" "-Dnode_gyp_dir=/Users/trott/io.js/deps/npm/node_modules/node-gyp" "-Dnode_lib_file=/Users/trott/io.js/$(Configuration)/node.lib" "-Dmodule_root_dir=/Users/trott/io.js/test/addons-napi/test_array" "-Dnode_engine=v8" binding.gyp -Makefile: $(srcdir)/../../../deps/npm/node_modules/node-gyp/addon.gypi $(srcdir)/build/config.gypi $(srcdir)/binding.gyp $(srcdir)/../../../common.gypi - $(call do_cmd,regen_makefile) - -# "all" is a concatenation of the "all" targets from all the included -# sub-makefiles. This is just here to clarify. -all: - -# Add in dependency-tracking rules. $(all_deps) is the list of every single -# target in our tree. Only consider the ones with .d (dependency) info: -d_files := $(wildcard $(foreach f,$(all_deps),$(depsdir)/$(f).d)) -ifneq ($(d_files),) - include $(d_files) -endif diff --git a/test/addons-napi/test_array/build/binding.Makefile b/test/addons-napi/test_array/build/binding.Makefile deleted file mode 100644 index c393342cfa470c..00000000000000 --- a/test/addons-napi/test_array/build/binding.Makefile +++ /dev/null @@ -1,6 +0,0 @@ -# This file is generated by gyp; do not edit. - -export builddir_name ?= ./build/. -.PHONY: all -all: - $(MAKE) test_array diff --git a/test/addons-napi/test_array/build/config.gypi b/test/addons-napi/test_array/build/config.gypi deleted file mode 100644 index 6a35cfccbb772b..00000000000000 --- a/test/addons-napi/test_array/build/config.gypi +++ /dev/null @@ -1,70 +0,0 @@ -# Do not edit. 
File was generated by node-gyp's "configure" step -{ - "target_defaults": { - "cflags": [], - "default_configuration": "Release", - "defines": [], - "include_dirs": [], - "libraries": [] - }, - "variables": { - "asan": 0, - "coverage": "false", - "debug_devtools": "node", - "debug_http2": "false", - "debug_nghttp2": "false", - "force_dynamic_crt": 0, - "host_arch": "x64", - "icu_data_file": "icudt59l.dat", - "icu_data_in": "../../deps/icu-small/source/data/in/icudt59l.dat", - "icu_endianness": "l", - "icu_gyp_path": "tools/icu/icu-generic.gyp", - "icu_locales": "en,root", - "icu_path": "deps/icu-small", - "icu_small": "true", - "icu_ver_major": "59", - "llvm_version": 0, - "node_byteorder": "little", - "node_enable_d8": "false", - "node_enable_v8_vtunejit": "false", - "node_install_npm": "true", - "node_module_version": 58, - "node_no_browser_globals": "false", - "node_prefix": "/usr/local", - "node_release_urlbase": "", - "node_shared": "false", - "node_shared_cares": "false", - "node_shared_http_parser": "false", - "node_shared_libuv": "false", - "node_shared_openssl": "false", - "node_shared_zlib": "false", - "node_tag": "", - "node_use_bundled_v8": "true", - "node_use_dtrace": "true", - "node_use_etw": "false", - "node_use_lttng": "false", - "node_use_openssl": "true", - "node_use_perfctr": "false", - "node_use_v8_platform": "true", - "node_without_node_options": "false", - "openssl_fips": "", - "openssl_no_asm": 0, - "shlib_suffix": "58.dylib", - "target_arch": "x64", - "uv_parent_path": "/deps/uv/", - "uv_use_dtrace": "true", - "v8_enable_gdbjit": 0, - "v8_enable_i18n_support": 1, - "v8_enable_inspector": 1, - "v8_no_strict_aliasing": 1, - "v8_optimized_debug": 0, - "v8_promise_internal_field_count": 1, - "v8_random_seed": 0, - "v8_trace_maps": 0, - "v8_use_snapshot": "true", - "want_separate_host_toolset": 0, - "xcode_version": "8.0", - "nodedir": "/Users/trott/io.js", - "standalone_static_library": 1 - } -} diff --git a/test/addons-napi/test_array/build/gyp-mac-tool b/test/addons-napi/test_array/build/gyp-mac-tool deleted file mode 100755 index 8ef02b0493a003..00000000000000 --- a/test/addons-napi/test_array/build/gyp-mac-tool +++ /dev/null @@ -1,611 +0,0 @@ -#!/usr/bin/env python -# Generated by gyp. Do not edit. -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Utility functions to perform Xcode-style build steps. - -These functions are executed via gyp-mac-tool when using the Makefile generator. -""" - -import fcntl -import fnmatch -import glob -import json -import os -import plistlib -import re -import shutil -import string -import subprocess -import sys -import tempfile - - -def main(args): - executor = MacTool() - exit_code = executor.Dispatch(args) - if exit_code is not None: - sys.exit(exit_code) - - -class MacTool(object): - """This class performs all the Mac tooling steps. 
The methods can either be - executed directly, or dispatched from an argument list.""" - - def Dispatch(self, args): - """Dispatches a string command to a method.""" - if len(args) < 1: - raise Exception("Not enough arguments") - - method = "Exec%s" % self._CommandifyName(args[0]) - return getattr(self, method)(*args[1:]) - - def _CommandifyName(self, name_string): - """Transforms a tool name like copy-info-plist to CopyInfoPlist""" - return name_string.title().replace('-', '') - - def ExecCopyBundleResource(self, source, dest, convert_to_binary): - """Copies a resource file to the bundle/Resources directory, performing any - necessary compilation on each resource.""" - extension = os.path.splitext(source)[1].lower() - if os.path.isdir(source): - # Copy tree. - # TODO(thakis): This copies file attributes like mtime, while the - # single-file branch below doesn't. This should probably be changed to - # be consistent with the single-file branch. - if os.path.exists(dest): - shutil.rmtree(dest) - shutil.copytree(source, dest) - elif extension == '.xib': - return self._CopyXIBFile(source, dest) - elif extension == '.storyboard': - return self._CopyXIBFile(source, dest) - elif extension == '.strings': - self._CopyStringsFile(source, dest, convert_to_binary) - else: - shutil.copy(source, dest) - - def _CopyXIBFile(self, source, dest): - """Compiles a XIB file with ibtool into a binary plist in the bundle.""" - - # ibtool sometimes crashes with relative paths. See crbug.com/314728. - base = os.path.dirname(os.path.realpath(__file__)) - if os.path.relpath(source): - source = os.path.join(base, source) - if os.path.relpath(dest): - dest = os.path.join(base, dest) - - args = ['xcrun', 'ibtool', '--errors', '--warnings', '--notices', - '--output-format', 'human-readable-text', '--compile', dest, source] - ibtool_section_re = re.compile(r'/\*.*\*/') - ibtool_re = re.compile(r'.*note:.*is clipping its content') - ibtoolout = subprocess.Popen(args, stdout=subprocess.PIPE) - current_section_header = None - for line in ibtoolout.stdout: - if ibtool_section_re.match(line): - current_section_header = line - elif not ibtool_re.match(line): - if current_section_header: - sys.stdout.write(current_section_header) - current_section_header = None - sys.stdout.write(line) - return ibtoolout.returncode - - def _ConvertToBinary(self, dest): - subprocess.check_call([ - 'xcrun', 'plutil', '-convert', 'binary1', '-o', dest, dest]) - - def _CopyStringsFile(self, source, dest, convert_to_binary): - """Copies a .strings file using iconv to reconvert the input into UTF-16.""" - input_code = self._DetectInputEncoding(source) or "UTF-8" - - # Xcode's CpyCopyStringsFile / builtin-copyStrings seems to call - # CFPropertyListCreateFromXMLData() behind the scenes; at least it prints - # CFPropertyListCreateFromXMLData(): Old-style plist parser: missing - # semicolon in dictionary. - # on invalid files. Do the same kind of validation. - import CoreFoundation - s = open(source, 'rb').read() - d = CoreFoundation.CFDataCreate(None, s, len(s)) - _, error = CoreFoundation.CFPropertyListCreateFromXMLData(None, d, 0, None) - if error: - return - - fp = open(dest, 'wb') - fp.write(s.decode(input_code).encode('UTF-16')) - fp.close() - - if convert_to_binary == 'True': - self._ConvertToBinary(dest) - - def _DetectInputEncoding(self, file_name): - """Reads the first few bytes from file_name and tries to guess the text - encoding. 
Returns None as a guess if it can't detect it.""" - fp = open(file_name, 'rb') - try: - header = fp.read(3) - except e: - fp.close() - return None - fp.close() - if header.startswith("\xFE\xFF"): - return "UTF-16" - elif header.startswith("\xFF\xFE"): - return "UTF-16" - elif header.startswith("\xEF\xBB\xBF"): - return "UTF-8" - else: - return None - - def ExecCopyInfoPlist(self, source, dest, convert_to_binary, *keys): - """Copies the |source| Info.plist to the destination directory |dest|.""" - # Read the source Info.plist into memory. - fd = open(source, 'r') - lines = fd.read() - fd.close() - - # Insert synthesized key/value pairs (e.g. BuildMachineOSBuild). - plist = plistlib.readPlistFromString(lines) - if keys: - plist = dict(plist.items() + json.loads(keys[0]).items()) - lines = plistlib.writePlistToString(plist) - - # Go through all the environment variables and replace them as variables in - # the file. - IDENT_RE = re.compile(r'[/\s]') - for key in os.environ: - if key.startswith('_'): - continue - evar = '${%s}' % key - evalue = os.environ[key] - lines = string.replace(lines, evar, evalue) - - # Xcode supports various suffices on environment variables, which are - # all undocumented. :rfc1034identifier is used in the standard project - # template these days, and :identifier was used earlier. They are used to - # convert non-url characters into things that look like valid urls -- - # except that the replacement character for :identifier, '_' isn't valid - # in a URL either -- oops, hence :rfc1034identifier was born. - evar = '${%s:identifier}' % key - evalue = IDENT_RE.sub('_', os.environ[key]) - lines = string.replace(lines, evar, evalue) - - evar = '${%s:rfc1034identifier}' % key - evalue = IDENT_RE.sub('-', os.environ[key]) - lines = string.replace(lines, evar, evalue) - - # Remove any keys with values that haven't been replaced. - lines = lines.split('\n') - for i in range(len(lines)): - if lines[i].strip().startswith("${"): - lines[i] = None - lines[i - 1] = None - lines = '\n'.join(filter(lambda x: x is not None, lines)) - - # Write out the file with variables replaced. - fd = open(dest, 'w') - fd.write(lines) - fd.close() - - # Now write out PkgInfo file now that the Info.plist file has been - # "compiled". - self._WritePkgInfo(dest) - - if convert_to_binary == 'True': - self._ConvertToBinary(dest) - - def _WritePkgInfo(self, info_plist): - """This writes the PkgInfo file from the data stored in Info.plist.""" - plist = plistlib.readPlist(info_plist) - if not plist: - return - - # Only create PkgInfo for executable types. - package_type = plist['CFBundlePackageType'] - if package_type != 'APPL': - return - - # The format of PkgInfo is eight characters, representing the bundle type - # and bundle signature, each four characters. If that is missing, four - # '?' characters are used instead. - signature_code = plist.get('CFBundleSignature', '????') - if len(signature_code) != 4: # Wrong length resets everything, too. - signature_code = '?' * 4 - - dest = os.path.join(os.path.dirname(info_plist), 'PkgInfo') - fp = open(dest, 'w') - fp.write('%s%s' % (package_type, signature_code)) - fp.close() - - def ExecFlock(self, lockfile, *cmd_list): - """Emulates the most basic behavior of Linux's flock(1).""" - # Rely on exception handling to report errors. 
- fd = os.open(lockfile, os.O_RDONLY|os.O_NOCTTY|os.O_CREAT, 0o666) - fcntl.flock(fd, fcntl.LOCK_EX) - return subprocess.call(cmd_list) - - def ExecFilterLibtool(self, *cmd_list): - """Calls libtool and filters out '/path/to/libtool: file: foo.o has no - symbols'.""" - libtool_re = re.compile(r'^.*libtool: file: .* has no symbols$') - libtool_re5 = re.compile( - r'^.*libtool: warning for library: ' + - r'.* the table of contents is empty ' + - r'\(no object file members in the library define global symbols\)$') - env = os.environ.copy() - # Ref: - # http://www.opensource.apple.com/source/cctools/cctools-809/misc/libtool.c - # The problem with this flag is that it resets the file mtime on the file to - # epoch=0, e.g. 1970-1-1 or 1969-12-31 depending on timezone. - env['ZERO_AR_DATE'] = '1' - libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env) - _, err = libtoolout.communicate() - for line in err.splitlines(): - if not libtool_re.match(line) and not libtool_re5.match(line): - print >>sys.stderr, line - # Unconditionally touch the output .a file on the command line if present - # and the command succeeded. A bit hacky. - if not libtoolout.returncode: - for i in range(len(cmd_list) - 1): - if cmd_list[i] == "-o" and cmd_list[i+1].endswith('.a'): - os.utime(cmd_list[i+1], None) - break - return libtoolout.returncode - - def ExecPackageFramework(self, framework, version): - """Takes a path to Something.framework and the Current version of that and - sets up all the symlinks.""" - # Find the name of the binary based on the part before the ".framework". - binary = os.path.basename(framework).split('.')[0] - - CURRENT = 'Current' - RESOURCES = 'Resources' - VERSIONS = 'Versions' - - if not os.path.exists(os.path.join(framework, VERSIONS, version, binary)): - # Binary-less frameworks don't seem to contain symlinks (see e.g. - # chromium's out/Debug/org.chromium.Chromium.manifest/ bundle). - return - - # Move into the framework directory to set the symlinks correctly. - pwd = os.getcwd() - os.chdir(framework) - - # Set up the Current version. - self._Relink(version, os.path.join(VERSIONS, CURRENT)) - - # Set up the root symlinks. - self._Relink(os.path.join(VERSIONS, CURRENT, binary), binary) - self._Relink(os.path.join(VERSIONS, CURRENT, RESOURCES), RESOURCES) - - # Back to where we were before! - os.chdir(pwd) - - def _Relink(self, dest, link): - """Creates a symlink to |dest| named |link|. If |link| already exists, - it is overwritten.""" - if os.path.lexists(link): - os.remove(link) - os.symlink(dest, link) - - def ExecCompileXcassets(self, keys, *inputs): - """Compiles multiple .xcassets files into a single .car file. - - This invokes 'actool' to compile all the inputs .xcassets files. The - |keys| arguments is a json-encoded dictionary of extra arguments to - pass to 'actool' when the asset catalogs contains an application icon - or a launch image. - - Note that 'actool' does not create the Assets.car file if the asset - catalogs does not contains imageset. 
- """ - command_line = [ - 'xcrun', 'actool', '--output-format', 'human-readable-text', - '--compress-pngs', '--notices', '--warnings', '--errors', - ] - is_iphone_target = 'IPHONEOS_DEPLOYMENT_TARGET' in os.environ - if is_iphone_target: - platform = os.environ['CONFIGURATION'].split('-')[-1] - if platform not in ('iphoneos', 'iphonesimulator'): - platform = 'iphonesimulator' - command_line.extend([ - '--platform', platform, '--target-device', 'iphone', - '--target-device', 'ipad', '--minimum-deployment-target', - os.environ['IPHONEOS_DEPLOYMENT_TARGET'], '--compile', - os.path.abspath(os.environ['CONTENTS_FOLDER_PATH']), - ]) - else: - command_line.extend([ - '--platform', 'macosx', '--target-device', 'mac', - '--minimum-deployment-target', os.environ['MACOSX_DEPLOYMENT_TARGET'], - '--compile', - os.path.abspath(os.environ['UNLOCALIZED_RESOURCES_FOLDER_PATH']), - ]) - if keys: - keys = json.loads(keys) - for key, value in keys.iteritems(): - arg_name = '--' + key - if isinstance(value, bool): - if value: - command_line.append(arg_name) - elif isinstance(value, list): - for v in value: - command_line.append(arg_name) - command_line.append(str(v)) - else: - command_line.append(arg_name) - command_line.append(str(value)) - # Note: actool crashes if inputs path are relative, so use os.path.abspath - # to get absolute path name for inputs. - command_line.extend(map(os.path.abspath, inputs)) - subprocess.check_call(command_line) - - def ExecMergeInfoPlist(self, output, *inputs): - """Merge multiple .plist files into a single .plist file.""" - merged_plist = {} - for path in inputs: - plist = self._LoadPlistMaybeBinary(path) - self._MergePlist(merged_plist, plist) - plistlib.writePlist(merged_plist, output) - - def ExecCodeSignBundle(self, key, resource_rules, entitlements, provisioning): - """Code sign a bundle. - - This function tries to code sign an iOS bundle, following the same - algorithm as Xcode: - 1. copy ResourceRules.plist from the user or the SDK into the bundle, - 2. pick the provisioning profile that best match the bundle identifier, - and copy it into the bundle as embedded.mobileprovision, - 3. copy Entitlements.plist from user or SDK next to the bundle, - 4. code sign the bundle. - """ - resource_rules_path = self._InstallResourceRules(resource_rules) - substitutions, overrides = self._InstallProvisioningProfile( - provisioning, self._GetCFBundleIdentifier()) - entitlements_path = self._InstallEntitlements( - entitlements, substitutions, overrides) - subprocess.check_call([ - 'codesign', '--force', '--sign', key, '--resource-rules', - resource_rules_path, '--entitlements', entitlements_path, - os.path.join( - os.environ['TARGET_BUILD_DIR'], - os.environ['FULL_PRODUCT_NAME'])]) - - def _InstallResourceRules(self, resource_rules): - """Installs ResourceRules.plist from user or SDK into the bundle. - - Args: - resource_rules: string, optional, path to the ResourceRules.plist file - to use, default to "${SDKROOT}/ResourceRules.plist" - - Returns: - Path to the copy of ResourceRules.plist into the bundle. - """ - source_path = resource_rules - target_path = os.path.join( - os.environ['BUILT_PRODUCTS_DIR'], - os.environ['CONTENTS_FOLDER_PATH'], - 'ResourceRules.plist') - if not source_path: - source_path = os.path.join( - os.environ['SDKROOT'], 'ResourceRules.plist') - shutil.copy2(source_path, target_path) - return target_path - - def _InstallProvisioningProfile(self, profile, bundle_identifier): - """Installs embedded.mobileprovision into the bundle. 
- - Args: - profile: string, optional, short name of the .mobileprovision file - to use, if empty or the file is missing, the best file installed - will be used - bundle_identifier: string, value of CFBundleIdentifier from Info.plist - - Returns: - A tuple containing two dictionary: variables substitutions and values - to overrides when generating the entitlements file. - """ - source_path, provisioning_data, team_id = self._FindProvisioningProfile( - profile, bundle_identifier) - target_path = os.path.join( - os.environ['BUILT_PRODUCTS_DIR'], - os.environ['CONTENTS_FOLDER_PATH'], - 'embedded.mobileprovision') - shutil.copy2(source_path, target_path) - substitutions = self._GetSubstitutions(bundle_identifier, team_id + '.') - return substitutions, provisioning_data['Entitlements'] - - def _FindProvisioningProfile(self, profile, bundle_identifier): - """Finds the .mobileprovision file to use for signing the bundle. - - Checks all the installed provisioning profiles (or if the user specified - the PROVISIONING_PROFILE variable, only consult it) and select the most - specific that correspond to the bundle identifier. - - Args: - profile: string, optional, short name of the .mobileprovision file - to use, if empty or the file is missing, the best file installed - will be used - bundle_identifier: string, value of CFBundleIdentifier from Info.plist - - Returns: - A tuple of the path to the selected provisioning profile, the data of - the embedded plist in the provisioning profile and the team identifier - to use for code signing. - - Raises: - SystemExit: if no .mobileprovision can be used to sign the bundle. - """ - profiles_dir = os.path.join( - os.environ['HOME'], 'Library', 'MobileDevice', 'Provisioning Profiles') - if not os.path.isdir(profiles_dir): - print >>sys.stderr, ( - 'cannot find mobile provisioning for %s' % bundle_identifier) - sys.exit(1) - provisioning_profiles = None - if profile: - profile_path = os.path.join(profiles_dir, profile + '.mobileprovision') - if os.path.exists(profile_path): - provisioning_profiles = [profile_path] - if not provisioning_profiles: - provisioning_profiles = glob.glob( - os.path.join(profiles_dir, '*.mobileprovision')) - valid_provisioning_profiles = {} - for profile_path in provisioning_profiles: - profile_data = self._LoadProvisioningProfile(profile_path) - app_id_pattern = profile_data.get( - 'Entitlements', {}).get('application-identifier', '') - for team_identifier in profile_data.get('TeamIdentifier', []): - app_id = '%s.%s' % (team_identifier, bundle_identifier) - if fnmatch.fnmatch(app_id, app_id_pattern): - valid_provisioning_profiles[app_id_pattern] = ( - profile_path, profile_data, team_identifier) - if not valid_provisioning_profiles: - print >>sys.stderr, ( - 'cannot find mobile provisioning for %s' % bundle_identifier) - sys.exit(1) - # If the user has multiple provisioning profiles installed that can be - # used for ${bundle_identifier}, pick the most specific one (ie. the - # provisioning profile whose pattern is the longest). - selected_key = max(valid_provisioning_profiles, key=lambda v: len(v)) - return valid_provisioning_profiles[selected_key] - - def _LoadProvisioningProfile(self, profile_path): - """Extracts the plist embedded in a provisioning profile. - - Args: - profile_path: string, path to the .mobileprovision file - - Returns: - Content of the plist embedded in the provisioning profile as a dictionary. 
- """ - with tempfile.NamedTemporaryFile() as temp: - subprocess.check_call([ - 'security', 'cms', '-D', '-i', profile_path, '-o', temp.name]) - return self._LoadPlistMaybeBinary(temp.name) - - def _MergePlist(self, merged_plist, plist): - """Merge |plist| into |merged_plist|.""" - for key, value in plist.iteritems(): - if isinstance(value, dict): - merged_value = merged_plist.get(key, {}) - if isinstance(merged_value, dict): - self._MergePlist(merged_value, value) - merged_plist[key] = merged_value - else: - merged_plist[key] = value - else: - merged_plist[key] = value - - def _LoadPlistMaybeBinary(self, plist_path): - """Loads into a memory a plist possibly encoded in binary format. - - This is a wrapper around plistlib.readPlist that tries to convert the - plist to the XML format if it can't be parsed (assuming that it is in - the binary format). - - Args: - plist_path: string, path to a plist file, in XML or binary format - - Returns: - Content of the plist as a dictionary. - """ - try: - # First, try to read the file using plistlib that only supports XML, - # and if an exception is raised, convert a temporary copy to XML and - # load that copy. - return plistlib.readPlist(plist_path) - except: - pass - with tempfile.NamedTemporaryFile() as temp: - shutil.copy2(plist_path, temp.name) - subprocess.check_call(['plutil', '-convert', 'xml1', temp.name]) - return plistlib.readPlist(temp.name) - - def _GetSubstitutions(self, bundle_identifier, app_identifier_prefix): - """Constructs a dictionary of variable substitutions for Entitlements.plist. - - Args: - bundle_identifier: string, value of CFBundleIdentifier from Info.plist - app_identifier_prefix: string, value for AppIdentifierPrefix - - Returns: - Dictionary of substitutions to apply when generating Entitlements.plist. - """ - return { - 'CFBundleIdentifier': bundle_identifier, - 'AppIdentifierPrefix': app_identifier_prefix, - } - - def _GetCFBundleIdentifier(self): - """Extracts CFBundleIdentifier value from Info.plist in the bundle. - - Returns: - Value of CFBundleIdentifier in the Info.plist located in the bundle. - """ - info_plist_path = os.path.join( - os.environ['TARGET_BUILD_DIR'], - os.environ['INFOPLIST_PATH']) - info_plist_data = self._LoadPlistMaybeBinary(info_plist_path) - return info_plist_data['CFBundleIdentifier'] - - def _InstallEntitlements(self, entitlements, substitutions, overrides): - """Generates and install the ${BundleName}.xcent entitlements file. - - Expands variables "$(variable)" pattern in the source entitlements file, - add extra entitlements defined in the .mobileprovision file and the copy - the generated plist to "${BundlePath}.xcent". - - Args: - entitlements: string, optional, path to the Entitlements.plist template - to use, defaults to "${SDKROOT}/Entitlements.plist" - substitutions: dictionary, variable substitutions - overrides: dictionary, values to add to the entitlements - - Returns: - Path to the generated entitlements file. 
- """ - source_path = entitlements - target_path = os.path.join( - os.environ['BUILT_PRODUCTS_DIR'], - os.environ['PRODUCT_NAME'] + '.xcent') - if not source_path: - source_path = os.path.join( - os.environ['SDKROOT'], - 'Entitlements.plist') - shutil.copy2(source_path, target_path) - data = self._LoadPlistMaybeBinary(target_path) - data = self._ExpandVariables(data, substitutions) - if overrides: - for key in overrides: - if key not in data: - data[key] = overrides[key] - plistlib.writePlist(data, target_path) - return target_path - - def _ExpandVariables(self, data, substitutions): - """Expands variables "$(variable)" in data. - - Args: - data: object, can be either string, list or dictionary - substitutions: dictionary, variable substitutions to perform - - Returns: - Copy of data where each references to "$(variable)" has been replaced - by the corresponding value found in substitutions, or left intact if - the key was not found. - """ - if isinstance(data, str): - for key, value in substitutions.iteritems(): - data = data.replace('$(%s)' % key, value) - return data - if isinstance(data, list): - return [self._ExpandVariables(v, substitutions) for v in data] - if isinstance(data, dict): - return {k: self._ExpandVariables(data[k], substitutions) for k in data} - return data - -if __name__ == '__main__': - sys.exit(main(sys.argv[1:])) diff --git a/test/addons-napi/test_array/build/test_array.target.mk b/test/addons-napi/test_array/build/test_array.target.mk deleted file mode 100644 index ada554d767be62..00000000000000 --- a/test/addons-napi/test_array/build/test_array.target.mk +++ /dev/null @@ -1,178 +0,0 @@ -# This file is generated by gyp; do not edit. - -TOOLSET := target -TARGET := test_array -DEFS_Debug := \ - '-DNODE_GYP_MODULE_NAME=test_array' \ - '-DUSING_UV_SHARED=1' \ - '-DUSING_V8_SHARED=1' \ - '-DV8_DEPRECATION_WARNINGS=1' \ - '-D_DARWIN_USE_64_BIT_INODE=1' \ - '-D_LARGEFILE_SOURCE' \ - '-D_FILE_OFFSET_BITS=64' \ - '-DBUILDING_NODE_EXTENSION' \ - '-DDEBUG' \ - '-D_DEBUG' \ - '-DV8_ENABLE_CHECKS' - -# Flags passed to all source files. -CFLAGS_Debug := \ - -O0 \ - -gdwarf-2 \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -Wall \ - -Wendif-labels \ - -W \ - -Wno-unused-parameter - -# Flags passed to only C files. -CFLAGS_C_Debug := \ - -fno-strict-aliasing - -# Flags passed to only C++ files. -CFLAGS_CC_Debug := \ - -std=gnu++0x \ - -stdlib=libc++ \ - -fno-rtti \ - -fno-exceptions \ - -fno-threadsafe-statics \ - -fno-strict-aliasing - -# Flags passed to only ObjC files. -CFLAGS_OBJC_Debug := - -# Flags passed to only ObjC++ files. -CFLAGS_OBJCC_Debug := - -INCS_Debug := \ - -I/Users/trott/io.js/include/node \ - -I/Users/trott/io.js/src \ - -I/Users/trott/io.js/deps/uv/include \ - -I/Users/trott/io.js/deps/v8/include - -DEFS_Release := \ - '-DNODE_GYP_MODULE_NAME=test_array' \ - '-DUSING_UV_SHARED=1' \ - '-DUSING_V8_SHARED=1' \ - '-DV8_DEPRECATION_WARNINGS=1' \ - '-D_DARWIN_USE_64_BIT_INODE=1' \ - '-D_LARGEFILE_SOURCE' \ - '-D_FILE_OFFSET_BITS=64' \ - '-DBUILDING_NODE_EXTENSION' - -# Flags passed to all source files. -CFLAGS_Release := \ - -Os \ - -gdwarf-2 \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -Wall \ - -Wendif-labels \ - -W \ - -Wno-unused-parameter - -# Flags passed to only C files. -CFLAGS_C_Release := \ - -fno-strict-aliasing - -# Flags passed to only C++ files. -CFLAGS_CC_Release := \ - -std=gnu++0x \ - -stdlib=libc++ \ - -fno-rtti \ - -fno-exceptions \ - -fno-threadsafe-statics \ - -fno-strict-aliasing - -# Flags passed to only ObjC files. 
-CFLAGS_OBJC_Release := - -# Flags passed to only ObjC++ files. -CFLAGS_OBJCC_Release := - -INCS_Release := \ - -I/Users/trott/io.js/include/node \ - -I/Users/trott/io.js/src \ - -I/Users/trott/io.js/deps/uv/include \ - -I/Users/trott/io.js/deps/v8/include - -OBJS := \ - $(obj).target/$(TARGET)/test_array.o - -# Add to the list of files we specially track dependencies for. -all_deps += $(OBJS) - -# CFLAGS et al overrides must be target-local. -# See "Target-specific Variable Values" in the GNU Make manual. -$(OBJS): TOOLSET := $(TOOLSET) -$(OBJS): GYP_CFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) -$(OBJS): GYP_CXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) -$(OBJS): GYP_OBJCFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) $(CFLAGS_OBJC_$(BUILDTYPE)) -$(OBJS): GYP_OBJCXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) $(CFLAGS_OBJCC_$(BUILDTYPE)) - -# Suffix rules, putting all outputs into $(obj). - -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) - -# Try building from generated source, too. - -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) - -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) - -# End of this set of suffix rules -### Rules for final target. -LDFLAGS_Debug := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -L$(builddir) \ - -stdlib=libc++ - -LIBTOOLFLAGS_Debug := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first - -LDFLAGS_Release := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -L$(builddir) \ - -stdlib=libc++ - -LIBTOOLFLAGS_Release := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first - -LIBS := - -$(builddir)/test_array.node: GYP_LDFLAGS := $(LDFLAGS_$(BUILDTYPE)) -$(builddir)/test_array.node: LIBS := $(LIBS) -$(builddir)/test_array.node: GYP_LIBTOOLFLAGS := $(LIBTOOLFLAGS_$(BUILDTYPE)) -$(builddir)/test_array.node: TOOLSET := $(TOOLSET) -$(builddir)/test_array.node: $(OBJS) FORCE_DO_CMD - $(call do_cmd,solink_module) - -all_deps += $(builddir)/test_array.node -# Add target alias -.PHONY: test_array -test_array: $(builddir)/test_array.node - -# Short alias for building this executable. -.PHONY: test_array.node -test_array.node: $(builddir)/test_array.node - -# Add executable to "all" target. -.PHONY: all -all: $(builddir)/test_array.node - diff --git a/test/addons-napi/test_async/build/Makefile b/test/addons-napi/test_async/build/Makefile deleted file mode 100644 index 9f28d1a37e76fa..00000000000000 --- a/test/addons-napi/test_async/build/Makefile +++ /dev/null @@ -1,342 +0,0 @@ -# We borrow heavily from the kernel build setup, though we are simpler since -# we don't have Kconfig tweaking settings on us. - -# The implicit make rules have it looking for RCS files, among other things. -# We instead explicitly write all the rules we care about. -# It's even quicker (saves ~200ms) to pass -r on the command line. -MAKEFLAGS=-r - -# The source directory tree. -srcdir := .. -abs_srcdir := $(abspath $(srcdir)) - -# The name of the builddir. -builddir_name ?= . - -# The V=1 flag on command line makes us verbosely print command lines. 
-ifdef V - quiet= -else - quiet=quiet_ -endif - -# Specify BUILDTYPE=Release on the command line for a release build. -BUILDTYPE ?= Release - -# Directory all our build output goes into. -# Note that this must be two directories beneath src/ for unit tests to pass, -# as they reach into the src/ directory for data with relative paths. -builddir ?= $(builddir_name)/$(BUILDTYPE) -abs_builddir := $(abspath $(builddir)) -depsdir := $(builddir)/.deps - -# Object output directory. -obj := $(builddir)/obj -abs_obj := $(abspath $(obj)) - -# We build up a list of every single one of the targets so we can slurp in the -# generated dependency rule Makefiles in one pass. -all_deps := - - - -CC.target ?= $(CC) -CFLAGS.target ?= $(CPPFLAGS) $(CFLAGS) -CXX.target ?= $(CXX) -CXXFLAGS.target ?= $(CPPFLAGS) $(CXXFLAGS) -LINK.target ?= $(LINK) -LDFLAGS.target ?= $(LDFLAGS) -AR.target ?= $(AR) - -# C++ apps need to be linked with g++. -LINK ?= $(CXX.target) - -# TODO(evan): move all cross-compilation logic to gyp-time so we don't need -# to replicate this environment fallback in make as well. -CC.host ?= gcc -CFLAGS.host ?= $(CPPFLAGS_host) $(CFLAGS_host) -CXX.host ?= g++ -CXXFLAGS.host ?= $(CPPFLAGS_host) $(CXXFLAGS_host) -LINK.host ?= $(CXX.host) -LDFLAGS.host ?= -AR.host ?= ar - -# Define a dir function that can handle spaces. -# http://www.gnu.org/software/make/manual/make.html#Syntax-of-Functions -# "leading spaces cannot appear in the text of the first argument as written. -# These characters can be put into the argument value by variable substitution." -empty := -space := $(empty) $(empty) - -# http://stackoverflow.com/questions/1189781/using-make-dir-or-notdir-on-a-path-with-spaces -replace_spaces = $(subst $(space),?,$1) -unreplace_spaces = $(subst ?,$(space),$1) -dirx = $(call unreplace_spaces,$(dir $(call replace_spaces,$1))) - -# Flags to make gcc output dependency info. Note that you need to be -# careful here to use the flags that ccache and distcc can understand. -# We write to a dep file on the side first and then rename at the end -# so we can't end up with a broken dep file. -depfile = $(depsdir)/$(call replace_spaces,$@).d -DEPFLAGS = -MMD -MF $(depfile).raw - -# We have to fixup the deps output in a few ways. -# (1) the file output should mention the proper .o file. -# ccache or distcc lose the path to the target, so we convert a rule of -# the form: -# foobar.o: DEP1 DEP2 -# into -# path/to/foobar.o: DEP1 DEP2 -# (2) we want missing files not to cause us to fail to build. -# We want to rewrite -# foobar.o: DEP1 DEP2 \ -# DEP3 -# to -# DEP1: -# DEP2: -# DEP3: -# so if the files are missing, they're just considered phony rules. -# We have to do some pretty insane escaping to get those backslashes -# and dollar signs past make, the shell, and sed at the same time. -# Doesn't work with spaces, but that's fine: .d files have spaces in -# their names replaced with other characters. -define fixup_dep -# The depfile may not exist if the input file didn't have any #includes. -touch $(depfile).raw -# Fixup path as in (1). -sed -e "s|^$(notdir $@)|$@|" $(depfile).raw >> $(depfile) -# Add extra rules as in (2). -# We remove slashes and replace spaces with new lines; -# remove blank lines; -# delete the first line and append a colon to the remaining lines. 
-sed -e 's|\\||' -e 'y| |\n|' $(depfile).raw |\ - grep -v '^$$' |\ - sed -e 1d -e 's|$$|:|' \ - >> $(depfile) -rm $(depfile).raw -endef - -# Command definitions: -# - cmd_foo is the actual command to run; -# - quiet_cmd_foo is the brief-output summary of the command. - -quiet_cmd_cc = CC($(TOOLSET)) $@ -cmd_cc = $(CC.$(TOOLSET)) $(GYP_CFLAGS) $(DEPFLAGS) $(CFLAGS.$(TOOLSET)) -c -o $@ $< - -quiet_cmd_cxx = CXX($(TOOLSET)) $@ -cmd_cxx = $(CXX.$(TOOLSET)) $(GYP_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< - -quiet_cmd_objc = CXX($(TOOLSET)) $@ -cmd_objc = $(CC.$(TOOLSET)) $(GYP_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $< - -quiet_cmd_objcxx = CXX($(TOOLSET)) $@ -cmd_objcxx = $(CXX.$(TOOLSET)) $(GYP_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $< - -# Commands for precompiled header files. -quiet_cmd_pch_c = CXX($(TOOLSET)) $@ -cmd_pch_c = $(CC.$(TOOLSET)) $(GYP_PCH_CFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< -quiet_cmd_pch_cc = CXX($(TOOLSET)) $@ -cmd_pch_cc = $(CC.$(TOOLSET)) $(GYP_PCH_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< -quiet_cmd_pch_m = CXX($(TOOLSET)) $@ -cmd_pch_m = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $< -quiet_cmd_pch_mm = CXX($(TOOLSET)) $@ -cmd_pch_mm = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $< - -# gyp-mac-tool is written next to the root Makefile by gyp. -# Use $(4) for the command, since $(2) and $(3) are used as flag by do_cmd -# already. -quiet_cmd_mac_tool = MACTOOL $(4) $< -cmd_mac_tool = ./gyp-mac-tool $(4) $< "$@" - -quiet_cmd_mac_package_framework = PACKAGE FRAMEWORK $@ -cmd_mac_package_framework = ./gyp-mac-tool package-framework "$@" $(4) - -quiet_cmd_infoplist = INFOPLIST $@ -cmd_infoplist = $(CC.$(TOOLSET)) -E -P -Wno-trigraphs -x c $(INFOPLIST_DEFINES) "$<" -o "$@" - -quiet_cmd_touch = TOUCH $@ -cmd_touch = touch $@ - -quiet_cmd_copy = COPY $@ -# send stderr to /dev/null to ignore messages when linking directories. -cmd_copy = rm -rf "$@" && cp -af "$<" "$@" - -quiet_cmd_alink = LIBTOOL-STATIC $@ -cmd_alink = rm -f $@ && ./gyp-mac-tool filter-libtool libtool $(GYP_LIBTOOLFLAGS) -static -o $@ $(filter %.o,$^) - -quiet_cmd_link = LINK($(TOOLSET)) $@ -cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS) - -quiet_cmd_solink = SOLINK($(TOOLSET)) $@ -cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS) - -quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@ -cmd_solink_module = $(LINK.$(TOOLSET)) -bundle $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS) - - -# Define an escape_quotes function to escape single quotes. -# This allows us to handle quotes properly as long as we always use -# use single quotes and escape_quotes. -escape_quotes = $(subst ','\'',$(1)) -# This comment is here just to include a ' to unconfuse syntax highlighting. -# Define an escape_vars function to escape '$' variable syntax. -# This allows us to read/write command lines with shell variables (e.g. -# $LD_LIBRARY_PATH), without triggering make substitution. -escape_vars = $(subst $$,$$$$,$(1)) -# Helper that expands to a shell command to echo a string exactly as it is in -# make. This uses printf instead of echo because printf's behaviour with respect -# to escape sequences is more portable than echo's across different shells -# (e.g., dash, bash). 
-exact_echo = printf '%s\n' '$(call escape_quotes,$(1))' - -# Helper to compare the command we're about to run against the command -# we logged the last time we ran the command. Produces an empty -# string (false) when the commands match. -# Tricky point: Make has no string-equality test function. -# The kernel uses the following, but it seems like it would have false -# positives, where one string reordered its arguments. -# arg_check = $(strip $(filter-out $(cmd_$(1)), $(cmd_$@)) \ -# $(filter-out $(cmd_$@), $(cmd_$(1)))) -# We instead substitute each for the empty string into the other, and -# say they're equal if both substitutions produce the empty string. -# .d files contain ? instead of spaces, take that into account. -command_changed = $(or $(subst $(cmd_$(1)),,$(cmd_$(call replace_spaces,$@))),\ - $(subst $(cmd_$(call replace_spaces,$@)),,$(cmd_$(1)))) - -# Helper that is non-empty when a prerequisite changes. -# Normally make does this implicitly, but we force rules to always run -# so we can check their command lines. -# $? -- new prerequisites -# $| -- order-only dependencies -prereq_changed = $(filter-out FORCE_DO_CMD,$(filter-out $|,$?)) - -# Helper that executes all postbuilds until one fails. -define do_postbuilds - @E=0;\ - for p in $(POSTBUILDS); do\ - eval $$p;\ - E=$$?;\ - if [ $$E -ne 0 ]; then\ - break;\ - fi;\ - done;\ - if [ $$E -ne 0 ]; then\ - rm -rf "$@";\ - exit $$E;\ - fi -endef - -# do_cmd: run a command via the above cmd_foo names, if necessary. -# Should always run for a given target to handle command-line changes. -# Second argument, if non-zero, makes it do asm/C/C++ dependency munging. -# Third argument, if non-zero, makes it do POSTBUILDS processing. -# Note: We intentionally do NOT call dirx for depfile, since it contains ? for -# spaces already and dirx strips the ? characters. -define do_cmd -$(if $(or $(command_changed),$(prereq_changed)), - @$(call exact_echo, $($(quiet)cmd_$(1))) - @mkdir -p "$(call dirx,$@)" "$(dir $(depfile))" - $(if $(findstring flock,$(word 2,$(cmd_$1))), - @$(cmd_$(1)) - @echo " $(quiet_cmd_$(1)): Finished", - @$(cmd_$(1)) - ) - @$(call exact_echo,$(call escape_vars,cmd_$(call replace_spaces,$@) := $(cmd_$(1)))) > $(depfile) - @$(if $(2),$(fixup_dep)) - $(if $(and $(3), $(POSTBUILDS)), - $(call do_postbuilds) - ) -) -endef - -# Declare the "all" target first so it is the default, -# even though we don't have the deps yet. -.PHONY: all -all: - -# make looks for ways to re-generate included makefiles, but in our case, we -# don't have a direct way. Explicitly telling make that it has nothing to do -# for them makes it go faster. -%.d: ; - -# Use FORCE_DO_CMD to force a target to run. Should be coupled with -# do_cmd. -.PHONY: FORCE_DO_CMD -FORCE_DO_CMD: - -TOOLSET := target -# Suffix rules, putting all outputs into $(obj). -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.m FORCE_DO_CMD - @$(call do_cmd,objc,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.mm FORCE_DO_CMD - @$(call do_cmd,objcxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.S FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.s FORCE_DO_CMD - @$(call do_cmd,cc,1) - -# Try building from generated source, too. 
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.m FORCE_DO_CMD - @$(call do_cmd,objc,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.mm FORCE_DO_CMD - @$(call do_cmd,objcxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.S FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.s FORCE_DO_CMD - @$(call do_cmd,cc,1) - -$(obj).$(TOOLSET)/%.o: $(obj)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.m FORCE_DO_CMD - @$(call do_cmd,objc,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.mm FORCE_DO_CMD - @$(call do_cmd,objcxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.S FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.s FORCE_DO_CMD - @$(call do_cmd,cc,1) - - -ifeq ($(strip $(foreach prefix,$(NO_LOAD),\ - $(findstring $(join ^,$(prefix)),\ - $(join ^,test_async.target.mk)))),) - include test_async.target.mk -endif - -quiet_cmd_regen_makefile = ACTION Regenerating $@ -cmd_regen_makefile = cd $(srcdir); /Users/trott/io.js/deps/npm/node_modules/node-gyp/gyp/gyp_main.py -fmake --ignore-environment "--toplevel-dir=." -I/Users/trott/io.js/test/addons-napi/test_async/build/config.gypi -I/Users/trott/io.js/deps/npm/node_modules/node-gyp/addon.gypi -I/Users/trott/io.js/common.gypi "--depth=." "-Goutput_dir=." "--generator-output=build" "-Dlibrary=shared_library" "-Dvisibility=default" "-Dnode_root_dir=/Users/trott/io.js" "-Dnode_gyp_dir=/Users/trott/io.js/deps/npm/node_modules/node-gyp" "-Dnode_lib_file=/Users/trott/io.js/$(Configuration)/node.lib" "-Dmodule_root_dir=/Users/trott/io.js/test/addons-napi/test_async" "-Dnode_engine=v8" binding.gyp -Makefile: $(srcdir)/../../../deps/npm/node_modules/node-gyp/addon.gypi $(srcdir)/build/config.gypi $(srcdir)/binding.gyp $(srcdir)/../../../common.gypi - $(call do_cmd,regen_makefile) - -# "all" is a concatenation of the "all" targets from all the included -# sub-makefiles. This is just here to clarify. -all: - -# Add in dependency-tracking rules. $(all_deps) is the list of every single -# target in our tree. Only consider the ones with .d (dependency) info: -d_files := $(wildcard $(foreach f,$(all_deps),$(depsdir)/$(f).d)) -ifneq ($(d_files),) - include $(d_files) -endif diff --git a/test/addons-napi/test_async/build/binding.Makefile b/test/addons-napi/test_async/build/binding.Makefile deleted file mode 100644 index b3712e0a3b057a..00000000000000 --- a/test/addons-napi/test_async/build/binding.Makefile +++ /dev/null @@ -1,6 +0,0 @@ -# This file is generated by gyp; do not edit. - -export builddir_name ?= ./build/. -.PHONY: all -all: - $(MAKE) test_async diff --git a/test/addons-napi/test_async/build/config.gypi b/test/addons-napi/test_async/build/config.gypi deleted file mode 100644 index 6a35cfccbb772b..00000000000000 --- a/test/addons-napi/test_async/build/config.gypi +++ /dev/null @@ -1,70 +0,0 @@ -# Do not edit. 
File was generated by node-gyp's "configure" step -{ - "target_defaults": { - "cflags": [], - "default_configuration": "Release", - "defines": [], - "include_dirs": [], - "libraries": [] - }, - "variables": { - "asan": 0, - "coverage": "false", - "debug_devtools": "node", - "debug_http2": "false", - "debug_nghttp2": "false", - "force_dynamic_crt": 0, - "host_arch": "x64", - "icu_data_file": "icudt59l.dat", - "icu_data_in": "../../deps/icu-small/source/data/in/icudt59l.dat", - "icu_endianness": "l", - "icu_gyp_path": "tools/icu/icu-generic.gyp", - "icu_locales": "en,root", - "icu_path": "deps/icu-small", - "icu_small": "true", - "icu_ver_major": "59", - "llvm_version": 0, - "node_byteorder": "little", - "node_enable_d8": "false", - "node_enable_v8_vtunejit": "false", - "node_install_npm": "true", - "node_module_version": 58, - "node_no_browser_globals": "false", - "node_prefix": "/usr/local", - "node_release_urlbase": "", - "node_shared": "false", - "node_shared_cares": "false", - "node_shared_http_parser": "false", - "node_shared_libuv": "false", - "node_shared_openssl": "false", - "node_shared_zlib": "false", - "node_tag": "", - "node_use_bundled_v8": "true", - "node_use_dtrace": "true", - "node_use_etw": "false", - "node_use_lttng": "false", - "node_use_openssl": "true", - "node_use_perfctr": "false", - "node_use_v8_platform": "true", - "node_without_node_options": "false", - "openssl_fips": "", - "openssl_no_asm": 0, - "shlib_suffix": "58.dylib", - "target_arch": "x64", - "uv_parent_path": "/deps/uv/", - "uv_use_dtrace": "true", - "v8_enable_gdbjit": 0, - "v8_enable_i18n_support": 1, - "v8_enable_inspector": 1, - "v8_no_strict_aliasing": 1, - "v8_optimized_debug": 0, - "v8_promise_internal_field_count": 1, - "v8_random_seed": 0, - "v8_trace_maps": 0, - "v8_use_snapshot": "true", - "want_separate_host_toolset": 0, - "xcode_version": "8.0", - "nodedir": "/Users/trott/io.js", - "standalone_static_library": 1 - } -} diff --git a/test/addons-napi/test_async/build/gyp-mac-tool b/test/addons-napi/test_async/build/gyp-mac-tool deleted file mode 100755 index 8ef02b0493a003..00000000000000 --- a/test/addons-napi/test_async/build/gyp-mac-tool +++ /dev/null @@ -1,611 +0,0 @@ -#!/usr/bin/env python -# Generated by gyp. Do not edit. -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Utility functions to perform Xcode-style build steps. - -These functions are executed via gyp-mac-tool when using the Makefile generator. -""" - -import fcntl -import fnmatch -import glob -import json -import os -import plistlib -import re -import shutil -import string -import subprocess -import sys -import tempfile - - -def main(args): - executor = MacTool() - exit_code = executor.Dispatch(args) - if exit_code is not None: - sys.exit(exit_code) - - -class MacTool(object): - """This class performs all the Mac tooling steps. 
The methods can either be - executed directly, or dispatched from an argument list.""" - - def Dispatch(self, args): - """Dispatches a string command to a method.""" - if len(args) < 1: - raise Exception("Not enough arguments") - - method = "Exec%s" % self._CommandifyName(args[0]) - return getattr(self, method)(*args[1:]) - - def _CommandifyName(self, name_string): - """Transforms a tool name like copy-info-plist to CopyInfoPlist""" - return name_string.title().replace('-', '') - - def ExecCopyBundleResource(self, source, dest, convert_to_binary): - """Copies a resource file to the bundle/Resources directory, performing any - necessary compilation on each resource.""" - extension = os.path.splitext(source)[1].lower() - if os.path.isdir(source): - # Copy tree. - # TODO(thakis): This copies file attributes like mtime, while the - # single-file branch below doesn't. This should probably be changed to - # be consistent with the single-file branch. - if os.path.exists(dest): - shutil.rmtree(dest) - shutil.copytree(source, dest) - elif extension == '.xib': - return self._CopyXIBFile(source, dest) - elif extension == '.storyboard': - return self._CopyXIBFile(source, dest) - elif extension == '.strings': - self._CopyStringsFile(source, dest, convert_to_binary) - else: - shutil.copy(source, dest) - - def _CopyXIBFile(self, source, dest): - """Compiles a XIB file with ibtool into a binary plist in the bundle.""" - - # ibtool sometimes crashes with relative paths. See crbug.com/314728. - base = os.path.dirname(os.path.realpath(__file__)) - if os.path.relpath(source): - source = os.path.join(base, source) - if os.path.relpath(dest): - dest = os.path.join(base, dest) - - args = ['xcrun', 'ibtool', '--errors', '--warnings', '--notices', - '--output-format', 'human-readable-text', '--compile', dest, source] - ibtool_section_re = re.compile(r'/\*.*\*/') - ibtool_re = re.compile(r'.*note:.*is clipping its content') - ibtoolout = subprocess.Popen(args, stdout=subprocess.PIPE) - current_section_header = None - for line in ibtoolout.stdout: - if ibtool_section_re.match(line): - current_section_header = line - elif not ibtool_re.match(line): - if current_section_header: - sys.stdout.write(current_section_header) - current_section_header = None - sys.stdout.write(line) - return ibtoolout.returncode - - def _ConvertToBinary(self, dest): - subprocess.check_call([ - 'xcrun', 'plutil', '-convert', 'binary1', '-o', dest, dest]) - - def _CopyStringsFile(self, source, dest, convert_to_binary): - """Copies a .strings file using iconv to reconvert the input into UTF-16.""" - input_code = self._DetectInputEncoding(source) or "UTF-8" - - # Xcode's CpyCopyStringsFile / builtin-copyStrings seems to call - # CFPropertyListCreateFromXMLData() behind the scenes; at least it prints - # CFPropertyListCreateFromXMLData(): Old-style plist parser: missing - # semicolon in dictionary. - # on invalid files. Do the same kind of validation. - import CoreFoundation - s = open(source, 'rb').read() - d = CoreFoundation.CFDataCreate(None, s, len(s)) - _, error = CoreFoundation.CFPropertyListCreateFromXMLData(None, d, 0, None) - if error: - return - - fp = open(dest, 'wb') - fp.write(s.decode(input_code).encode('UTF-16')) - fp.close() - - if convert_to_binary == 'True': - self._ConvertToBinary(dest) - - def _DetectInputEncoding(self, file_name): - """Reads the first few bytes from file_name and tries to guess the text - encoding. 
Returns None as a guess if it can't detect it.""" - fp = open(file_name, 'rb') - try: - header = fp.read(3) - except e: - fp.close() - return None - fp.close() - if header.startswith("\xFE\xFF"): - return "UTF-16" - elif header.startswith("\xFF\xFE"): - return "UTF-16" - elif header.startswith("\xEF\xBB\xBF"): - return "UTF-8" - else: - return None - - def ExecCopyInfoPlist(self, source, dest, convert_to_binary, *keys): - """Copies the |source| Info.plist to the destination directory |dest|.""" - # Read the source Info.plist into memory. - fd = open(source, 'r') - lines = fd.read() - fd.close() - - # Insert synthesized key/value pairs (e.g. BuildMachineOSBuild). - plist = plistlib.readPlistFromString(lines) - if keys: - plist = dict(plist.items() + json.loads(keys[0]).items()) - lines = plistlib.writePlistToString(plist) - - # Go through all the environment variables and replace them as variables in - # the file. - IDENT_RE = re.compile(r'[/\s]') - for key in os.environ: - if key.startswith('_'): - continue - evar = '${%s}' % key - evalue = os.environ[key] - lines = string.replace(lines, evar, evalue) - - # Xcode supports various suffices on environment variables, which are - # all undocumented. :rfc1034identifier is used in the standard project - # template these days, and :identifier was used earlier. They are used to - # convert non-url characters into things that look like valid urls -- - # except that the replacement character for :identifier, '_' isn't valid - # in a URL either -- oops, hence :rfc1034identifier was born. - evar = '${%s:identifier}' % key - evalue = IDENT_RE.sub('_', os.environ[key]) - lines = string.replace(lines, evar, evalue) - - evar = '${%s:rfc1034identifier}' % key - evalue = IDENT_RE.sub('-', os.environ[key]) - lines = string.replace(lines, evar, evalue) - - # Remove any keys with values that haven't been replaced. - lines = lines.split('\n') - for i in range(len(lines)): - if lines[i].strip().startswith("${"): - lines[i] = None - lines[i - 1] = None - lines = '\n'.join(filter(lambda x: x is not None, lines)) - - # Write out the file with variables replaced. - fd = open(dest, 'w') - fd.write(lines) - fd.close() - - # Now write out PkgInfo file now that the Info.plist file has been - # "compiled". - self._WritePkgInfo(dest) - - if convert_to_binary == 'True': - self._ConvertToBinary(dest) - - def _WritePkgInfo(self, info_plist): - """This writes the PkgInfo file from the data stored in Info.plist.""" - plist = plistlib.readPlist(info_plist) - if not plist: - return - - # Only create PkgInfo for executable types. - package_type = plist['CFBundlePackageType'] - if package_type != 'APPL': - return - - # The format of PkgInfo is eight characters, representing the bundle type - # and bundle signature, each four characters. If that is missing, four - # '?' characters are used instead. - signature_code = plist.get('CFBundleSignature', '????') - if len(signature_code) != 4: # Wrong length resets everything, too. - signature_code = '?' * 4 - - dest = os.path.join(os.path.dirname(info_plist), 'PkgInfo') - fp = open(dest, 'w') - fp.write('%s%s' % (package_type, signature_code)) - fp.close() - - def ExecFlock(self, lockfile, *cmd_list): - """Emulates the most basic behavior of Linux's flock(1).""" - # Rely on exception handling to report errors. 
- fd = os.open(lockfile, os.O_RDONLY|os.O_NOCTTY|os.O_CREAT, 0o666) - fcntl.flock(fd, fcntl.LOCK_EX) - return subprocess.call(cmd_list) - - def ExecFilterLibtool(self, *cmd_list): - """Calls libtool and filters out '/path/to/libtool: file: foo.o has no - symbols'.""" - libtool_re = re.compile(r'^.*libtool: file: .* has no symbols$') - libtool_re5 = re.compile( - r'^.*libtool: warning for library: ' + - r'.* the table of contents is empty ' + - r'\(no object file members in the library define global symbols\)$') - env = os.environ.copy() - # Ref: - # http://www.opensource.apple.com/source/cctools/cctools-809/misc/libtool.c - # The problem with this flag is that it resets the file mtime on the file to - # epoch=0, e.g. 1970-1-1 or 1969-12-31 depending on timezone. - env['ZERO_AR_DATE'] = '1' - libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env) - _, err = libtoolout.communicate() - for line in err.splitlines(): - if not libtool_re.match(line) and not libtool_re5.match(line): - print >>sys.stderr, line - # Unconditionally touch the output .a file on the command line if present - # and the command succeeded. A bit hacky. - if not libtoolout.returncode: - for i in range(len(cmd_list) - 1): - if cmd_list[i] == "-o" and cmd_list[i+1].endswith('.a'): - os.utime(cmd_list[i+1], None) - break - return libtoolout.returncode - - def ExecPackageFramework(self, framework, version): - """Takes a path to Something.framework and the Current version of that and - sets up all the symlinks.""" - # Find the name of the binary based on the part before the ".framework". - binary = os.path.basename(framework).split('.')[0] - - CURRENT = 'Current' - RESOURCES = 'Resources' - VERSIONS = 'Versions' - - if not os.path.exists(os.path.join(framework, VERSIONS, version, binary)): - # Binary-less frameworks don't seem to contain symlinks (see e.g. - # chromium's out/Debug/org.chromium.Chromium.manifest/ bundle). - return - - # Move into the framework directory to set the symlinks correctly. - pwd = os.getcwd() - os.chdir(framework) - - # Set up the Current version. - self._Relink(version, os.path.join(VERSIONS, CURRENT)) - - # Set up the root symlinks. - self._Relink(os.path.join(VERSIONS, CURRENT, binary), binary) - self._Relink(os.path.join(VERSIONS, CURRENT, RESOURCES), RESOURCES) - - # Back to where we were before! - os.chdir(pwd) - - def _Relink(self, dest, link): - """Creates a symlink to |dest| named |link|. If |link| already exists, - it is overwritten.""" - if os.path.lexists(link): - os.remove(link) - os.symlink(dest, link) - - def ExecCompileXcassets(self, keys, *inputs): - """Compiles multiple .xcassets files into a single .car file. - - This invokes 'actool' to compile all the inputs .xcassets files. The - |keys| arguments is a json-encoded dictionary of extra arguments to - pass to 'actool' when the asset catalogs contains an application icon - or a launch image. - - Note that 'actool' does not create the Assets.car file if the asset - catalogs does not contains imageset. 
- """ - command_line = [ - 'xcrun', 'actool', '--output-format', 'human-readable-text', - '--compress-pngs', '--notices', '--warnings', '--errors', - ] - is_iphone_target = 'IPHONEOS_DEPLOYMENT_TARGET' in os.environ - if is_iphone_target: - platform = os.environ['CONFIGURATION'].split('-')[-1] - if platform not in ('iphoneos', 'iphonesimulator'): - platform = 'iphonesimulator' - command_line.extend([ - '--platform', platform, '--target-device', 'iphone', - '--target-device', 'ipad', '--minimum-deployment-target', - os.environ['IPHONEOS_DEPLOYMENT_TARGET'], '--compile', - os.path.abspath(os.environ['CONTENTS_FOLDER_PATH']), - ]) - else: - command_line.extend([ - '--platform', 'macosx', '--target-device', 'mac', - '--minimum-deployment-target', os.environ['MACOSX_DEPLOYMENT_TARGET'], - '--compile', - os.path.abspath(os.environ['UNLOCALIZED_RESOURCES_FOLDER_PATH']), - ]) - if keys: - keys = json.loads(keys) - for key, value in keys.iteritems(): - arg_name = '--' + key - if isinstance(value, bool): - if value: - command_line.append(arg_name) - elif isinstance(value, list): - for v in value: - command_line.append(arg_name) - command_line.append(str(v)) - else: - command_line.append(arg_name) - command_line.append(str(value)) - # Note: actool crashes if inputs path are relative, so use os.path.abspath - # to get absolute path name for inputs. - command_line.extend(map(os.path.abspath, inputs)) - subprocess.check_call(command_line) - - def ExecMergeInfoPlist(self, output, *inputs): - """Merge multiple .plist files into a single .plist file.""" - merged_plist = {} - for path in inputs: - plist = self._LoadPlistMaybeBinary(path) - self._MergePlist(merged_plist, plist) - plistlib.writePlist(merged_plist, output) - - def ExecCodeSignBundle(self, key, resource_rules, entitlements, provisioning): - """Code sign a bundle. - - This function tries to code sign an iOS bundle, following the same - algorithm as Xcode: - 1. copy ResourceRules.plist from the user or the SDK into the bundle, - 2. pick the provisioning profile that best match the bundle identifier, - and copy it into the bundle as embedded.mobileprovision, - 3. copy Entitlements.plist from user or SDK next to the bundle, - 4. code sign the bundle. - """ - resource_rules_path = self._InstallResourceRules(resource_rules) - substitutions, overrides = self._InstallProvisioningProfile( - provisioning, self._GetCFBundleIdentifier()) - entitlements_path = self._InstallEntitlements( - entitlements, substitutions, overrides) - subprocess.check_call([ - 'codesign', '--force', '--sign', key, '--resource-rules', - resource_rules_path, '--entitlements', entitlements_path, - os.path.join( - os.environ['TARGET_BUILD_DIR'], - os.environ['FULL_PRODUCT_NAME'])]) - - def _InstallResourceRules(self, resource_rules): - """Installs ResourceRules.plist from user or SDK into the bundle. - - Args: - resource_rules: string, optional, path to the ResourceRules.plist file - to use, default to "${SDKROOT}/ResourceRules.plist" - - Returns: - Path to the copy of ResourceRules.plist into the bundle. - """ - source_path = resource_rules - target_path = os.path.join( - os.environ['BUILT_PRODUCTS_DIR'], - os.environ['CONTENTS_FOLDER_PATH'], - 'ResourceRules.plist') - if not source_path: - source_path = os.path.join( - os.environ['SDKROOT'], 'ResourceRules.plist') - shutil.copy2(source_path, target_path) - return target_path - - def _InstallProvisioningProfile(self, profile, bundle_identifier): - """Installs embedded.mobileprovision into the bundle. 
- - Args: - profile: string, optional, short name of the .mobileprovision file - to use, if empty or the file is missing, the best file installed - will be used - bundle_identifier: string, value of CFBundleIdentifier from Info.plist - - Returns: - A tuple containing two dictionary: variables substitutions and values - to overrides when generating the entitlements file. - """ - source_path, provisioning_data, team_id = self._FindProvisioningProfile( - profile, bundle_identifier) - target_path = os.path.join( - os.environ['BUILT_PRODUCTS_DIR'], - os.environ['CONTENTS_FOLDER_PATH'], - 'embedded.mobileprovision') - shutil.copy2(source_path, target_path) - substitutions = self._GetSubstitutions(bundle_identifier, team_id + '.') - return substitutions, provisioning_data['Entitlements'] - - def _FindProvisioningProfile(self, profile, bundle_identifier): - """Finds the .mobileprovision file to use for signing the bundle. - - Checks all the installed provisioning profiles (or if the user specified - the PROVISIONING_PROFILE variable, only consult it) and select the most - specific that correspond to the bundle identifier. - - Args: - profile: string, optional, short name of the .mobileprovision file - to use, if empty or the file is missing, the best file installed - will be used - bundle_identifier: string, value of CFBundleIdentifier from Info.plist - - Returns: - A tuple of the path to the selected provisioning profile, the data of - the embedded plist in the provisioning profile and the team identifier - to use for code signing. - - Raises: - SystemExit: if no .mobileprovision can be used to sign the bundle. - """ - profiles_dir = os.path.join( - os.environ['HOME'], 'Library', 'MobileDevice', 'Provisioning Profiles') - if not os.path.isdir(profiles_dir): - print >>sys.stderr, ( - 'cannot find mobile provisioning for %s' % bundle_identifier) - sys.exit(1) - provisioning_profiles = None - if profile: - profile_path = os.path.join(profiles_dir, profile + '.mobileprovision') - if os.path.exists(profile_path): - provisioning_profiles = [profile_path] - if not provisioning_profiles: - provisioning_profiles = glob.glob( - os.path.join(profiles_dir, '*.mobileprovision')) - valid_provisioning_profiles = {} - for profile_path in provisioning_profiles: - profile_data = self._LoadProvisioningProfile(profile_path) - app_id_pattern = profile_data.get( - 'Entitlements', {}).get('application-identifier', '') - for team_identifier in profile_data.get('TeamIdentifier', []): - app_id = '%s.%s' % (team_identifier, bundle_identifier) - if fnmatch.fnmatch(app_id, app_id_pattern): - valid_provisioning_profiles[app_id_pattern] = ( - profile_path, profile_data, team_identifier) - if not valid_provisioning_profiles: - print >>sys.stderr, ( - 'cannot find mobile provisioning for %s' % bundle_identifier) - sys.exit(1) - # If the user has multiple provisioning profiles installed that can be - # used for ${bundle_identifier}, pick the most specific one (ie. the - # provisioning profile whose pattern is the longest). - selected_key = max(valid_provisioning_profiles, key=lambda v: len(v)) - return valid_provisioning_profiles[selected_key] - - def _LoadProvisioningProfile(self, profile_path): - """Extracts the plist embedded in a provisioning profile. - - Args: - profile_path: string, path to the .mobileprovision file - - Returns: - Content of the plist embedded in the provisioning profile as a dictionary. 
- """ - with tempfile.NamedTemporaryFile() as temp: - subprocess.check_call([ - 'security', 'cms', '-D', '-i', profile_path, '-o', temp.name]) - return self._LoadPlistMaybeBinary(temp.name) - - def _MergePlist(self, merged_plist, plist): - """Merge |plist| into |merged_plist|.""" - for key, value in plist.iteritems(): - if isinstance(value, dict): - merged_value = merged_plist.get(key, {}) - if isinstance(merged_value, dict): - self._MergePlist(merged_value, value) - merged_plist[key] = merged_value - else: - merged_plist[key] = value - else: - merged_plist[key] = value - - def _LoadPlistMaybeBinary(self, plist_path): - """Loads into a memory a plist possibly encoded in binary format. - - This is a wrapper around plistlib.readPlist that tries to convert the - plist to the XML format if it can't be parsed (assuming that it is in - the binary format). - - Args: - plist_path: string, path to a plist file, in XML or binary format - - Returns: - Content of the plist as a dictionary. - """ - try: - # First, try to read the file using plistlib that only supports XML, - # and if an exception is raised, convert a temporary copy to XML and - # load that copy. - return plistlib.readPlist(plist_path) - except: - pass - with tempfile.NamedTemporaryFile() as temp: - shutil.copy2(plist_path, temp.name) - subprocess.check_call(['plutil', '-convert', 'xml1', temp.name]) - return plistlib.readPlist(temp.name) - - def _GetSubstitutions(self, bundle_identifier, app_identifier_prefix): - """Constructs a dictionary of variable substitutions for Entitlements.plist. - - Args: - bundle_identifier: string, value of CFBundleIdentifier from Info.plist - app_identifier_prefix: string, value for AppIdentifierPrefix - - Returns: - Dictionary of substitutions to apply when generating Entitlements.plist. - """ - return { - 'CFBundleIdentifier': bundle_identifier, - 'AppIdentifierPrefix': app_identifier_prefix, - } - - def _GetCFBundleIdentifier(self): - """Extracts CFBundleIdentifier value from Info.plist in the bundle. - - Returns: - Value of CFBundleIdentifier in the Info.plist located in the bundle. - """ - info_plist_path = os.path.join( - os.environ['TARGET_BUILD_DIR'], - os.environ['INFOPLIST_PATH']) - info_plist_data = self._LoadPlistMaybeBinary(info_plist_path) - return info_plist_data['CFBundleIdentifier'] - - def _InstallEntitlements(self, entitlements, substitutions, overrides): - """Generates and install the ${BundleName}.xcent entitlements file. - - Expands variables "$(variable)" pattern in the source entitlements file, - add extra entitlements defined in the .mobileprovision file and the copy - the generated plist to "${BundlePath}.xcent". - - Args: - entitlements: string, optional, path to the Entitlements.plist template - to use, defaults to "${SDKROOT}/Entitlements.plist" - substitutions: dictionary, variable substitutions - overrides: dictionary, values to add to the entitlements - - Returns: - Path to the generated entitlements file. 
- """ - source_path = entitlements - target_path = os.path.join( - os.environ['BUILT_PRODUCTS_DIR'], - os.environ['PRODUCT_NAME'] + '.xcent') - if not source_path: - source_path = os.path.join( - os.environ['SDKROOT'], - 'Entitlements.plist') - shutil.copy2(source_path, target_path) - data = self._LoadPlistMaybeBinary(target_path) - data = self._ExpandVariables(data, substitutions) - if overrides: - for key in overrides: - if key not in data: - data[key] = overrides[key] - plistlib.writePlist(data, target_path) - return target_path - - def _ExpandVariables(self, data, substitutions): - """Expands variables "$(variable)" in data. - - Args: - data: object, can be either string, list or dictionary - substitutions: dictionary, variable substitutions to perform - - Returns: - Copy of data where each references to "$(variable)" has been replaced - by the corresponding value found in substitutions, or left intact if - the key was not found. - """ - if isinstance(data, str): - for key, value in substitutions.iteritems(): - data = data.replace('$(%s)' % key, value) - return data - if isinstance(data, list): - return [self._ExpandVariables(v, substitutions) for v in data] - if isinstance(data, dict): - return {k: self._ExpandVariables(data[k], substitutions) for k in data} - return data - -if __name__ == '__main__': - sys.exit(main(sys.argv[1:])) diff --git a/test/addons-napi/test_async/build/test_async.target.mk b/test/addons-napi/test_async/build/test_async.target.mk deleted file mode 100644 index 86fcb28e45668d..00000000000000 --- a/test/addons-napi/test_async/build/test_async.target.mk +++ /dev/null @@ -1,178 +0,0 @@ -# This file is generated by gyp; do not edit. - -TOOLSET := target -TARGET := test_async -DEFS_Debug := \ - '-DNODE_GYP_MODULE_NAME=test_async' \ - '-DUSING_UV_SHARED=1' \ - '-DUSING_V8_SHARED=1' \ - '-DV8_DEPRECATION_WARNINGS=1' \ - '-D_DARWIN_USE_64_BIT_INODE=1' \ - '-D_LARGEFILE_SOURCE' \ - '-D_FILE_OFFSET_BITS=64' \ - '-DBUILDING_NODE_EXTENSION' \ - '-DDEBUG' \ - '-D_DEBUG' \ - '-DV8_ENABLE_CHECKS' - -# Flags passed to all source files. -CFLAGS_Debug := \ - -O0 \ - -gdwarf-2 \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -Wall \ - -Wendif-labels \ - -W \ - -Wno-unused-parameter - -# Flags passed to only C files. -CFLAGS_C_Debug := \ - -fno-strict-aliasing - -# Flags passed to only C++ files. -CFLAGS_CC_Debug := \ - -std=gnu++0x \ - -stdlib=libc++ \ - -fno-rtti \ - -fno-exceptions \ - -fno-threadsafe-statics \ - -fno-strict-aliasing - -# Flags passed to only ObjC files. -CFLAGS_OBJC_Debug := - -# Flags passed to only ObjC++ files. -CFLAGS_OBJCC_Debug := - -INCS_Debug := \ - -I/Users/trott/io.js/include/node \ - -I/Users/trott/io.js/src \ - -I/Users/trott/io.js/deps/uv/include \ - -I/Users/trott/io.js/deps/v8/include - -DEFS_Release := \ - '-DNODE_GYP_MODULE_NAME=test_async' \ - '-DUSING_UV_SHARED=1' \ - '-DUSING_V8_SHARED=1' \ - '-DV8_DEPRECATION_WARNINGS=1' \ - '-D_DARWIN_USE_64_BIT_INODE=1' \ - '-D_LARGEFILE_SOURCE' \ - '-D_FILE_OFFSET_BITS=64' \ - '-DBUILDING_NODE_EXTENSION' - -# Flags passed to all source files. -CFLAGS_Release := \ - -Os \ - -gdwarf-2 \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -Wall \ - -Wendif-labels \ - -W \ - -Wno-unused-parameter - -# Flags passed to only C files. -CFLAGS_C_Release := \ - -fno-strict-aliasing - -# Flags passed to only C++ files. -CFLAGS_CC_Release := \ - -std=gnu++0x \ - -stdlib=libc++ \ - -fno-rtti \ - -fno-exceptions \ - -fno-threadsafe-statics \ - -fno-strict-aliasing - -# Flags passed to only ObjC files. 
-CFLAGS_OBJC_Release := - -# Flags passed to only ObjC++ files. -CFLAGS_OBJCC_Release := - -INCS_Release := \ - -I/Users/trott/io.js/include/node \ - -I/Users/trott/io.js/src \ - -I/Users/trott/io.js/deps/uv/include \ - -I/Users/trott/io.js/deps/v8/include - -OBJS := \ - $(obj).target/$(TARGET)/test_async.o - -# Add to the list of files we specially track dependencies for. -all_deps += $(OBJS) - -# CFLAGS et al overrides must be target-local. -# See "Target-specific Variable Values" in the GNU Make manual. -$(OBJS): TOOLSET := $(TOOLSET) -$(OBJS): GYP_CFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) -$(OBJS): GYP_CXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) -$(OBJS): GYP_OBJCFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) $(CFLAGS_OBJC_$(BUILDTYPE)) -$(OBJS): GYP_OBJCXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) $(CFLAGS_OBJCC_$(BUILDTYPE)) - -# Suffix rules, putting all outputs into $(obj). - -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) - -# Try building from generated source, too. - -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) - -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) - -# End of this set of suffix rules -### Rules for final target. -LDFLAGS_Debug := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -L$(builddir) \ - -stdlib=libc++ - -LIBTOOLFLAGS_Debug := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first - -LDFLAGS_Release := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -L$(builddir) \ - -stdlib=libc++ - -LIBTOOLFLAGS_Release := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first - -LIBS := - -$(builddir)/test_async.node: GYP_LDFLAGS := $(LDFLAGS_$(BUILDTYPE)) -$(builddir)/test_async.node: LIBS := $(LIBS) -$(builddir)/test_async.node: GYP_LIBTOOLFLAGS := $(LIBTOOLFLAGS_$(BUILDTYPE)) -$(builddir)/test_async.node: TOOLSET := $(TOOLSET) -$(builddir)/test_async.node: $(OBJS) FORCE_DO_CMD - $(call do_cmd,solink_module) - -all_deps += $(builddir)/test_async.node -# Add target alias -.PHONY: test_async -test_async: $(builddir)/test_async.node - -# Short alias for building this executable. -.PHONY: test_async.node -test_async.node: $(builddir)/test_async.node - -# Add executable to "all" target. -.PHONY: all -all: $(builddir)/test_async.node - diff --git a/test/addons-napi/test_buffer/build/Makefile b/test/addons-napi/test_buffer/build/Makefile deleted file mode 100644 index 3ccdeb364558af..00000000000000 --- a/test/addons-napi/test_buffer/build/Makefile +++ /dev/null @@ -1,342 +0,0 @@ -# We borrow heavily from the kernel build setup, though we are simpler since -# we don't have Kconfig tweaking settings on us. - -# The implicit make rules have it looking for RCS files, among other things. -# We instead explicitly write all the rules we care about. -# It's even quicker (saves ~200ms) to pass -r on the command line. -MAKEFLAGS=-r - -# The source directory tree. -srcdir := .. -abs_srcdir := $(abspath $(srcdir)) - -# The name of the builddir. -builddir_name ?= . - -# The V=1 flag on command line makes us verbosely print command lines. 
-ifdef V - quiet= -else - quiet=quiet_ -endif - -# Specify BUILDTYPE=Release on the command line for a release build. -BUILDTYPE ?= Release - -# Directory all our build output goes into. -# Note that this must be two directories beneath src/ for unit tests to pass, -# as they reach into the src/ directory for data with relative paths. -builddir ?= $(builddir_name)/$(BUILDTYPE) -abs_builddir := $(abspath $(builddir)) -depsdir := $(builddir)/.deps - -# Object output directory. -obj := $(builddir)/obj -abs_obj := $(abspath $(obj)) - -# We build up a list of every single one of the targets so we can slurp in the -# generated dependency rule Makefiles in one pass. -all_deps := - - - -CC.target ?= $(CC) -CFLAGS.target ?= $(CPPFLAGS) $(CFLAGS) -CXX.target ?= $(CXX) -CXXFLAGS.target ?= $(CPPFLAGS) $(CXXFLAGS) -LINK.target ?= $(LINK) -LDFLAGS.target ?= $(LDFLAGS) -AR.target ?= $(AR) - -# C++ apps need to be linked with g++. -LINK ?= $(CXX.target) - -# TODO(evan): move all cross-compilation logic to gyp-time so we don't need -# to replicate this environment fallback in make as well. -CC.host ?= gcc -CFLAGS.host ?= $(CPPFLAGS_host) $(CFLAGS_host) -CXX.host ?= g++ -CXXFLAGS.host ?= $(CPPFLAGS_host) $(CXXFLAGS_host) -LINK.host ?= $(CXX.host) -LDFLAGS.host ?= -AR.host ?= ar - -# Define a dir function that can handle spaces. -# http://www.gnu.org/software/make/manual/make.html#Syntax-of-Functions -# "leading spaces cannot appear in the text of the first argument as written. -# These characters can be put into the argument value by variable substitution." -empty := -space := $(empty) $(empty) - -# http://stackoverflow.com/questions/1189781/using-make-dir-or-notdir-on-a-path-with-spaces -replace_spaces = $(subst $(space),?,$1) -unreplace_spaces = $(subst ?,$(space),$1) -dirx = $(call unreplace_spaces,$(dir $(call replace_spaces,$1))) - -# Flags to make gcc output dependency info. Note that you need to be -# careful here to use the flags that ccache and distcc can understand. -# We write to a dep file on the side first and then rename at the end -# so we can't end up with a broken dep file. -depfile = $(depsdir)/$(call replace_spaces,$@).d -DEPFLAGS = -MMD -MF $(depfile).raw - -# We have to fixup the deps output in a few ways. -# (1) the file output should mention the proper .o file. -# ccache or distcc lose the path to the target, so we convert a rule of -# the form: -# foobar.o: DEP1 DEP2 -# into -# path/to/foobar.o: DEP1 DEP2 -# (2) we want missing files not to cause us to fail to build. -# We want to rewrite -# foobar.o: DEP1 DEP2 \ -# DEP3 -# to -# DEP1: -# DEP2: -# DEP3: -# so if the files are missing, they're just considered phony rules. -# We have to do some pretty insane escaping to get those backslashes -# and dollar signs past make, the shell, and sed at the same time. -# Doesn't work with spaces, but that's fine: .d files have spaces in -# their names replaced with other characters. -define fixup_dep -# The depfile may not exist if the input file didn't have any #includes. -touch $(depfile).raw -# Fixup path as in (1). -sed -e "s|^$(notdir $@)|$@|" $(depfile).raw >> $(depfile) -# Add extra rules as in (2). -# We remove slashes and replace spaces with new lines; -# remove blank lines; -# delete the first line and append a colon to the remaining lines. 
-sed -e 's|\\||' -e 'y| |\n|' $(depfile).raw |\ - grep -v '^$$' |\ - sed -e 1d -e 's|$$|:|' \ - >> $(depfile) -rm $(depfile).raw -endef - -# Command definitions: -# - cmd_foo is the actual command to run; -# - quiet_cmd_foo is the brief-output summary of the command. - -quiet_cmd_cc = CC($(TOOLSET)) $@ -cmd_cc = $(CC.$(TOOLSET)) $(GYP_CFLAGS) $(DEPFLAGS) $(CFLAGS.$(TOOLSET)) -c -o $@ $< - -quiet_cmd_cxx = CXX($(TOOLSET)) $@ -cmd_cxx = $(CXX.$(TOOLSET)) $(GYP_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< - -quiet_cmd_objc = CXX($(TOOLSET)) $@ -cmd_objc = $(CC.$(TOOLSET)) $(GYP_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $< - -quiet_cmd_objcxx = CXX($(TOOLSET)) $@ -cmd_objcxx = $(CXX.$(TOOLSET)) $(GYP_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $< - -# Commands for precompiled header files. -quiet_cmd_pch_c = CXX($(TOOLSET)) $@ -cmd_pch_c = $(CC.$(TOOLSET)) $(GYP_PCH_CFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< -quiet_cmd_pch_cc = CXX($(TOOLSET)) $@ -cmd_pch_cc = $(CC.$(TOOLSET)) $(GYP_PCH_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< -quiet_cmd_pch_m = CXX($(TOOLSET)) $@ -cmd_pch_m = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $< -quiet_cmd_pch_mm = CXX($(TOOLSET)) $@ -cmd_pch_mm = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $< - -# gyp-mac-tool is written next to the root Makefile by gyp. -# Use $(4) for the command, since $(2) and $(3) are used as flag by do_cmd -# already. -quiet_cmd_mac_tool = MACTOOL $(4) $< -cmd_mac_tool = ./gyp-mac-tool $(4) $< "$@" - -quiet_cmd_mac_package_framework = PACKAGE FRAMEWORK $@ -cmd_mac_package_framework = ./gyp-mac-tool package-framework "$@" $(4) - -quiet_cmd_infoplist = INFOPLIST $@ -cmd_infoplist = $(CC.$(TOOLSET)) -E -P -Wno-trigraphs -x c $(INFOPLIST_DEFINES) "$<" -o "$@" - -quiet_cmd_touch = TOUCH $@ -cmd_touch = touch $@ - -quiet_cmd_copy = COPY $@ -# send stderr to /dev/null to ignore messages when linking directories. -cmd_copy = rm -rf "$@" && cp -af "$<" "$@" - -quiet_cmd_alink = LIBTOOL-STATIC $@ -cmd_alink = rm -f $@ && ./gyp-mac-tool filter-libtool libtool $(GYP_LIBTOOLFLAGS) -static -o $@ $(filter %.o,$^) - -quiet_cmd_link = LINK($(TOOLSET)) $@ -cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS) - -quiet_cmd_solink = SOLINK($(TOOLSET)) $@ -cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS) - -quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@ -cmd_solink_module = $(LINK.$(TOOLSET)) -bundle $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS) - - -# Define an escape_quotes function to escape single quotes. -# This allows us to handle quotes properly as long as we always use -# use single quotes and escape_quotes. -escape_quotes = $(subst ','\'',$(1)) -# This comment is here just to include a ' to unconfuse syntax highlighting. -# Define an escape_vars function to escape '$' variable syntax. -# This allows us to read/write command lines with shell variables (e.g. -# $LD_LIBRARY_PATH), without triggering make substitution. -escape_vars = $(subst $$,$$$$,$(1)) -# Helper that expands to a shell command to echo a string exactly as it is in -# make. This uses printf instead of echo because printf's behaviour with respect -# to escape sequences is more portable than echo's across different shells -# (e.g., dash, bash). 
-exact_echo = printf '%s\n' '$(call escape_quotes,$(1))' - -# Helper to compare the command we're about to run against the command -# we logged the last time we ran the command. Produces an empty -# string (false) when the commands match. -# Tricky point: Make has no string-equality test function. -# The kernel uses the following, but it seems like it would have false -# positives, where one string reordered its arguments. -# arg_check = $(strip $(filter-out $(cmd_$(1)), $(cmd_$@)) \ -# $(filter-out $(cmd_$@), $(cmd_$(1)))) -# We instead substitute each for the empty string into the other, and -# say they're equal if both substitutions produce the empty string. -# .d files contain ? instead of spaces, take that into account. -command_changed = $(or $(subst $(cmd_$(1)),,$(cmd_$(call replace_spaces,$@))),\ - $(subst $(cmd_$(call replace_spaces,$@)),,$(cmd_$(1)))) - -# Helper that is non-empty when a prerequisite changes. -# Normally make does this implicitly, but we force rules to always run -# so we can check their command lines. -# $? -- new prerequisites -# $| -- order-only dependencies -prereq_changed = $(filter-out FORCE_DO_CMD,$(filter-out $|,$?)) - -# Helper that executes all postbuilds until one fails. -define do_postbuilds - @E=0;\ - for p in $(POSTBUILDS); do\ - eval $$p;\ - E=$$?;\ - if [ $$E -ne 0 ]; then\ - break;\ - fi;\ - done;\ - if [ $$E -ne 0 ]; then\ - rm -rf "$@";\ - exit $$E;\ - fi -endef - -# do_cmd: run a command via the above cmd_foo names, if necessary. -# Should always run for a given target to handle command-line changes. -# Second argument, if non-zero, makes it do asm/C/C++ dependency munging. -# Third argument, if non-zero, makes it do POSTBUILDS processing. -# Note: We intentionally do NOT call dirx for depfile, since it contains ? for -# spaces already and dirx strips the ? characters. -define do_cmd -$(if $(or $(command_changed),$(prereq_changed)), - @$(call exact_echo, $($(quiet)cmd_$(1))) - @mkdir -p "$(call dirx,$@)" "$(dir $(depfile))" - $(if $(findstring flock,$(word 2,$(cmd_$1))), - @$(cmd_$(1)) - @echo " $(quiet_cmd_$(1)): Finished", - @$(cmd_$(1)) - ) - @$(call exact_echo,$(call escape_vars,cmd_$(call replace_spaces,$@) := $(cmd_$(1)))) > $(depfile) - @$(if $(2),$(fixup_dep)) - $(if $(and $(3), $(POSTBUILDS)), - $(call do_postbuilds) - ) -) -endef - -# Declare the "all" target first so it is the default, -# even though we don't have the deps yet. -.PHONY: all -all: - -# make looks for ways to re-generate included makefiles, but in our case, we -# don't have a direct way. Explicitly telling make that it has nothing to do -# for them makes it go faster. -%.d: ; - -# Use FORCE_DO_CMD to force a target to run. Should be coupled with -# do_cmd. -.PHONY: FORCE_DO_CMD -FORCE_DO_CMD: - -TOOLSET := target -# Suffix rules, putting all outputs into $(obj). -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.m FORCE_DO_CMD - @$(call do_cmd,objc,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.mm FORCE_DO_CMD - @$(call do_cmd,objcxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.S FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.s FORCE_DO_CMD - @$(call do_cmd,cc,1) - -# Try building from generated source, too. 
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.m FORCE_DO_CMD - @$(call do_cmd,objc,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.mm FORCE_DO_CMD - @$(call do_cmd,objcxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.S FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.s FORCE_DO_CMD - @$(call do_cmd,cc,1) - -$(obj).$(TOOLSET)/%.o: $(obj)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.m FORCE_DO_CMD - @$(call do_cmd,objc,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.mm FORCE_DO_CMD - @$(call do_cmd,objcxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.S FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.s FORCE_DO_CMD - @$(call do_cmd,cc,1) - - -ifeq ($(strip $(foreach prefix,$(NO_LOAD),\ - $(findstring $(join ^,$(prefix)),\ - $(join ^,test_buffer.target.mk)))),) - include test_buffer.target.mk -endif - -quiet_cmd_regen_makefile = ACTION Regenerating $@ -cmd_regen_makefile = cd $(srcdir); /Users/trott/io.js/deps/npm/node_modules/node-gyp/gyp/gyp_main.py -fmake --ignore-environment "--toplevel-dir=." -I/Users/trott/io.js/test/addons-napi/test_buffer/build/config.gypi -I/Users/trott/io.js/deps/npm/node_modules/node-gyp/addon.gypi -I/Users/trott/io.js/common.gypi "--depth=." "-Goutput_dir=." "--generator-output=build" "-Dlibrary=shared_library" "-Dvisibility=default" "-Dnode_root_dir=/Users/trott/io.js" "-Dnode_gyp_dir=/Users/trott/io.js/deps/npm/node_modules/node-gyp" "-Dnode_lib_file=/Users/trott/io.js/$(Configuration)/node.lib" "-Dmodule_root_dir=/Users/trott/io.js/test/addons-napi/test_buffer" "-Dnode_engine=v8" binding.gyp -Makefile: $(srcdir)/../../../deps/npm/node_modules/node-gyp/addon.gypi $(srcdir)/build/config.gypi $(srcdir)/binding.gyp $(srcdir)/../../../common.gypi - $(call do_cmd,regen_makefile) - -# "all" is a concatenation of the "all" targets from all the included -# sub-makefiles. This is just here to clarify. -all: - -# Add in dependency-tracking rules. $(all_deps) is the list of every single -# target in our tree. Only consider the ones with .d (dependency) info: -d_files := $(wildcard $(foreach f,$(all_deps),$(depsdir)/$(f).d)) -ifneq ($(d_files),) - include $(d_files) -endif diff --git a/test/addons-napi/test_buffer/build/binding.Makefile b/test/addons-napi/test_buffer/build/binding.Makefile deleted file mode 100644 index 1f1489b1507bc6..00000000000000 --- a/test/addons-napi/test_buffer/build/binding.Makefile +++ /dev/null @@ -1,6 +0,0 @@ -# This file is generated by gyp; do not edit. - -export builddir_name ?= ./build/. -.PHONY: all -all: - $(MAKE) test_buffer diff --git a/test/addons-napi/test_buffer/build/config.gypi b/test/addons-napi/test_buffer/build/config.gypi deleted file mode 100644 index 6a35cfccbb772b..00000000000000 --- a/test/addons-napi/test_buffer/build/config.gypi +++ /dev/null @@ -1,70 +0,0 @@ -# Do not edit. 
File was generated by node-gyp's "configure" step -{ - "target_defaults": { - "cflags": [], - "default_configuration": "Release", - "defines": [], - "include_dirs": [], - "libraries": [] - }, - "variables": { - "asan": 0, - "coverage": "false", - "debug_devtools": "node", - "debug_http2": "false", - "debug_nghttp2": "false", - "force_dynamic_crt": 0, - "host_arch": "x64", - "icu_data_file": "icudt59l.dat", - "icu_data_in": "../../deps/icu-small/source/data/in/icudt59l.dat", - "icu_endianness": "l", - "icu_gyp_path": "tools/icu/icu-generic.gyp", - "icu_locales": "en,root", - "icu_path": "deps/icu-small", - "icu_small": "true", - "icu_ver_major": "59", - "llvm_version": 0, - "node_byteorder": "little", - "node_enable_d8": "false", - "node_enable_v8_vtunejit": "false", - "node_install_npm": "true", - "node_module_version": 58, - "node_no_browser_globals": "false", - "node_prefix": "/usr/local", - "node_release_urlbase": "", - "node_shared": "false", - "node_shared_cares": "false", - "node_shared_http_parser": "false", - "node_shared_libuv": "false", - "node_shared_openssl": "false", - "node_shared_zlib": "false", - "node_tag": "", - "node_use_bundled_v8": "true", - "node_use_dtrace": "true", - "node_use_etw": "false", - "node_use_lttng": "false", - "node_use_openssl": "true", - "node_use_perfctr": "false", - "node_use_v8_platform": "true", - "node_without_node_options": "false", - "openssl_fips": "", - "openssl_no_asm": 0, - "shlib_suffix": "58.dylib", - "target_arch": "x64", - "uv_parent_path": "/deps/uv/", - "uv_use_dtrace": "true", - "v8_enable_gdbjit": 0, - "v8_enable_i18n_support": 1, - "v8_enable_inspector": 1, - "v8_no_strict_aliasing": 1, - "v8_optimized_debug": 0, - "v8_promise_internal_field_count": 1, - "v8_random_seed": 0, - "v8_trace_maps": 0, - "v8_use_snapshot": "true", - "want_separate_host_toolset": 0, - "xcode_version": "8.0", - "nodedir": "/Users/trott/io.js", - "standalone_static_library": 1 - } -} diff --git a/test/addons-napi/test_buffer/build/gyp-mac-tool b/test/addons-napi/test_buffer/build/gyp-mac-tool deleted file mode 100755 index 8ef02b0493a003..00000000000000 --- a/test/addons-napi/test_buffer/build/gyp-mac-tool +++ /dev/null @@ -1,611 +0,0 @@ -#!/usr/bin/env python -# Generated by gyp. Do not edit. -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Utility functions to perform Xcode-style build steps. - -These functions are executed via gyp-mac-tool when using the Makefile generator. -""" - -import fcntl -import fnmatch -import glob -import json -import os -import plistlib -import re -import shutil -import string -import subprocess -import sys -import tempfile - - -def main(args): - executor = MacTool() - exit_code = executor.Dispatch(args) - if exit_code is not None: - sys.exit(exit_code) - - -class MacTool(object): - """This class performs all the Mac tooling steps. 
The methods can either be - executed directly, or dispatched from an argument list.""" - - def Dispatch(self, args): - """Dispatches a string command to a method.""" - if len(args) < 1: - raise Exception("Not enough arguments") - - method = "Exec%s" % self._CommandifyName(args[0]) - return getattr(self, method)(*args[1:]) - - def _CommandifyName(self, name_string): - """Transforms a tool name like copy-info-plist to CopyInfoPlist""" - return name_string.title().replace('-', '') - - def ExecCopyBundleResource(self, source, dest, convert_to_binary): - """Copies a resource file to the bundle/Resources directory, performing any - necessary compilation on each resource.""" - extension = os.path.splitext(source)[1].lower() - if os.path.isdir(source): - # Copy tree. - # TODO(thakis): This copies file attributes like mtime, while the - # single-file branch below doesn't. This should probably be changed to - # be consistent with the single-file branch. - if os.path.exists(dest): - shutil.rmtree(dest) - shutil.copytree(source, dest) - elif extension == '.xib': - return self._CopyXIBFile(source, dest) - elif extension == '.storyboard': - return self._CopyXIBFile(source, dest) - elif extension == '.strings': - self._CopyStringsFile(source, dest, convert_to_binary) - else: - shutil.copy(source, dest) - - def _CopyXIBFile(self, source, dest): - """Compiles a XIB file with ibtool into a binary plist in the bundle.""" - - # ibtool sometimes crashes with relative paths. See crbug.com/314728. - base = os.path.dirname(os.path.realpath(__file__)) - if os.path.relpath(source): - source = os.path.join(base, source) - if os.path.relpath(dest): - dest = os.path.join(base, dest) - - args = ['xcrun', 'ibtool', '--errors', '--warnings', '--notices', - '--output-format', 'human-readable-text', '--compile', dest, source] - ibtool_section_re = re.compile(r'/\*.*\*/') - ibtool_re = re.compile(r'.*note:.*is clipping its content') - ibtoolout = subprocess.Popen(args, stdout=subprocess.PIPE) - current_section_header = None - for line in ibtoolout.stdout: - if ibtool_section_re.match(line): - current_section_header = line - elif not ibtool_re.match(line): - if current_section_header: - sys.stdout.write(current_section_header) - current_section_header = None - sys.stdout.write(line) - return ibtoolout.returncode - - def _ConvertToBinary(self, dest): - subprocess.check_call([ - 'xcrun', 'plutil', '-convert', 'binary1', '-o', dest, dest]) - - def _CopyStringsFile(self, source, dest, convert_to_binary): - """Copies a .strings file using iconv to reconvert the input into UTF-16.""" - input_code = self._DetectInputEncoding(source) or "UTF-8" - - # Xcode's CpyCopyStringsFile / builtin-copyStrings seems to call - # CFPropertyListCreateFromXMLData() behind the scenes; at least it prints - # CFPropertyListCreateFromXMLData(): Old-style plist parser: missing - # semicolon in dictionary. - # on invalid files. Do the same kind of validation. - import CoreFoundation - s = open(source, 'rb').read() - d = CoreFoundation.CFDataCreate(None, s, len(s)) - _, error = CoreFoundation.CFPropertyListCreateFromXMLData(None, d, 0, None) - if error: - return - - fp = open(dest, 'wb') - fp.write(s.decode(input_code).encode('UTF-16')) - fp.close() - - if convert_to_binary == 'True': - self._ConvertToBinary(dest) - - def _DetectInputEncoding(self, file_name): - """Reads the first few bytes from file_name and tries to guess the text - encoding. 
Returns None as a guess if it can't detect it.""" - fp = open(file_name, 'rb') - try: - header = fp.read(3) - except e: - fp.close() - return None - fp.close() - if header.startswith("\xFE\xFF"): - return "UTF-16" - elif header.startswith("\xFF\xFE"): - return "UTF-16" - elif header.startswith("\xEF\xBB\xBF"): - return "UTF-8" - else: - return None - - def ExecCopyInfoPlist(self, source, dest, convert_to_binary, *keys): - """Copies the |source| Info.plist to the destination directory |dest|.""" - # Read the source Info.plist into memory. - fd = open(source, 'r') - lines = fd.read() - fd.close() - - # Insert synthesized key/value pairs (e.g. BuildMachineOSBuild). - plist = plistlib.readPlistFromString(lines) - if keys: - plist = dict(plist.items() + json.loads(keys[0]).items()) - lines = plistlib.writePlistToString(plist) - - # Go through all the environment variables and replace them as variables in - # the file. - IDENT_RE = re.compile(r'[/\s]') - for key in os.environ: - if key.startswith('_'): - continue - evar = '${%s}' % key - evalue = os.environ[key] - lines = string.replace(lines, evar, evalue) - - # Xcode supports various suffices on environment variables, which are - # all undocumented. :rfc1034identifier is used in the standard project - # template these days, and :identifier was used earlier. They are used to - # convert non-url characters into things that look like valid urls -- - # except that the replacement character for :identifier, '_' isn't valid - # in a URL either -- oops, hence :rfc1034identifier was born. - evar = '${%s:identifier}' % key - evalue = IDENT_RE.sub('_', os.environ[key]) - lines = string.replace(lines, evar, evalue) - - evar = '${%s:rfc1034identifier}' % key - evalue = IDENT_RE.sub('-', os.environ[key]) - lines = string.replace(lines, evar, evalue) - - # Remove any keys with values that haven't been replaced. - lines = lines.split('\n') - for i in range(len(lines)): - if lines[i].strip().startswith("${"): - lines[i] = None - lines[i - 1] = None - lines = '\n'.join(filter(lambda x: x is not None, lines)) - - # Write out the file with variables replaced. - fd = open(dest, 'w') - fd.write(lines) - fd.close() - - # Now write out PkgInfo file now that the Info.plist file has been - # "compiled". - self._WritePkgInfo(dest) - - if convert_to_binary == 'True': - self._ConvertToBinary(dest) - - def _WritePkgInfo(self, info_plist): - """This writes the PkgInfo file from the data stored in Info.plist.""" - plist = plistlib.readPlist(info_plist) - if not plist: - return - - # Only create PkgInfo for executable types. - package_type = plist['CFBundlePackageType'] - if package_type != 'APPL': - return - - # The format of PkgInfo is eight characters, representing the bundle type - # and bundle signature, each four characters. If that is missing, four - # '?' characters are used instead. - signature_code = plist.get('CFBundleSignature', '????') - if len(signature_code) != 4: # Wrong length resets everything, too. - signature_code = '?' * 4 - - dest = os.path.join(os.path.dirname(info_plist), 'PkgInfo') - fp = open(dest, 'w') - fp.write('%s%s' % (package_type, signature_code)) - fp.close() - - def ExecFlock(self, lockfile, *cmd_list): - """Emulates the most basic behavior of Linux's flock(1).""" - # Rely on exception handling to report errors. 
- fd = os.open(lockfile, os.O_RDONLY|os.O_NOCTTY|os.O_CREAT, 0o666) - fcntl.flock(fd, fcntl.LOCK_EX) - return subprocess.call(cmd_list) - - def ExecFilterLibtool(self, *cmd_list): - """Calls libtool and filters out '/path/to/libtool: file: foo.o has no - symbols'.""" - libtool_re = re.compile(r'^.*libtool: file: .* has no symbols$') - libtool_re5 = re.compile( - r'^.*libtool: warning for library: ' + - r'.* the table of contents is empty ' + - r'\(no object file members in the library define global symbols\)$') - env = os.environ.copy() - # Ref: - # http://www.opensource.apple.com/source/cctools/cctools-809/misc/libtool.c - # The problem with this flag is that it resets the file mtime on the file to - # epoch=0, e.g. 1970-1-1 or 1969-12-31 depending on timezone. - env['ZERO_AR_DATE'] = '1' - libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env) - _, err = libtoolout.communicate() - for line in err.splitlines(): - if not libtool_re.match(line) and not libtool_re5.match(line): - print >>sys.stderr, line - # Unconditionally touch the output .a file on the command line if present - # and the command succeeded. A bit hacky. - if not libtoolout.returncode: - for i in range(len(cmd_list) - 1): - if cmd_list[i] == "-o" and cmd_list[i+1].endswith('.a'): - os.utime(cmd_list[i+1], None) - break - return libtoolout.returncode - - def ExecPackageFramework(self, framework, version): - """Takes a path to Something.framework and the Current version of that and - sets up all the symlinks.""" - # Find the name of the binary based on the part before the ".framework". - binary = os.path.basename(framework).split('.')[0] - - CURRENT = 'Current' - RESOURCES = 'Resources' - VERSIONS = 'Versions' - - if not os.path.exists(os.path.join(framework, VERSIONS, version, binary)): - # Binary-less frameworks don't seem to contain symlinks (see e.g. - # chromium's out/Debug/org.chromium.Chromium.manifest/ bundle). - return - - # Move into the framework directory to set the symlinks correctly. - pwd = os.getcwd() - os.chdir(framework) - - # Set up the Current version. - self._Relink(version, os.path.join(VERSIONS, CURRENT)) - - # Set up the root symlinks. - self._Relink(os.path.join(VERSIONS, CURRENT, binary), binary) - self._Relink(os.path.join(VERSIONS, CURRENT, RESOURCES), RESOURCES) - - # Back to where we were before! - os.chdir(pwd) - - def _Relink(self, dest, link): - """Creates a symlink to |dest| named |link|. If |link| already exists, - it is overwritten.""" - if os.path.lexists(link): - os.remove(link) - os.symlink(dest, link) - - def ExecCompileXcassets(self, keys, *inputs): - """Compiles multiple .xcassets files into a single .car file. - - This invokes 'actool' to compile all the inputs .xcassets files. The - |keys| arguments is a json-encoded dictionary of extra arguments to - pass to 'actool' when the asset catalogs contains an application icon - or a launch image. - - Note that 'actool' does not create the Assets.car file if the asset - catalogs does not contains imageset. 
- """ - command_line = [ - 'xcrun', 'actool', '--output-format', 'human-readable-text', - '--compress-pngs', '--notices', '--warnings', '--errors', - ] - is_iphone_target = 'IPHONEOS_DEPLOYMENT_TARGET' in os.environ - if is_iphone_target: - platform = os.environ['CONFIGURATION'].split('-')[-1] - if platform not in ('iphoneos', 'iphonesimulator'): - platform = 'iphonesimulator' - command_line.extend([ - '--platform', platform, '--target-device', 'iphone', - '--target-device', 'ipad', '--minimum-deployment-target', - os.environ['IPHONEOS_DEPLOYMENT_TARGET'], '--compile', - os.path.abspath(os.environ['CONTENTS_FOLDER_PATH']), - ]) - else: - command_line.extend([ - '--platform', 'macosx', '--target-device', 'mac', - '--minimum-deployment-target', os.environ['MACOSX_DEPLOYMENT_TARGET'], - '--compile', - os.path.abspath(os.environ['UNLOCALIZED_RESOURCES_FOLDER_PATH']), - ]) - if keys: - keys = json.loads(keys) - for key, value in keys.iteritems(): - arg_name = '--' + key - if isinstance(value, bool): - if value: - command_line.append(arg_name) - elif isinstance(value, list): - for v in value: - command_line.append(arg_name) - command_line.append(str(v)) - else: - command_line.append(arg_name) - command_line.append(str(value)) - # Note: actool crashes if inputs path are relative, so use os.path.abspath - # to get absolute path name for inputs. - command_line.extend(map(os.path.abspath, inputs)) - subprocess.check_call(command_line) - - def ExecMergeInfoPlist(self, output, *inputs): - """Merge multiple .plist files into a single .plist file.""" - merged_plist = {} - for path in inputs: - plist = self._LoadPlistMaybeBinary(path) - self._MergePlist(merged_plist, plist) - plistlib.writePlist(merged_plist, output) - - def ExecCodeSignBundle(self, key, resource_rules, entitlements, provisioning): - """Code sign a bundle. - - This function tries to code sign an iOS bundle, following the same - algorithm as Xcode: - 1. copy ResourceRules.plist from the user or the SDK into the bundle, - 2. pick the provisioning profile that best match the bundle identifier, - and copy it into the bundle as embedded.mobileprovision, - 3. copy Entitlements.plist from user or SDK next to the bundle, - 4. code sign the bundle. - """ - resource_rules_path = self._InstallResourceRules(resource_rules) - substitutions, overrides = self._InstallProvisioningProfile( - provisioning, self._GetCFBundleIdentifier()) - entitlements_path = self._InstallEntitlements( - entitlements, substitutions, overrides) - subprocess.check_call([ - 'codesign', '--force', '--sign', key, '--resource-rules', - resource_rules_path, '--entitlements', entitlements_path, - os.path.join( - os.environ['TARGET_BUILD_DIR'], - os.environ['FULL_PRODUCT_NAME'])]) - - def _InstallResourceRules(self, resource_rules): - """Installs ResourceRules.plist from user or SDK into the bundle. - - Args: - resource_rules: string, optional, path to the ResourceRules.plist file - to use, default to "${SDKROOT}/ResourceRules.plist" - - Returns: - Path to the copy of ResourceRules.plist into the bundle. - """ - source_path = resource_rules - target_path = os.path.join( - os.environ['BUILT_PRODUCTS_DIR'], - os.environ['CONTENTS_FOLDER_PATH'], - 'ResourceRules.plist') - if not source_path: - source_path = os.path.join( - os.environ['SDKROOT'], 'ResourceRules.plist') - shutil.copy2(source_path, target_path) - return target_path - - def _InstallProvisioningProfile(self, profile, bundle_identifier): - """Installs embedded.mobileprovision into the bundle. 
- - Args: - profile: string, optional, short name of the .mobileprovision file - to use, if empty or the file is missing, the best file installed - will be used - bundle_identifier: string, value of CFBundleIdentifier from Info.plist - - Returns: - A tuple containing two dictionary: variables substitutions and values - to overrides when generating the entitlements file. - """ - source_path, provisioning_data, team_id = self._FindProvisioningProfile( - profile, bundle_identifier) - target_path = os.path.join( - os.environ['BUILT_PRODUCTS_DIR'], - os.environ['CONTENTS_FOLDER_PATH'], - 'embedded.mobileprovision') - shutil.copy2(source_path, target_path) - substitutions = self._GetSubstitutions(bundle_identifier, team_id + '.') - return substitutions, provisioning_data['Entitlements'] - - def _FindProvisioningProfile(self, profile, bundle_identifier): - """Finds the .mobileprovision file to use for signing the bundle. - - Checks all the installed provisioning profiles (or if the user specified - the PROVISIONING_PROFILE variable, only consult it) and select the most - specific that correspond to the bundle identifier. - - Args: - profile: string, optional, short name of the .mobileprovision file - to use, if empty or the file is missing, the best file installed - will be used - bundle_identifier: string, value of CFBundleIdentifier from Info.plist - - Returns: - A tuple of the path to the selected provisioning profile, the data of - the embedded plist in the provisioning profile and the team identifier - to use for code signing. - - Raises: - SystemExit: if no .mobileprovision can be used to sign the bundle. - """ - profiles_dir = os.path.join( - os.environ['HOME'], 'Library', 'MobileDevice', 'Provisioning Profiles') - if not os.path.isdir(profiles_dir): - print >>sys.stderr, ( - 'cannot find mobile provisioning for %s' % bundle_identifier) - sys.exit(1) - provisioning_profiles = None - if profile: - profile_path = os.path.join(profiles_dir, profile + '.mobileprovision') - if os.path.exists(profile_path): - provisioning_profiles = [profile_path] - if not provisioning_profiles: - provisioning_profiles = glob.glob( - os.path.join(profiles_dir, '*.mobileprovision')) - valid_provisioning_profiles = {} - for profile_path in provisioning_profiles: - profile_data = self._LoadProvisioningProfile(profile_path) - app_id_pattern = profile_data.get( - 'Entitlements', {}).get('application-identifier', '') - for team_identifier in profile_data.get('TeamIdentifier', []): - app_id = '%s.%s' % (team_identifier, bundle_identifier) - if fnmatch.fnmatch(app_id, app_id_pattern): - valid_provisioning_profiles[app_id_pattern] = ( - profile_path, profile_data, team_identifier) - if not valid_provisioning_profiles: - print >>sys.stderr, ( - 'cannot find mobile provisioning for %s' % bundle_identifier) - sys.exit(1) - # If the user has multiple provisioning profiles installed that can be - # used for ${bundle_identifier}, pick the most specific one (ie. the - # provisioning profile whose pattern is the longest). - selected_key = max(valid_provisioning_profiles, key=lambda v: len(v)) - return valid_provisioning_profiles[selected_key] - - def _LoadProvisioningProfile(self, profile_path): - """Extracts the plist embedded in a provisioning profile. - - Args: - profile_path: string, path to the .mobileprovision file - - Returns: - Content of the plist embedded in the provisioning profile as a dictionary. 
- """ - with tempfile.NamedTemporaryFile() as temp: - subprocess.check_call([ - 'security', 'cms', '-D', '-i', profile_path, '-o', temp.name]) - return self._LoadPlistMaybeBinary(temp.name) - - def _MergePlist(self, merged_plist, plist): - """Merge |plist| into |merged_plist|.""" - for key, value in plist.iteritems(): - if isinstance(value, dict): - merged_value = merged_plist.get(key, {}) - if isinstance(merged_value, dict): - self._MergePlist(merged_value, value) - merged_plist[key] = merged_value - else: - merged_plist[key] = value - else: - merged_plist[key] = value - - def _LoadPlistMaybeBinary(self, plist_path): - """Loads into a memory a plist possibly encoded in binary format. - - This is a wrapper around plistlib.readPlist that tries to convert the - plist to the XML format if it can't be parsed (assuming that it is in - the binary format). - - Args: - plist_path: string, path to a plist file, in XML or binary format - - Returns: - Content of the plist as a dictionary. - """ - try: - # First, try to read the file using plistlib that only supports XML, - # and if an exception is raised, convert a temporary copy to XML and - # load that copy. - return plistlib.readPlist(plist_path) - except: - pass - with tempfile.NamedTemporaryFile() as temp: - shutil.copy2(plist_path, temp.name) - subprocess.check_call(['plutil', '-convert', 'xml1', temp.name]) - return plistlib.readPlist(temp.name) - - def _GetSubstitutions(self, bundle_identifier, app_identifier_prefix): - """Constructs a dictionary of variable substitutions for Entitlements.plist. - - Args: - bundle_identifier: string, value of CFBundleIdentifier from Info.plist - app_identifier_prefix: string, value for AppIdentifierPrefix - - Returns: - Dictionary of substitutions to apply when generating Entitlements.plist. - """ - return { - 'CFBundleIdentifier': bundle_identifier, - 'AppIdentifierPrefix': app_identifier_prefix, - } - - def _GetCFBundleIdentifier(self): - """Extracts CFBundleIdentifier value from Info.plist in the bundle. - - Returns: - Value of CFBundleIdentifier in the Info.plist located in the bundle. - """ - info_plist_path = os.path.join( - os.environ['TARGET_BUILD_DIR'], - os.environ['INFOPLIST_PATH']) - info_plist_data = self._LoadPlistMaybeBinary(info_plist_path) - return info_plist_data['CFBundleIdentifier'] - - def _InstallEntitlements(self, entitlements, substitutions, overrides): - """Generates and install the ${BundleName}.xcent entitlements file. - - Expands variables "$(variable)" pattern in the source entitlements file, - add extra entitlements defined in the .mobileprovision file and the copy - the generated plist to "${BundlePath}.xcent". - - Args: - entitlements: string, optional, path to the Entitlements.plist template - to use, defaults to "${SDKROOT}/Entitlements.plist" - substitutions: dictionary, variable substitutions - overrides: dictionary, values to add to the entitlements - - Returns: - Path to the generated entitlements file. 
- """ - source_path = entitlements - target_path = os.path.join( - os.environ['BUILT_PRODUCTS_DIR'], - os.environ['PRODUCT_NAME'] + '.xcent') - if not source_path: - source_path = os.path.join( - os.environ['SDKROOT'], - 'Entitlements.plist') - shutil.copy2(source_path, target_path) - data = self._LoadPlistMaybeBinary(target_path) - data = self._ExpandVariables(data, substitutions) - if overrides: - for key in overrides: - if key not in data: - data[key] = overrides[key] - plistlib.writePlist(data, target_path) - return target_path - - def _ExpandVariables(self, data, substitutions): - """Expands variables "$(variable)" in data. - - Args: - data: object, can be either string, list or dictionary - substitutions: dictionary, variable substitutions to perform - - Returns: - Copy of data where each references to "$(variable)" has been replaced - by the corresponding value found in substitutions, or left intact if - the key was not found. - """ - if isinstance(data, str): - for key, value in substitutions.iteritems(): - data = data.replace('$(%s)' % key, value) - return data - if isinstance(data, list): - return [self._ExpandVariables(v, substitutions) for v in data] - if isinstance(data, dict): - return {k: self._ExpandVariables(data[k], substitutions) for k in data} - return data - -if __name__ == '__main__': - sys.exit(main(sys.argv[1:])) diff --git a/test/addons-napi/test_buffer/build/test_buffer.target.mk b/test/addons-napi/test_buffer/build/test_buffer.target.mk deleted file mode 100644 index 789051b14480fc..00000000000000 --- a/test/addons-napi/test_buffer/build/test_buffer.target.mk +++ /dev/null @@ -1,178 +0,0 @@ -# This file is generated by gyp; do not edit. - -TOOLSET := target -TARGET := test_buffer -DEFS_Debug := \ - '-DNODE_GYP_MODULE_NAME=test_buffer' \ - '-DUSING_UV_SHARED=1' \ - '-DUSING_V8_SHARED=1' \ - '-DV8_DEPRECATION_WARNINGS=1' \ - '-D_DARWIN_USE_64_BIT_INODE=1' \ - '-D_LARGEFILE_SOURCE' \ - '-D_FILE_OFFSET_BITS=64' \ - '-DBUILDING_NODE_EXTENSION' \ - '-DDEBUG' \ - '-D_DEBUG' \ - '-DV8_ENABLE_CHECKS' - -# Flags passed to all source files. -CFLAGS_Debug := \ - -O0 \ - -gdwarf-2 \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -Wall \ - -Wendif-labels \ - -W \ - -Wno-unused-parameter - -# Flags passed to only C files. -CFLAGS_C_Debug := \ - -fno-strict-aliasing - -# Flags passed to only C++ files. -CFLAGS_CC_Debug := \ - -std=gnu++0x \ - -stdlib=libc++ \ - -fno-rtti \ - -fno-exceptions \ - -fno-threadsafe-statics \ - -fno-strict-aliasing - -# Flags passed to only ObjC files. -CFLAGS_OBJC_Debug := - -# Flags passed to only ObjC++ files. -CFLAGS_OBJCC_Debug := - -INCS_Debug := \ - -I/Users/trott/io.js/include/node \ - -I/Users/trott/io.js/src \ - -I/Users/trott/io.js/deps/uv/include \ - -I/Users/trott/io.js/deps/v8/include - -DEFS_Release := \ - '-DNODE_GYP_MODULE_NAME=test_buffer' \ - '-DUSING_UV_SHARED=1' \ - '-DUSING_V8_SHARED=1' \ - '-DV8_DEPRECATION_WARNINGS=1' \ - '-D_DARWIN_USE_64_BIT_INODE=1' \ - '-D_LARGEFILE_SOURCE' \ - '-D_FILE_OFFSET_BITS=64' \ - '-DBUILDING_NODE_EXTENSION' - -# Flags passed to all source files. -CFLAGS_Release := \ - -Os \ - -gdwarf-2 \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -Wall \ - -Wendif-labels \ - -W \ - -Wno-unused-parameter - -# Flags passed to only C files. -CFLAGS_C_Release := \ - -fno-strict-aliasing - -# Flags passed to only C++ files. -CFLAGS_CC_Release := \ - -std=gnu++0x \ - -stdlib=libc++ \ - -fno-rtti \ - -fno-exceptions \ - -fno-threadsafe-statics \ - -fno-strict-aliasing - -# Flags passed to only ObjC files. 
-CFLAGS_OBJC_Release := - -# Flags passed to only ObjC++ files. -CFLAGS_OBJCC_Release := - -INCS_Release := \ - -I/Users/trott/io.js/include/node \ - -I/Users/trott/io.js/src \ - -I/Users/trott/io.js/deps/uv/include \ - -I/Users/trott/io.js/deps/v8/include - -OBJS := \ - $(obj).target/$(TARGET)/test_buffer.o - -# Add to the list of files we specially track dependencies for. -all_deps += $(OBJS) - -# CFLAGS et al overrides must be target-local. -# See "Target-specific Variable Values" in the GNU Make manual. -$(OBJS): TOOLSET := $(TOOLSET) -$(OBJS): GYP_CFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) -$(OBJS): GYP_CXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) -$(OBJS): GYP_OBJCFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) $(CFLAGS_OBJC_$(BUILDTYPE)) -$(OBJS): GYP_OBJCXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) $(CFLAGS_OBJCC_$(BUILDTYPE)) - -# Suffix rules, putting all outputs into $(obj). - -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) - -# Try building from generated source, too. - -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) - -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) - -# End of this set of suffix rules -### Rules for final target. -LDFLAGS_Debug := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -L$(builddir) \ - -stdlib=libc++ - -LIBTOOLFLAGS_Debug := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first - -LDFLAGS_Release := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -L$(builddir) \ - -stdlib=libc++ - -LIBTOOLFLAGS_Release := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first - -LIBS := - -$(builddir)/test_buffer.node: GYP_LDFLAGS := $(LDFLAGS_$(BUILDTYPE)) -$(builddir)/test_buffer.node: LIBS := $(LIBS) -$(builddir)/test_buffer.node: GYP_LIBTOOLFLAGS := $(LIBTOOLFLAGS_$(BUILDTYPE)) -$(builddir)/test_buffer.node: TOOLSET := $(TOOLSET) -$(builddir)/test_buffer.node: $(OBJS) FORCE_DO_CMD - $(call do_cmd,solink_module) - -all_deps += $(builddir)/test_buffer.node -# Add target alias -.PHONY: test_buffer -test_buffer: $(builddir)/test_buffer.node - -# Short alias for building this executable. -.PHONY: test_buffer.node -test_buffer.node: $(builddir)/test_buffer.node - -# Add executable to "all" target. -.PHONY: all -all: $(builddir)/test_buffer.node - diff --git a/test/addons-napi/test_constructor/build/Makefile b/test/addons-napi/test_constructor/build/Makefile deleted file mode 100644 index eaa7fc33d320c6..00000000000000 --- a/test/addons-napi/test_constructor/build/Makefile +++ /dev/null @@ -1,342 +0,0 @@ -# We borrow heavily from the kernel build setup, though we are simpler since -# we don't have Kconfig tweaking settings on us. - -# The implicit make rules have it looking for RCS files, among other things. -# We instead explicitly write all the rules we care about. -# It's even quicker (saves ~200ms) to pass -r on the command line. -MAKEFLAGS=-r - -# The source directory tree. -srcdir := .. -abs_srcdir := $(abspath $(srcdir)) - -# The name of the builddir. -builddir_name ?= . - -# The V=1 flag on command line makes us verbosely print command lines. 
-ifdef V - quiet= -else - quiet=quiet_ -endif - -# Specify BUILDTYPE=Release on the command line for a release build. -BUILDTYPE ?= Release - -# Directory all our build output goes into. -# Note that this must be two directories beneath src/ for unit tests to pass, -# as they reach into the src/ directory for data with relative paths. -builddir ?= $(builddir_name)/$(BUILDTYPE) -abs_builddir := $(abspath $(builddir)) -depsdir := $(builddir)/.deps - -# Object output directory. -obj := $(builddir)/obj -abs_obj := $(abspath $(obj)) - -# We build up a list of every single one of the targets so we can slurp in the -# generated dependency rule Makefiles in one pass. -all_deps := - - - -CC.target ?= $(CC) -CFLAGS.target ?= $(CPPFLAGS) $(CFLAGS) -CXX.target ?= $(CXX) -CXXFLAGS.target ?= $(CPPFLAGS) $(CXXFLAGS) -LINK.target ?= $(LINK) -LDFLAGS.target ?= $(LDFLAGS) -AR.target ?= $(AR) - -# C++ apps need to be linked with g++. -LINK ?= $(CXX.target) - -# TODO(evan): move all cross-compilation logic to gyp-time so we don't need -# to replicate this environment fallback in make as well. -CC.host ?= gcc -CFLAGS.host ?= $(CPPFLAGS_host) $(CFLAGS_host) -CXX.host ?= g++ -CXXFLAGS.host ?= $(CPPFLAGS_host) $(CXXFLAGS_host) -LINK.host ?= $(CXX.host) -LDFLAGS.host ?= -AR.host ?= ar - -# Define a dir function that can handle spaces. -# http://www.gnu.org/software/make/manual/make.html#Syntax-of-Functions -# "leading spaces cannot appear in the text of the first argument as written. -# These characters can be put into the argument value by variable substitution." -empty := -space := $(empty) $(empty) - -# http://stackoverflow.com/questions/1189781/using-make-dir-or-notdir-on-a-path-with-spaces -replace_spaces = $(subst $(space),?,$1) -unreplace_spaces = $(subst ?,$(space),$1) -dirx = $(call unreplace_spaces,$(dir $(call replace_spaces,$1))) - -# Flags to make gcc output dependency info. Note that you need to be -# careful here to use the flags that ccache and distcc can understand. -# We write to a dep file on the side first and then rename at the end -# so we can't end up with a broken dep file. -depfile = $(depsdir)/$(call replace_spaces,$@).d -DEPFLAGS = -MMD -MF $(depfile).raw - -# We have to fixup the deps output in a few ways. -# (1) the file output should mention the proper .o file. -# ccache or distcc lose the path to the target, so we convert a rule of -# the form: -# foobar.o: DEP1 DEP2 -# into -# path/to/foobar.o: DEP1 DEP2 -# (2) we want missing files not to cause us to fail to build. -# We want to rewrite -# foobar.o: DEP1 DEP2 \ -# DEP3 -# to -# DEP1: -# DEP2: -# DEP3: -# so if the files are missing, they're just considered phony rules. -# We have to do some pretty insane escaping to get those backslashes -# and dollar signs past make, the shell, and sed at the same time. -# Doesn't work with spaces, but that's fine: .d files have spaces in -# their names replaced with other characters. -define fixup_dep -# The depfile may not exist if the input file didn't have any #includes. -touch $(depfile).raw -# Fixup path as in (1). -sed -e "s|^$(notdir $@)|$@|" $(depfile).raw >> $(depfile) -# Add extra rules as in (2). -# We remove slashes and replace spaces with new lines; -# remove blank lines; -# delete the first line and append a colon to the remaining lines. 
-sed -e 's|\\||' -e 'y| |\n|' $(depfile).raw |\ - grep -v '^$$' |\ - sed -e 1d -e 's|$$|:|' \ - >> $(depfile) -rm $(depfile).raw -endef - -# Command definitions: -# - cmd_foo is the actual command to run; -# - quiet_cmd_foo is the brief-output summary of the command. - -quiet_cmd_cc = CC($(TOOLSET)) $@ -cmd_cc = $(CC.$(TOOLSET)) $(GYP_CFLAGS) $(DEPFLAGS) $(CFLAGS.$(TOOLSET)) -c -o $@ $< - -quiet_cmd_cxx = CXX($(TOOLSET)) $@ -cmd_cxx = $(CXX.$(TOOLSET)) $(GYP_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< - -quiet_cmd_objc = CXX($(TOOLSET)) $@ -cmd_objc = $(CC.$(TOOLSET)) $(GYP_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $< - -quiet_cmd_objcxx = CXX($(TOOLSET)) $@ -cmd_objcxx = $(CXX.$(TOOLSET)) $(GYP_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $< - -# Commands for precompiled header files. -quiet_cmd_pch_c = CXX($(TOOLSET)) $@ -cmd_pch_c = $(CC.$(TOOLSET)) $(GYP_PCH_CFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< -quiet_cmd_pch_cc = CXX($(TOOLSET)) $@ -cmd_pch_cc = $(CC.$(TOOLSET)) $(GYP_PCH_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< -quiet_cmd_pch_m = CXX($(TOOLSET)) $@ -cmd_pch_m = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $< -quiet_cmd_pch_mm = CXX($(TOOLSET)) $@ -cmd_pch_mm = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $< - -# gyp-mac-tool is written next to the root Makefile by gyp. -# Use $(4) for the command, since $(2) and $(3) are used as flag by do_cmd -# already. -quiet_cmd_mac_tool = MACTOOL $(4) $< -cmd_mac_tool = ./gyp-mac-tool $(4) $< "$@" - -quiet_cmd_mac_package_framework = PACKAGE FRAMEWORK $@ -cmd_mac_package_framework = ./gyp-mac-tool package-framework "$@" $(4) - -quiet_cmd_infoplist = INFOPLIST $@ -cmd_infoplist = $(CC.$(TOOLSET)) -E -P -Wno-trigraphs -x c $(INFOPLIST_DEFINES) "$<" -o "$@" - -quiet_cmd_touch = TOUCH $@ -cmd_touch = touch $@ - -quiet_cmd_copy = COPY $@ -# send stderr to /dev/null to ignore messages when linking directories. -cmd_copy = rm -rf "$@" && cp -af "$<" "$@" - -quiet_cmd_alink = LIBTOOL-STATIC $@ -cmd_alink = rm -f $@ && ./gyp-mac-tool filter-libtool libtool $(GYP_LIBTOOLFLAGS) -static -o $@ $(filter %.o,$^) - -quiet_cmd_link = LINK($(TOOLSET)) $@ -cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS) - -quiet_cmd_solink = SOLINK($(TOOLSET)) $@ -cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS) - -quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@ -cmd_solink_module = $(LINK.$(TOOLSET)) -bundle $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS) - - -# Define an escape_quotes function to escape single quotes. -# This allows us to handle quotes properly as long as we always use -# use single quotes and escape_quotes. -escape_quotes = $(subst ','\'',$(1)) -# This comment is here just to include a ' to unconfuse syntax highlighting. -# Define an escape_vars function to escape '$' variable syntax. -# This allows us to read/write command lines with shell variables (e.g. -# $LD_LIBRARY_PATH), without triggering make substitution. -escape_vars = $(subst $$,$$$$,$(1)) -# Helper that expands to a shell command to echo a string exactly as it is in -# make. This uses printf instead of echo because printf's behaviour with respect -# to escape sequences is more portable than echo's across different shells -# (e.g., dash, bash). 
-exact_echo = printf '%s\n' '$(call escape_quotes,$(1))' - -# Helper to compare the command we're about to run against the command -# we logged the last time we ran the command. Produces an empty -# string (false) when the commands match. -# Tricky point: Make has no string-equality test function. -# The kernel uses the following, but it seems like it would have false -# positives, where one string reordered its arguments. -# arg_check = $(strip $(filter-out $(cmd_$(1)), $(cmd_$@)) \ -# $(filter-out $(cmd_$@), $(cmd_$(1)))) -# We instead substitute each for the empty string into the other, and -# say they're equal if both substitutions produce the empty string. -# .d files contain ? instead of spaces, take that into account. -command_changed = $(or $(subst $(cmd_$(1)),,$(cmd_$(call replace_spaces,$@))),\ - $(subst $(cmd_$(call replace_spaces,$@)),,$(cmd_$(1)))) - -# Helper that is non-empty when a prerequisite changes. -# Normally make does this implicitly, but we force rules to always run -# so we can check their command lines. -# $? -- new prerequisites -# $| -- order-only dependencies -prereq_changed = $(filter-out FORCE_DO_CMD,$(filter-out $|,$?)) - -# Helper that executes all postbuilds until one fails. -define do_postbuilds - @E=0;\ - for p in $(POSTBUILDS); do\ - eval $$p;\ - E=$$?;\ - if [ $$E -ne 0 ]; then\ - break;\ - fi;\ - done;\ - if [ $$E -ne 0 ]; then\ - rm -rf "$@";\ - exit $$E;\ - fi -endef - -# do_cmd: run a command via the above cmd_foo names, if necessary. -# Should always run for a given target to handle command-line changes. -# Second argument, if non-zero, makes it do asm/C/C++ dependency munging. -# Third argument, if non-zero, makes it do POSTBUILDS processing. -# Note: We intentionally do NOT call dirx for depfile, since it contains ? for -# spaces already and dirx strips the ? characters. -define do_cmd -$(if $(or $(command_changed),$(prereq_changed)), - @$(call exact_echo, $($(quiet)cmd_$(1))) - @mkdir -p "$(call dirx,$@)" "$(dir $(depfile))" - $(if $(findstring flock,$(word 2,$(cmd_$1))), - @$(cmd_$(1)) - @echo " $(quiet_cmd_$(1)): Finished", - @$(cmd_$(1)) - ) - @$(call exact_echo,$(call escape_vars,cmd_$(call replace_spaces,$@) := $(cmd_$(1)))) > $(depfile) - @$(if $(2),$(fixup_dep)) - $(if $(and $(3), $(POSTBUILDS)), - $(call do_postbuilds) - ) -) -endef - -# Declare the "all" target first so it is the default, -# even though we don't have the deps yet. -.PHONY: all -all: - -# make looks for ways to re-generate included makefiles, but in our case, we -# don't have a direct way. Explicitly telling make that it has nothing to do -# for them makes it go faster. -%.d: ; - -# Use FORCE_DO_CMD to force a target to run. Should be coupled with -# do_cmd. -.PHONY: FORCE_DO_CMD -FORCE_DO_CMD: - -TOOLSET := target -# Suffix rules, putting all outputs into $(obj). -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.m FORCE_DO_CMD - @$(call do_cmd,objc,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.mm FORCE_DO_CMD - @$(call do_cmd,objcxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.S FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.s FORCE_DO_CMD - @$(call do_cmd,cc,1) - -# Try building from generated source, too. 
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.m FORCE_DO_CMD - @$(call do_cmd,objc,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.mm FORCE_DO_CMD - @$(call do_cmd,objcxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.S FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.s FORCE_DO_CMD - @$(call do_cmd,cc,1) - -$(obj).$(TOOLSET)/%.o: $(obj)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.m FORCE_DO_CMD - @$(call do_cmd,objc,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.mm FORCE_DO_CMD - @$(call do_cmd,objcxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.S FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.s FORCE_DO_CMD - @$(call do_cmd,cc,1) - - -ifeq ($(strip $(foreach prefix,$(NO_LOAD),\ - $(findstring $(join ^,$(prefix)),\ - $(join ^,test_constructor.target.mk)))),) - include test_constructor.target.mk -endif - -quiet_cmd_regen_makefile = ACTION Regenerating $@ -cmd_regen_makefile = cd $(srcdir); /Users/trott/io.js/deps/npm/node_modules/node-gyp/gyp/gyp_main.py -fmake --ignore-environment "--toplevel-dir=." -I/Users/trott/io.js/test/addons-napi/test_constructor/build/config.gypi -I/Users/trott/io.js/deps/npm/node_modules/node-gyp/addon.gypi -I/Users/trott/io.js/common.gypi "--depth=." "-Goutput_dir=." "--generator-output=build" "-Dlibrary=shared_library" "-Dvisibility=default" "-Dnode_root_dir=/Users/trott/io.js" "-Dnode_gyp_dir=/Users/trott/io.js/deps/npm/node_modules/node-gyp" "-Dnode_lib_file=/Users/trott/io.js/$(Configuration)/node.lib" "-Dmodule_root_dir=/Users/trott/io.js/test/addons-napi/test_constructor" "-Dnode_engine=v8" binding.gyp -Makefile: $(srcdir)/../../../deps/npm/node_modules/node-gyp/addon.gypi $(srcdir)/build/config.gypi $(srcdir)/binding.gyp $(srcdir)/../../../common.gypi - $(call do_cmd,regen_makefile) - -# "all" is a concatenation of the "all" targets from all the included -# sub-makefiles. This is just here to clarify. -all: - -# Add in dependency-tracking rules. $(all_deps) is the list of every single -# target in our tree. Only consider the ones with .d (dependency) info: -d_files := $(wildcard $(foreach f,$(all_deps),$(depsdir)/$(f).d)) -ifneq ($(d_files),) - include $(d_files) -endif diff --git a/test/addons-napi/test_constructor/build/binding.Makefile b/test/addons-napi/test_constructor/build/binding.Makefile deleted file mode 100644 index f6f9987fe08426..00000000000000 --- a/test/addons-napi/test_constructor/build/binding.Makefile +++ /dev/null @@ -1,6 +0,0 @@ -# This file is generated by gyp; do not edit. - -export builddir_name ?= ./build/. -.PHONY: all -all: - $(MAKE) test_constructor diff --git a/test/addons-napi/test_constructor/build/config.gypi b/test/addons-napi/test_constructor/build/config.gypi deleted file mode 100644 index 6a35cfccbb772b..00000000000000 --- a/test/addons-napi/test_constructor/build/config.gypi +++ /dev/null @@ -1,70 +0,0 @@ -# Do not edit. 
File was generated by node-gyp's "configure" step -{ - "target_defaults": { - "cflags": [], - "default_configuration": "Release", - "defines": [], - "include_dirs": [], - "libraries": [] - }, - "variables": { - "asan": 0, - "coverage": "false", - "debug_devtools": "node", - "debug_http2": "false", - "debug_nghttp2": "false", - "force_dynamic_crt": 0, - "host_arch": "x64", - "icu_data_file": "icudt59l.dat", - "icu_data_in": "../../deps/icu-small/source/data/in/icudt59l.dat", - "icu_endianness": "l", - "icu_gyp_path": "tools/icu/icu-generic.gyp", - "icu_locales": "en,root", - "icu_path": "deps/icu-small", - "icu_small": "true", - "icu_ver_major": "59", - "llvm_version": 0, - "node_byteorder": "little", - "node_enable_d8": "false", - "node_enable_v8_vtunejit": "false", - "node_install_npm": "true", - "node_module_version": 58, - "node_no_browser_globals": "false", - "node_prefix": "/usr/local", - "node_release_urlbase": "", - "node_shared": "false", - "node_shared_cares": "false", - "node_shared_http_parser": "false", - "node_shared_libuv": "false", - "node_shared_openssl": "false", - "node_shared_zlib": "false", - "node_tag": "", - "node_use_bundled_v8": "true", - "node_use_dtrace": "true", - "node_use_etw": "false", - "node_use_lttng": "false", - "node_use_openssl": "true", - "node_use_perfctr": "false", - "node_use_v8_platform": "true", - "node_without_node_options": "false", - "openssl_fips": "", - "openssl_no_asm": 0, - "shlib_suffix": "58.dylib", - "target_arch": "x64", - "uv_parent_path": "/deps/uv/", - "uv_use_dtrace": "true", - "v8_enable_gdbjit": 0, - "v8_enable_i18n_support": 1, - "v8_enable_inspector": 1, - "v8_no_strict_aliasing": 1, - "v8_optimized_debug": 0, - "v8_promise_internal_field_count": 1, - "v8_random_seed": 0, - "v8_trace_maps": 0, - "v8_use_snapshot": "true", - "want_separate_host_toolset": 0, - "xcode_version": "8.0", - "nodedir": "/Users/trott/io.js", - "standalone_static_library": 1 - } -} diff --git a/test/addons-napi/test_constructor/build/gyp-mac-tool b/test/addons-napi/test_constructor/build/gyp-mac-tool deleted file mode 100755 index 8ef02b0493a003..00000000000000 --- a/test/addons-napi/test_constructor/build/gyp-mac-tool +++ /dev/null @@ -1,611 +0,0 @@ -#!/usr/bin/env python -# Generated by gyp. Do not edit. -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Utility functions to perform Xcode-style build steps. - -These functions are executed via gyp-mac-tool when using the Makefile generator. -""" - -import fcntl -import fnmatch -import glob -import json -import os -import plistlib -import re -import shutil -import string -import subprocess -import sys -import tempfile - - -def main(args): - executor = MacTool() - exit_code = executor.Dispatch(args) - if exit_code is not None: - sys.exit(exit_code) - - -class MacTool(object): - """This class performs all the Mac tooling steps. 
The methods can either be - executed directly, or dispatched from an argument list.""" - - def Dispatch(self, args): - """Dispatches a string command to a method.""" - if len(args) < 1: - raise Exception("Not enough arguments") - - method = "Exec%s" % self._CommandifyName(args[0]) - return getattr(self, method)(*args[1:]) - - def _CommandifyName(self, name_string): - """Transforms a tool name like copy-info-plist to CopyInfoPlist""" - return name_string.title().replace('-', '') - - def ExecCopyBundleResource(self, source, dest, convert_to_binary): - """Copies a resource file to the bundle/Resources directory, performing any - necessary compilation on each resource.""" - extension = os.path.splitext(source)[1].lower() - if os.path.isdir(source): - # Copy tree. - # TODO(thakis): This copies file attributes like mtime, while the - # single-file branch below doesn't. This should probably be changed to - # be consistent with the single-file branch. - if os.path.exists(dest): - shutil.rmtree(dest) - shutil.copytree(source, dest) - elif extension == '.xib': - return self._CopyXIBFile(source, dest) - elif extension == '.storyboard': - return self._CopyXIBFile(source, dest) - elif extension == '.strings': - self._CopyStringsFile(source, dest, convert_to_binary) - else: - shutil.copy(source, dest) - - def _CopyXIBFile(self, source, dest): - """Compiles a XIB file with ibtool into a binary plist in the bundle.""" - - # ibtool sometimes crashes with relative paths. See crbug.com/314728. - base = os.path.dirname(os.path.realpath(__file__)) - if os.path.relpath(source): - source = os.path.join(base, source) - if os.path.relpath(dest): - dest = os.path.join(base, dest) - - args = ['xcrun', 'ibtool', '--errors', '--warnings', '--notices', - '--output-format', 'human-readable-text', '--compile', dest, source] - ibtool_section_re = re.compile(r'/\*.*\*/') - ibtool_re = re.compile(r'.*note:.*is clipping its content') - ibtoolout = subprocess.Popen(args, stdout=subprocess.PIPE) - current_section_header = None - for line in ibtoolout.stdout: - if ibtool_section_re.match(line): - current_section_header = line - elif not ibtool_re.match(line): - if current_section_header: - sys.stdout.write(current_section_header) - current_section_header = None - sys.stdout.write(line) - return ibtoolout.returncode - - def _ConvertToBinary(self, dest): - subprocess.check_call([ - 'xcrun', 'plutil', '-convert', 'binary1', '-o', dest, dest]) - - def _CopyStringsFile(self, source, dest, convert_to_binary): - """Copies a .strings file using iconv to reconvert the input into UTF-16.""" - input_code = self._DetectInputEncoding(source) or "UTF-8" - - # Xcode's CpyCopyStringsFile / builtin-copyStrings seems to call - # CFPropertyListCreateFromXMLData() behind the scenes; at least it prints - # CFPropertyListCreateFromXMLData(): Old-style plist parser: missing - # semicolon in dictionary. - # on invalid files. Do the same kind of validation. - import CoreFoundation - s = open(source, 'rb').read() - d = CoreFoundation.CFDataCreate(None, s, len(s)) - _, error = CoreFoundation.CFPropertyListCreateFromXMLData(None, d, 0, None) - if error: - return - - fp = open(dest, 'wb') - fp.write(s.decode(input_code).encode('UTF-16')) - fp.close() - - if convert_to_binary == 'True': - self._ConvertToBinary(dest) - - def _DetectInputEncoding(self, file_name): - """Reads the first few bytes from file_name and tries to guess the text - encoding. 
Returns None as a guess if it can't detect it.""" - fp = open(file_name, 'rb') - try: - header = fp.read(3) - except e: - fp.close() - return None - fp.close() - if header.startswith("\xFE\xFF"): - return "UTF-16" - elif header.startswith("\xFF\xFE"): - return "UTF-16" - elif header.startswith("\xEF\xBB\xBF"): - return "UTF-8" - else: - return None - - def ExecCopyInfoPlist(self, source, dest, convert_to_binary, *keys): - """Copies the |source| Info.plist to the destination directory |dest|.""" - # Read the source Info.plist into memory. - fd = open(source, 'r') - lines = fd.read() - fd.close() - - # Insert synthesized key/value pairs (e.g. BuildMachineOSBuild). - plist = plistlib.readPlistFromString(lines) - if keys: - plist = dict(plist.items() + json.loads(keys[0]).items()) - lines = plistlib.writePlistToString(plist) - - # Go through all the environment variables and replace them as variables in - # the file. - IDENT_RE = re.compile(r'[/\s]') - for key in os.environ: - if key.startswith('_'): - continue - evar = '${%s}' % key - evalue = os.environ[key] - lines = string.replace(lines, evar, evalue) - - # Xcode supports various suffices on environment variables, which are - # all undocumented. :rfc1034identifier is used in the standard project - # template these days, and :identifier was used earlier. They are used to - # convert non-url characters into things that look like valid urls -- - # except that the replacement character for :identifier, '_' isn't valid - # in a URL either -- oops, hence :rfc1034identifier was born. - evar = '${%s:identifier}' % key - evalue = IDENT_RE.sub('_', os.environ[key]) - lines = string.replace(lines, evar, evalue) - - evar = '${%s:rfc1034identifier}' % key - evalue = IDENT_RE.sub('-', os.environ[key]) - lines = string.replace(lines, evar, evalue) - - # Remove any keys with values that haven't been replaced. - lines = lines.split('\n') - for i in range(len(lines)): - if lines[i].strip().startswith("${"): - lines[i] = None - lines[i - 1] = None - lines = '\n'.join(filter(lambda x: x is not None, lines)) - - # Write out the file with variables replaced. - fd = open(dest, 'w') - fd.write(lines) - fd.close() - - # Now write out PkgInfo file now that the Info.plist file has been - # "compiled". - self._WritePkgInfo(dest) - - if convert_to_binary == 'True': - self._ConvertToBinary(dest) - - def _WritePkgInfo(self, info_plist): - """This writes the PkgInfo file from the data stored in Info.plist.""" - plist = plistlib.readPlist(info_plist) - if not plist: - return - - # Only create PkgInfo for executable types. - package_type = plist['CFBundlePackageType'] - if package_type != 'APPL': - return - - # The format of PkgInfo is eight characters, representing the bundle type - # and bundle signature, each four characters. If that is missing, four - # '?' characters are used instead. - signature_code = plist.get('CFBundleSignature', '????') - if len(signature_code) != 4: # Wrong length resets everything, too. - signature_code = '?' * 4 - - dest = os.path.join(os.path.dirname(info_plist), 'PkgInfo') - fp = open(dest, 'w') - fp.write('%s%s' % (package_type, signature_code)) - fp.close() - - def ExecFlock(self, lockfile, *cmd_list): - """Emulates the most basic behavior of Linux's flock(1).""" - # Rely on exception handling to report errors. 
- fd = os.open(lockfile, os.O_RDONLY|os.O_NOCTTY|os.O_CREAT, 0o666) - fcntl.flock(fd, fcntl.LOCK_EX) - return subprocess.call(cmd_list) - - def ExecFilterLibtool(self, *cmd_list): - """Calls libtool and filters out '/path/to/libtool: file: foo.o has no - symbols'.""" - libtool_re = re.compile(r'^.*libtool: file: .* has no symbols$') - libtool_re5 = re.compile( - r'^.*libtool: warning for library: ' + - r'.* the table of contents is empty ' + - r'\(no object file members in the library define global symbols\)$') - env = os.environ.copy() - # Ref: - # http://www.opensource.apple.com/source/cctools/cctools-809/misc/libtool.c - # The problem with this flag is that it resets the file mtime on the file to - # epoch=0, e.g. 1970-1-1 or 1969-12-31 depending on timezone. - env['ZERO_AR_DATE'] = '1' - libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env) - _, err = libtoolout.communicate() - for line in err.splitlines(): - if not libtool_re.match(line) and not libtool_re5.match(line): - print >>sys.stderr, line - # Unconditionally touch the output .a file on the command line if present - # and the command succeeded. A bit hacky. - if not libtoolout.returncode: - for i in range(len(cmd_list) - 1): - if cmd_list[i] == "-o" and cmd_list[i+1].endswith('.a'): - os.utime(cmd_list[i+1], None) - break - return libtoolout.returncode - - def ExecPackageFramework(self, framework, version): - """Takes a path to Something.framework and the Current version of that and - sets up all the symlinks.""" - # Find the name of the binary based on the part before the ".framework". - binary = os.path.basename(framework).split('.')[0] - - CURRENT = 'Current' - RESOURCES = 'Resources' - VERSIONS = 'Versions' - - if not os.path.exists(os.path.join(framework, VERSIONS, version, binary)): - # Binary-less frameworks don't seem to contain symlinks (see e.g. - # chromium's out/Debug/org.chromium.Chromium.manifest/ bundle). - return - - # Move into the framework directory to set the symlinks correctly. - pwd = os.getcwd() - os.chdir(framework) - - # Set up the Current version. - self._Relink(version, os.path.join(VERSIONS, CURRENT)) - - # Set up the root symlinks. - self._Relink(os.path.join(VERSIONS, CURRENT, binary), binary) - self._Relink(os.path.join(VERSIONS, CURRENT, RESOURCES), RESOURCES) - - # Back to where we were before! - os.chdir(pwd) - - def _Relink(self, dest, link): - """Creates a symlink to |dest| named |link|. If |link| already exists, - it is overwritten.""" - if os.path.lexists(link): - os.remove(link) - os.symlink(dest, link) - - def ExecCompileXcassets(self, keys, *inputs): - """Compiles multiple .xcassets files into a single .car file. - - This invokes 'actool' to compile all the inputs .xcassets files. The - |keys| arguments is a json-encoded dictionary of extra arguments to - pass to 'actool' when the asset catalogs contains an application icon - or a launch image. - - Note that 'actool' does not create the Assets.car file if the asset - catalogs does not contains imageset. 
- """ - command_line = [ - 'xcrun', 'actool', '--output-format', 'human-readable-text', - '--compress-pngs', '--notices', '--warnings', '--errors', - ] - is_iphone_target = 'IPHONEOS_DEPLOYMENT_TARGET' in os.environ - if is_iphone_target: - platform = os.environ['CONFIGURATION'].split('-')[-1] - if platform not in ('iphoneos', 'iphonesimulator'): - platform = 'iphonesimulator' - command_line.extend([ - '--platform', platform, '--target-device', 'iphone', - '--target-device', 'ipad', '--minimum-deployment-target', - os.environ['IPHONEOS_DEPLOYMENT_TARGET'], '--compile', - os.path.abspath(os.environ['CONTENTS_FOLDER_PATH']), - ]) - else: - command_line.extend([ - '--platform', 'macosx', '--target-device', 'mac', - '--minimum-deployment-target', os.environ['MACOSX_DEPLOYMENT_TARGET'], - '--compile', - os.path.abspath(os.environ['UNLOCALIZED_RESOURCES_FOLDER_PATH']), - ]) - if keys: - keys = json.loads(keys) - for key, value in keys.iteritems(): - arg_name = '--' + key - if isinstance(value, bool): - if value: - command_line.append(arg_name) - elif isinstance(value, list): - for v in value: - command_line.append(arg_name) - command_line.append(str(v)) - else: - command_line.append(arg_name) - command_line.append(str(value)) - # Note: actool crashes if inputs path are relative, so use os.path.abspath - # to get absolute path name for inputs. - command_line.extend(map(os.path.abspath, inputs)) - subprocess.check_call(command_line) - - def ExecMergeInfoPlist(self, output, *inputs): - """Merge multiple .plist files into a single .plist file.""" - merged_plist = {} - for path in inputs: - plist = self._LoadPlistMaybeBinary(path) - self._MergePlist(merged_plist, plist) - plistlib.writePlist(merged_plist, output) - - def ExecCodeSignBundle(self, key, resource_rules, entitlements, provisioning): - """Code sign a bundle. - - This function tries to code sign an iOS bundle, following the same - algorithm as Xcode: - 1. copy ResourceRules.plist from the user or the SDK into the bundle, - 2. pick the provisioning profile that best match the bundle identifier, - and copy it into the bundle as embedded.mobileprovision, - 3. copy Entitlements.plist from user or SDK next to the bundle, - 4. code sign the bundle. - """ - resource_rules_path = self._InstallResourceRules(resource_rules) - substitutions, overrides = self._InstallProvisioningProfile( - provisioning, self._GetCFBundleIdentifier()) - entitlements_path = self._InstallEntitlements( - entitlements, substitutions, overrides) - subprocess.check_call([ - 'codesign', '--force', '--sign', key, '--resource-rules', - resource_rules_path, '--entitlements', entitlements_path, - os.path.join( - os.environ['TARGET_BUILD_DIR'], - os.environ['FULL_PRODUCT_NAME'])]) - - def _InstallResourceRules(self, resource_rules): - """Installs ResourceRules.plist from user or SDK into the bundle. - - Args: - resource_rules: string, optional, path to the ResourceRules.plist file - to use, default to "${SDKROOT}/ResourceRules.plist" - - Returns: - Path to the copy of ResourceRules.plist into the bundle. - """ - source_path = resource_rules - target_path = os.path.join( - os.environ['BUILT_PRODUCTS_DIR'], - os.environ['CONTENTS_FOLDER_PATH'], - 'ResourceRules.plist') - if not source_path: - source_path = os.path.join( - os.environ['SDKROOT'], 'ResourceRules.plist') - shutil.copy2(source_path, target_path) - return target_path - - def _InstallProvisioningProfile(self, profile, bundle_identifier): - """Installs embedded.mobileprovision into the bundle. 
- - Args: - profile: string, optional, short name of the .mobileprovision file - to use, if empty or the file is missing, the best file installed - will be used - bundle_identifier: string, value of CFBundleIdentifier from Info.plist - - Returns: - A tuple containing two dictionary: variables substitutions and values - to overrides when generating the entitlements file. - """ - source_path, provisioning_data, team_id = self._FindProvisioningProfile( - profile, bundle_identifier) - target_path = os.path.join( - os.environ['BUILT_PRODUCTS_DIR'], - os.environ['CONTENTS_FOLDER_PATH'], - 'embedded.mobileprovision') - shutil.copy2(source_path, target_path) - substitutions = self._GetSubstitutions(bundle_identifier, team_id + '.') - return substitutions, provisioning_data['Entitlements'] - - def _FindProvisioningProfile(self, profile, bundle_identifier): - """Finds the .mobileprovision file to use for signing the bundle. - - Checks all the installed provisioning profiles (or if the user specified - the PROVISIONING_PROFILE variable, only consult it) and select the most - specific that correspond to the bundle identifier. - - Args: - profile: string, optional, short name of the .mobileprovision file - to use, if empty or the file is missing, the best file installed - will be used - bundle_identifier: string, value of CFBundleIdentifier from Info.plist - - Returns: - A tuple of the path to the selected provisioning profile, the data of - the embedded plist in the provisioning profile and the team identifier - to use for code signing. - - Raises: - SystemExit: if no .mobileprovision can be used to sign the bundle. - """ - profiles_dir = os.path.join( - os.environ['HOME'], 'Library', 'MobileDevice', 'Provisioning Profiles') - if not os.path.isdir(profiles_dir): - print >>sys.stderr, ( - 'cannot find mobile provisioning for %s' % bundle_identifier) - sys.exit(1) - provisioning_profiles = None - if profile: - profile_path = os.path.join(profiles_dir, profile + '.mobileprovision') - if os.path.exists(profile_path): - provisioning_profiles = [profile_path] - if not provisioning_profiles: - provisioning_profiles = glob.glob( - os.path.join(profiles_dir, '*.mobileprovision')) - valid_provisioning_profiles = {} - for profile_path in provisioning_profiles: - profile_data = self._LoadProvisioningProfile(profile_path) - app_id_pattern = profile_data.get( - 'Entitlements', {}).get('application-identifier', '') - for team_identifier in profile_data.get('TeamIdentifier', []): - app_id = '%s.%s' % (team_identifier, bundle_identifier) - if fnmatch.fnmatch(app_id, app_id_pattern): - valid_provisioning_profiles[app_id_pattern] = ( - profile_path, profile_data, team_identifier) - if not valid_provisioning_profiles: - print >>sys.stderr, ( - 'cannot find mobile provisioning for %s' % bundle_identifier) - sys.exit(1) - # If the user has multiple provisioning profiles installed that can be - # used for ${bundle_identifier}, pick the most specific one (ie. the - # provisioning profile whose pattern is the longest). - selected_key = max(valid_provisioning_profiles, key=lambda v: len(v)) - return valid_provisioning_profiles[selected_key] - - def _LoadProvisioningProfile(self, profile_path): - """Extracts the plist embedded in a provisioning profile. - - Args: - profile_path: string, path to the .mobileprovision file - - Returns: - Content of the plist embedded in the provisioning profile as a dictionary. 
- """ - with tempfile.NamedTemporaryFile() as temp: - subprocess.check_call([ - 'security', 'cms', '-D', '-i', profile_path, '-o', temp.name]) - return self._LoadPlistMaybeBinary(temp.name) - - def _MergePlist(self, merged_plist, plist): - """Merge |plist| into |merged_plist|.""" - for key, value in plist.iteritems(): - if isinstance(value, dict): - merged_value = merged_plist.get(key, {}) - if isinstance(merged_value, dict): - self._MergePlist(merged_value, value) - merged_plist[key] = merged_value - else: - merged_plist[key] = value - else: - merged_plist[key] = value - - def _LoadPlistMaybeBinary(self, plist_path): - """Loads into a memory a plist possibly encoded in binary format. - - This is a wrapper around plistlib.readPlist that tries to convert the - plist to the XML format if it can't be parsed (assuming that it is in - the binary format). - - Args: - plist_path: string, path to a plist file, in XML or binary format - - Returns: - Content of the plist as a dictionary. - """ - try: - # First, try to read the file using plistlib that only supports XML, - # and if an exception is raised, convert a temporary copy to XML and - # load that copy. - return plistlib.readPlist(plist_path) - except: - pass - with tempfile.NamedTemporaryFile() as temp: - shutil.copy2(plist_path, temp.name) - subprocess.check_call(['plutil', '-convert', 'xml1', temp.name]) - return plistlib.readPlist(temp.name) - - def _GetSubstitutions(self, bundle_identifier, app_identifier_prefix): - """Constructs a dictionary of variable substitutions for Entitlements.plist. - - Args: - bundle_identifier: string, value of CFBundleIdentifier from Info.plist - app_identifier_prefix: string, value for AppIdentifierPrefix - - Returns: - Dictionary of substitutions to apply when generating Entitlements.plist. - """ - return { - 'CFBundleIdentifier': bundle_identifier, - 'AppIdentifierPrefix': app_identifier_prefix, - } - - def _GetCFBundleIdentifier(self): - """Extracts CFBundleIdentifier value from Info.plist in the bundle. - - Returns: - Value of CFBundleIdentifier in the Info.plist located in the bundle. - """ - info_plist_path = os.path.join( - os.environ['TARGET_BUILD_DIR'], - os.environ['INFOPLIST_PATH']) - info_plist_data = self._LoadPlistMaybeBinary(info_plist_path) - return info_plist_data['CFBundleIdentifier'] - - def _InstallEntitlements(self, entitlements, substitutions, overrides): - """Generates and install the ${BundleName}.xcent entitlements file. - - Expands variables "$(variable)" pattern in the source entitlements file, - add extra entitlements defined in the .mobileprovision file and the copy - the generated plist to "${BundlePath}.xcent". - - Args: - entitlements: string, optional, path to the Entitlements.plist template - to use, defaults to "${SDKROOT}/Entitlements.plist" - substitutions: dictionary, variable substitutions - overrides: dictionary, values to add to the entitlements - - Returns: - Path to the generated entitlements file. 
- """ - source_path = entitlements - target_path = os.path.join( - os.environ['BUILT_PRODUCTS_DIR'], - os.environ['PRODUCT_NAME'] + '.xcent') - if not source_path: - source_path = os.path.join( - os.environ['SDKROOT'], - 'Entitlements.plist') - shutil.copy2(source_path, target_path) - data = self._LoadPlistMaybeBinary(target_path) - data = self._ExpandVariables(data, substitutions) - if overrides: - for key in overrides: - if key not in data: - data[key] = overrides[key] - plistlib.writePlist(data, target_path) - return target_path - - def _ExpandVariables(self, data, substitutions): - """Expands variables "$(variable)" in data. - - Args: - data: object, can be either string, list or dictionary - substitutions: dictionary, variable substitutions to perform - - Returns: - Copy of data where each references to "$(variable)" has been replaced - by the corresponding value found in substitutions, or left intact if - the key was not found. - """ - if isinstance(data, str): - for key, value in substitutions.iteritems(): - data = data.replace('$(%s)' % key, value) - return data - if isinstance(data, list): - return [self._ExpandVariables(v, substitutions) for v in data] - if isinstance(data, dict): - return {k: self._ExpandVariables(data[k], substitutions) for k in data} - return data - -if __name__ == '__main__': - sys.exit(main(sys.argv[1:])) diff --git a/test/addons-napi/test_constructor/build/test_constructor.target.mk b/test/addons-napi/test_constructor/build/test_constructor.target.mk deleted file mode 100644 index 9af9de38c6467c..00000000000000 --- a/test/addons-napi/test_constructor/build/test_constructor.target.mk +++ /dev/null @@ -1,178 +0,0 @@ -# This file is generated by gyp; do not edit. - -TOOLSET := target -TARGET := test_constructor -DEFS_Debug := \ - '-DNODE_GYP_MODULE_NAME=test_constructor' \ - '-DUSING_UV_SHARED=1' \ - '-DUSING_V8_SHARED=1' \ - '-DV8_DEPRECATION_WARNINGS=1' \ - '-D_DARWIN_USE_64_BIT_INODE=1' \ - '-D_LARGEFILE_SOURCE' \ - '-D_FILE_OFFSET_BITS=64' \ - '-DBUILDING_NODE_EXTENSION' \ - '-DDEBUG' \ - '-D_DEBUG' \ - '-DV8_ENABLE_CHECKS' - -# Flags passed to all source files. -CFLAGS_Debug := \ - -O0 \ - -gdwarf-2 \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -Wall \ - -Wendif-labels \ - -W \ - -Wno-unused-parameter - -# Flags passed to only C files. -CFLAGS_C_Debug := \ - -fno-strict-aliasing - -# Flags passed to only C++ files. -CFLAGS_CC_Debug := \ - -std=gnu++0x \ - -stdlib=libc++ \ - -fno-rtti \ - -fno-exceptions \ - -fno-threadsafe-statics \ - -fno-strict-aliasing - -# Flags passed to only ObjC files. -CFLAGS_OBJC_Debug := - -# Flags passed to only ObjC++ files. -CFLAGS_OBJCC_Debug := - -INCS_Debug := \ - -I/Users/trott/io.js/include/node \ - -I/Users/trott/io.js/src \ - -I/Users/trott/io.js/deps/uv/include \ - -I/Users/trott/io.js/deps/v8/include - -DEFS_Release := \ - '-DNODE_GYP_MODULE_NAME=test_constructor' \ - '-DUSING_UV_SHARED=1' \ - '-DUSING_V8_SHARED=1' \ - '-DV8_DEPRECATION_WARNINGS=1' \ - '-D_DARWIN_USE_64_BIT_INODE=1' \ - '-D_LARGEFILE_SOURCE' \ - '-D_FILE_OFFSET_BITS=64' \ - '-DBUILDING_NODE_EXTENSION' - -# Flags passed to all source files. -CFLAGS_Release := \ - -Os \ - -gdwarf-2 \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -Wall \ - -Wendif-labels \ - -W \ - -Wno-unused-parameter - -# Flags passed to only C files. -CFLAGS_C_Release := \ - -fno-strict-aliasing - -# Flags passed to only C++ files. 
-CFLAGS_CC_Release := \ - -std=gnu++0x \ - -stdlib=libc++ \ - -fno-rtti \ - -fno-exceptions \ - -fno-threadsafe-statics \ - -fno-strict-aliasing - -# Flags passed to only ObjC files. -CFLAGS_OBJC_Release := - -# Flags passed to only ObjC++ files. -CFLAGS_OBJCC_Release := - -INCS_Release := \ - -I/Users/trott/io.js/include/node \ - -I/Users/trott/io.js/src \ - -I/Users/trott/io.js/deps/uv/include \ - -I/Users/trott/io.js/deps/v8/include - -OBJS := \ - $(obj).target/$(TARGET)/test_constructor.o - -# Add to the list of files we specially track dependencies for. -all_deps += $(OBJS) - -# CFLAGS et al overrides must be target-local. -# See "Target-specific Variable Values" in the GNU Make manual. -$(OBJS): TOOLSET := $(TOOLSET) -$(OBJS): GYP_CFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) -$(OBJS): GYP_CXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) -$(OBJS): GYP_OBJCFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) $(CFLAGS_OBJC_$(BUILDTYPE)) -$(OBJS): GYP_OBJCXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) $(CFLAGS_OBJCC_$(BUILDTYPE)) - -# Suffix rules, putting all outputs into $(obj). - -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) - -# Try building from generated source, too. - -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) - -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) - -# End of this set of suffix rules -### Rules for final target. -LDFLAGS_Debug := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -L$(builddir) \ - -stdlib=libc++ - -LIBTOOLFLAGS_Debug := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first - -LDFLAGS_Release := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -L$(builddir) \ - -stdlib=libc++ - -LIBTOOLFLAGS_Release := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first - -LIBS := - -$(builddir)/test_constructor.node: GYP_LDFLAGS := $(LDFLAGS_$(BUILDTYPE)) -$(builddir)/test_constructor.node: LIBS := $(LIBS) -$(builddir)/test_constructor.node: GYP_LIBTOOLFLAGS := $(LIBTOOLFLAGS_$(BUILDTYPE)) -$(builddir)/test_constructor.node: TOOLSET := $(TOOLSET) -$(builddir)/test_constructor.node: $(OBJS) FORCE_DO_CMD - $(call do_cmd,solink_module) - -all_deps += $(builddir)/test_constructor.node -# Add target alias -.PHONY: test_constructor -test_constructor: $(builddir)/test_constructor.node - -# Short alias for building this executable. -.PHONY: test_constructor.node -test_constructor.node: $(builddir)/test_constructor.node - -# Add executable to "all" target. -.PHONY: all -all: $(builddir)/test_constructor.node - diff --git a/test/addons-napi/test_conversions/build/Makefile b/test/addons-napi/test_conversions/build/Makefile deleted file mode 100644 index f0d95d652ab64c..00000000000000 --- a/test/addons-napi/test_conversions/build/Makefile +++ /dev/null @@ -1,342 +0,0 @@ -# We borrow heavily from the kernel build setup, though we are simpler since -# we don't have Kconfig tweaking settings on us. - -# The implicit make rules have it looking for RCS files, among other things. -# We instead explicitly write all the rules we care about. 
-# It's even quicker (saves ~200ms) to pass -r on the command line. -MAKEFLAGS=-r - -# The source directory tree. -srcdir := .. -abs_srcdir := $(abspath $(srcdir)) - -# The name of the builddir. -builddir_name ?= . - -# The V=1 flag on command line makes us verbosely print command lines. -ifdef V - quiet= -else - quiet=quiet_ -endif - -# Specify BUILDTYPE=Release on the command line for a release build. -BUILDTYPE ?= Release - -# Directory all our build output goes into. -# Note that this must be two directories beneath src/ for unit tests to pass, -# as they reach into the src/ directory for data with relative paths. -builddir ?= $(builddir_name)/$(BUILDTYPE) -abs_builddir := $(abspath $(builddir)) -depsdir := $(builddir)/.deps - -# Object output directory. -obj := $(builddir)/obj -abs_obj := $(abspath $(obj)) - -# We build up a list of every single one of the targets so we can slurp in the -# generated dependency rule Makefiles in one pass. -all_deps := - - - -CC.target ?= $(CC) -CFLAGS.target ?= $(CPPFLAGS) $(CFLAGS) -CXX.target ?= $(CXX) -CXXFLAGS.target ?= $(CPPFLAGS) $(CXXFLAGS) -LINK.target ?= $(LINK) -LDFLAGS.target ?= $(LDFLAGS) -AR.target ?= $(AR) - -# C++ apps need to be linked with g++. -LINK ?= $(CXX.target) - -# TODO(evan): move all cross-compilation logic to gyp-time so we don't need -# to replicate this environment fallback in make as well. -CC.host ?= gcc -CFLAGS.host ?= $(CPPFLAGS_host) $(CFLAGS_host) -CXX.host ?= g++ -CXXFLAGS.host ?= $(CPPFLAGS_host) $(CXXFLAGS_host) -LINK.host ?= $(CXX.host) -LDFLAGS.host ?= -AR.host ?= ar - -# Define a dir function that can handle spaces. -# http://www.gnu.org/software/make/manual/make.html#Syntax-of-Functions -# "leading spaces cannot appear in the text of the first argument as written. -# These characters can be put into the argument value by variable substitution." -empty := -space := $(empty) $(empty) - -# http://stackoverflow.com/questions/1189781/using-make-dir-or-notdir-on-a-path-with-spaces -replace_spaces = $(subst $(space),?,$1) -unreplace_spaces = $(subst ?,$(space),$1) -dirx = $(call unreplace_spaces,$(dir $(call replace_spaces,$1))) - -# Flags to make gcc output dependency info. Note that you need to be -# careful here to use the flags that ccache and distcc can understand. -# We write to a dep file on the side first and then rename at the end -# so we can't end up with a broken dep file. -depfile = $(depsdir)/$(call replace_spaces,$@).d -DEPFLAGS = -MMD -MF $(depfile).raw - -# We have to fixup the deps output in a few ways. -# (1) the file output should mention the proper .o file. -# ccache or distcc lose the path to the target, so we convert a rule of -# the form: -# foobar.o: DEP1 DEP2 -# into -# path/to/foobar.o: DEP1 DEP2 -# (2) we want missing files not to cause us to fail to build. -# We want to rewrite -# foobar.o: DEP1 DEP2 \ -# DEP3 -# to -# DEP1: -# DEP2: -# DEP3: -# so if the files are missing, they're just considered phony rules. -# We have to do some pretty insane escaping to get those backslashes -# and dollar signs past make, the shell, and sed at the same time. -# Doesn't work with spaces, but that's fine: .d files have spaces in -# their names replaced with other characters. -define fixup_dep -# The depfile may not exist if the input file didn't have any #includes. -touch $(depfile).raw -# Fixup path as in (1). -sed -e "s|^$(notdir $@)|$@|" $(depfile).raw >> $(depfile) -# Add extra rules as in (2). 
-# We remove slashes and replace spaces with new lines; -# remove blank lines; -# delete the first line and append a colon to the remaining lines. -sed -e 's|\\||' -e 'y| |\n|' $(depfile).raw |\ - grep -v '^$$' |\ - sed -e 1d -e 's|$$|:|' \ - >> $(depfile) -rm $(depfile).raw -endef - -# Command definitions: -# - cmd_foo is the actual command to run; -# - quiet_cmd_foo is the brief-output summary of the command. - -quiet_cmd_cc = CC($(TOOLSET)) $@ -cmd_cc = $(CC.$(TOOLSET)) $(GYP_CFLAGS) $(DEPFLAGS) $(CFLAGS.$(TOOLSET)) -c -o $@ $< - -quiet_cmd_cxx = CXX($(TOOLSET)) $@ -cmd_cxx = $(CXX.$(TOOLSET)) $(GYP_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< - -quiet_cmd_objc = CXX($(TOOLSET)) $@ -cmd_objc = $(CC.$(TOOLSET)) $(GYP_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $< - -quiet_cmd_objcxx = CXX($(TOOLSET)) $@ -cmd_objcxx = $(CXX.$(TOOLSET)) $(GYP_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $< - -# Commands for precompiled header files. -quiet_cmd_pch_c = CXX($(TOOLSET)) $@ -cmd_pch_c = $(CC.$(TOOLSET)) $(GYP_PCH_CFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< -quiet_cmd_pch_cc = CXX($(TOOLSET)) $@ -cmd_pch_cc = $(CC.$(TOOLSET)) $(GYP_PCH_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< -quiet_cmd_pch_m = CXX($(TOOLSET)) $@ -cmd_pch_m = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $< -quiet_cmd_pch_mm = CXX($(TOOLSET)) $@ -cmd_pch_mm = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $< - -# gyp-mac-tool is written next to the root Makefile by gyp. -# Use $(4) for the command, since $(2) and $(3) are used as flag by do_cmd -# already. -quiet_cmd_mac_tool = MACTOOL $(4) $< -cmd_mac_tool = ./gyp-mac-tool $(4) $< "$@" - -quiet_cmd_mac_package_framework = PACKAGE FRAMEWORK $@ -cmd_mac_package_framework = ./gyp-mac-tool package-framework "$@" $(4) - -quiet_cmd_infoplist = INFOPLIST $@ -cmd_infoplist = $(CC.$(TOOLSET)) -E -P -Wno-trigraphs -x c $(INFOPLIST_DEFINES) "$<" -o "$@" - -quiet_cmd_touch = TOUCH $@ -cmd_touch = touch $@ - -quiet_cmd_copy = COPY $@ -# send stderr to /dev/null to ignore messages when linking directories. -cmd_copy = rm -rf "$@" && cp -af "$<" "$@" - -quiet_cmd_alink = LIBTOOL-STATIC $@ -cmd_alink = rm -f $@ && ./gyp-mac-tool filter-libtool libtool $(GYP_LIBTOOLFLAGS) -static -o $@ $(filter %.o,$^) - -quiet_cmd_link = LINK($(TOOLSET)) $@ -cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS) - -quiet_cmd_solink = SOLINK($(TOOLSET)) $@ -cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS) - -quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@ -cmd_solink_module = $(LINK.$(TOOLSET)) -bundle $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS) - - -# Define an escape_quotes function to escape single quotes. -# This allows us to handle quotes properly as long as we always use -# use single quotes and escape_quotes. -escape_quotes = $(subst ','\'',$(1)) -# This comment is here just to include a ' to unconfuse syntax highlighting. -# Define an escape_vars function to escape '$' variable syntax. -# This allows us to read/write command lines with shell variables (e.g. -# $LD_LIBRARY_PATH), without triggering make substitution. -escape_vars = $(subst $$,$$$$,$(1)) -# Helper that expands to a shell command to echo a string exactly as it is in -# make. This uses printf instead of echo because printf's behaviour with respect -# to escape sequences is more portable than echo's across different shells -# (e.g., dash, bash). 
-exact_echo = printf '%s\n' '$(call escape_quotes,$(1))' - -# Helper to compare the command we're about to run against the command -# we logged the last time we ran the command. Produces an empty -# string (false) when the commands match. -# Tricky point: Make has no string-equality test function. -# The kernel uses the following, but it seems like it would have false -# positives, where one string reordered its arguments. -# arg_check = $(strip $(filter-out $(cmd_$(1)), $(cmd_$@)) \ -# $(filter-out $(cmd_$@), $(cmd_$(1)))) -# We instead substitute each for the empty string into the other, and -# say they're equal if both substitutions produce the empty string. -# .d files contain ? instead of spaces, take that into account. -command_changed = $(or $(subst $(cmd_$(1)),,$(cmd_$(call replace_spaces,$@))),\ - $(subst $(cmd_$(call replace_spaces,$@)),,$(cmd_$(1)))) - -# Helper that is non-empty when a prerequisite changes. -# Normally make does this implicitly, but we force rules to always run -# so we can check their command lines. -# $? -- new prerequisites -# $| -- order-only dependencies -prereq_changed = $(filter-out FORCE_DO_CMD,$(filter-out $|,$?)) - -# Helper that executes all postbuilds until one fails. -define do_postbuilds - @E=0;\ - for p in $(POSTBUILDS); do\ - eval $$p;\ - E=$$?;\ - if [ $$E -ne 0 ]; then\ - break;\ - fi;\ - done;\ - if [ $$E -ne 0 ]; then\ - rm -rf "$@";\ - exit $$E;\ - fi -endef - -# do_cmd: run a command via the above cmd_foo names, if necessary. -# Should always run for a given target to handle command-line changes. -# Second argument, if non-zero, makes it do asm/C/C++ dependency munging. -# Third argument, if non-zero, makes it do POSTBUILDS processing. -# Note: We intentionally do NOT call dirx for depfile, since it contains ? for -# spaces already and dirx strips the ? characters. -define do_cmd -$(if $(or $(command_changed),$(prereq_changed)), - @$(call exact_echo, $($(quiet)cmd_$(1))) - @mkdir -p "$(call dirx,$@)" "$(dir $(depfile))" - $(if $(findstring flock,$(word 2,$(cmd_$1))), - @$(cmd_$(1)) - @echo " $(quiet_cmd_$(1)): Finished", - @$(cmd_$(1)) - ) - @$(call exact_echo,$(call escape_vars,cmd_$(call replace_spaces,$@) := $(cmd_$(1)))) > $(depfile) - @$(if $(2),$(fixup_dep)) - $(if $(and $(3), $(POSTBUILDS)), - $(call do_postbuilds) - ) -) -endef - -# Declare the "all" target first so it is the default, -# even though we don't have the deps yet. -.PHONY: all -all: - -# make looks for ways to re-generate included makefiles, but in our case, we -# don't have a direct way. Explicitly telling make that it has nothing to do -# for them makes it go faster. -%.d: ; - -# Use FORCE_DO_CMD to force a target to run. Should be coupled with -# do_cmd. -.PHONY: FORCE_DO_CMD -FORCE_DO_CMD: - -TOOLSET := target -# Suffix rules, putting all outputs into $(obj). -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.m FORCE_DO_CMD - @$(call do_cmd,objc,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.mm FORCE_DO_CMD - @$(call do_cmd,objcxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.S FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.s FORCE_DO_CMD - @$(call do_cmd,cc,1) - -# Try building from generated source, too. 
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.m FORCE_DO_CMD - @$(call do_cmd,objc,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.mm FORCE_DO_CMD - @$(call do_cmd,objcxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.S FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.s FORCE_DO_CMD - @$(call do_cmd,cc,1) - -$(obj).$(TOOLSET)/%.o: $(obj)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.m FORCE_DO_CMD - @$(call do_cmd,objc,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.mm FORCE_DO_CMD - @$(call do_cmd,objcxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.S FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.s FORCE_DO_CMD - @$(call do_cmd,cc,1) - - -ifeq ($(strip $(foreach prefix,$(NO_LOAD),\ - $(findstring $(join ^,$(prefix)),\ - $(join ^,test_conversions.target.mk)))),) - include test_conversions.target.mk -endif - -quiet_cmd_regen_makefile = ACTION Regenerating $@ -cmd_regen_makefile = cd $(srcdir); /Users/trott/io.js/deps/npm/node_modules/node-gyp/gyp/gyp_main.py -fmake --ignore-environment "--toplevel-dir=." -I/Users/trott/io.js/test/addons-napi/test_conversions/build/config.gypi -I/Users/trott/io.js/deps/npm/node_modules/node-gyp/addon.gypi -I/Users/trott/io.js/common.gypi "--depth=." "-Goutput_dir=." "--generator-output=build" "-Dlibrary=shared_library" "-Dvisibility=default" "-Dnode_root_dir=/Users/trott/io.js" "-Dnode_gyp_dir=/Users/trott/io.js/deps/npm/node_modules/node-gyp" "-Dnode_lib_file=/Users/trott/io.js/$(Configuration)/node.lib" "-Dmodule_root_dir=/Users/trott/io.js/test/addons-napi/test_conversions" "-Dnode_engine=v8" binding.gyp -Makefile: $(srcdir)/../../../deps/npm/node_modules/node-gyp/addon.gypi $(srcdir)/build/config.gypi $(srcdir)/binding.gyp $(srcdir)/../../../common.gypi - $(call do_cmd,regen_makefile) - -# "all" is a concatenation of the "all" targets from all the included -# sub-makefiles. This is just here to clarify. -all: - -# Add in dependency-tracking rules. $(all_deps) is the list of every single -# target in our tree. Only consider the ones with .d (dependency) info: -d_files := $(wildcard $(foreach f,$(all_deps),$(depsdir)/$(f).d)) -ifneq ($(d_files),) - include $(d_files) -endif diff --git a/test/addons-napi/test_conversions/build/binding.Makefile b/test/addons-napi/test_conversions/build/binding.Makefile deleted file mode 100644 index 3c236072468774..00000000000000 --- a/test/addons-napi/test_conversions/build/binding.Makefile +++ /dev/null @@ -1,6 +0,0 @@ -# This file is generated by gyp; do not edit. - -export builddir_name ?= ./build/. -.PHONY: all -all: - $(MAKE) test_conversions diff --git a/test/addons-napi/test_conversions/build/config.gypi b/test/addons-napi/test_conversions/build/config.gypi deleted file mode 100644 index 6a35cfccbb772b..00000000000000 --- a/test/addons-napi/test_conversions/build/config.gypi +++ /dev/null @@ -1,70 +0,0 @@ -# Do not edit. 
File was generated by node-gyp's "configure" step -{ - "target_defaults": { - "cflags": [], - "default_configuration": "Release", - "defines": [], - "include_dirs": [], - "libraries": [] - }, - "variables": { - "asan": 0, - "coverage": "false", - "debug_devtools": "node", - "debug_http2": "false", - "debug_nghttp2": "false", - "force_dynamic_crt": 0, - "host_arch": "x64", - "icu_data_file": "icudt59l.dat", - "icu_data_in": "../../deps/icu-small/source/data/in/icudt59l.dat", - "icu_endianness": "l", - "icu_gyp_path": "tools/icu/icu-generic.gyp", - "icu_locales": "en,root", - "icu_path": "deps/icu-small", - "icu_small": "true", - "icu_ver_major": "59", - "llvm_version": 0, - "node_byteorder": "little", - "node_enable_d8": "false", - "node_enable_v8_vtunejit": "false", - "node_install_npm": "true", - "node_module_version": 58, - "node_no_browser_globals": "false", - "node_prefix": "/usr/local", - "node_release_urlbase": "", - "node_shared": "false", - "node_shared_cares": "false", - "node_shared_http_parser": "false", - "node_shared_libuv": "false", - "node_shared_openssl": "false", - "node_shared_zlib": "false", - "node_tag": "", - "node_use_bundled_v8": "true", - "node_use_dtrace": "true", - "node_use_etw": "false", - "node_use_lttng": "false", - "node_use_openssl": "true", - "node_use_perfctr": "false", - "node_use_v8_platform": "true", - "node_without_node_options": "false", - "openssl_fips": "", - "openssl_no_asm": 0, - "shlib_suffix": "58.dylib", - "target_arch": "x64", - "uv_parent_path": "/deps/uv/", - "uv_use_dtrace": "true", - "v8_enable_gdbjit": 0, - "v8_enable_i18n_support": 1, - "v8_enable_inspector": 1, - "v8_no_strict_aliasing": 1, - "v8_optimized_debug": 0, - "v8_promise_internal_field_count": 1, - "v8_random_seed": 0, - "v8_trace_maps": 0, - "v8_use_snapshot": "true", - "want_separate_host_toolset": 0, - "xcode_version": "8.0", - "nodedir": "/Users/trott/io.js", - "standalone_static_library": 1 - } -} diff --git a/test/addons-napi/test_conversions/build/gyp-mac-tool b/test/addons-napi/test_conversions/build/gyp-mac-tool deleted file mode 100755 index 8ef02b0493a003..00000000000000 --- a/test/addons-napi/test_conversions/build/gyp-mac-tool +++ /dev/null @@ -1,611 +0,0 @@ -#!/usr/bin/env python -# Generated by gyp. Do not edit. -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Utility functions to perform Xcode-style build steps. - -These functions are executed via gyp-mac-tool when using the Makefile generator. -""" - -import fcntl -import fnmatch -import glob -import json -import os -import plistlib -import re -import shutil -import string -import subprocess -import sys -import tempfile - - -def main(args): - executor = MacTool() - exit_code = executor.Dispatch(args) - if exit_code is not None: - sys.exit(exit_code) - - -class MacTool(object): - """This class performs all the Mac tooling steps. 
The methods can either be - executed directly, or dispatched from an argument list.""" - - def Dispatch(self, args): - """Dispatches a string command to a method.""" - if len(args) < 1: - raise Exception("Not enough arguments") - - method = "Exec%s" % self._CommandifyName(args[0]) - return getattr(self, method)(*args[1:]) - - def _CommandifyName(self, name_string): - """Transforms a tool name like copy-info-plist to CopyInfoPlist""" - return name_string.title().replace('-', '') - - def ExecCopyBundleResource(self, source, dest, convert_to_binary): - """Copies a resource file to the bundle/Resources directory, performing any - necessary compilation on each resource.""" - extension = os.path.splitext(source)[1].lower() - if os.path.isdir(source): - # Copy tree. - # TODO(thakis): This copies file attributes like mtime, while the - # single-file branch below doesn't. This should probably be changed to - # be consistent with the single-file branch. - if os.path.exists(dest): - shutil.rmtree(dest) - shutil.copytree(source, dest) - elif extension == '.xib': - return self._CopyXIBFile(source, dest) - elif extension == '.storyboard': - return self._CopyXIBFile(source, dest) - elif extension == '.strings': - self._CopyStringsFile(source, dest, convert_to_binary) - else: - shutil.copy(source, dest) - - def _CopyXIBFile(self, source, dest): - """Compiles a XIB file with ibtool into a binary plist in the bundle.""" - - # ibtool sometimes crashes with relative paths. See crbug.com/314728. - base = os.path.dirname(os.path.realpath(__file__)) - if os.path.relpath(source): - source = os.path.join(base, source) - if os.path.relpath(dest): - dest = os.path.join(base, dest) - - args = ['xcrun', 'ibtool', '--errors', '--warnings', '--notices', - '--output-format', 'human-readable-text', '--compile', dest, source] - ibtool_section_re = re.compile(r'/\*.*\*/') - ibtool_re = re.compile(r'.*note:.*is clipping its content') - ibtoolout = subprocess.Popen(args, stdout=subprocess.PIPE) - current_section_header = None - for line in ibtoolout.stdout: - if ibtool_section_re.match(line): - current_section_header = line - elif not ibtool_re.match(line): - if current_section_header: - sys.stdout.write(current_section_header) - current_section_header = None - sys.stdout.write(line) - return ibtoolout.returncode - - def _ConvertToBinary(self, dest): - subprocess.check_call([ - 'xcrun', 'plutil', '-convert', 'binary1', '-o', dest, dest]) - - def _CopyStringsFile(self, source, dest, convert_to_binary): - """Copies a .strings file using iconv to reconvert the input into UTF-16.""" - input_code = self._DetectInputEncoding(source) or "UTF-8" - - # Xcode's CpyCopyStringsFile / builtin-copyStrings seems to call - # CFPropertyListCreateFromXMLData() behind the scenes; at least it prints - # CFPropertyListCreateFromXMLData(): Old-style plist parser: missing - # semicolon in dictionary. - # on invalid files. Do the same kind of validation. - import CoreFoundation - s = open(source, 'rb').read() - d = CoreFoundation.CFDataCreate(None, s, len(s)) - _, error = CoreFoundation.CFPropertyListCreateFromXMLData(None, d, 0, None) - if error: - return - - fp = open(dest, 'wb') - fp.write(s.decode(input_code).encode('UTF-16')) - fp.close() - - if convert_to_binary == 'True': - self._ConvertToBinary(dest) - - def _DetectInputEncoding(self, file_name): - """Reads the first few bytes from file_name and tries to guess the text - encoding. 
Returns None as a guess if it can't detect it.""" - fp = open(file_name, 'rb') - try: - header = fp.read(3) - except e: - fp.close() - return None - fp.close() - if header.startswith("\xFE\xFF"): - return "UTF-16" - elif header.startswith("\xFF\xFE"): - return "UTF-16" - elif header.startswith("\xEF\xBB\xBF"): - return "UTF-8" - else: - return None - - def ExecCopyInfoPlist(self, source, dest, convert_to_binary, *keys): - """Copies the |source| Info.plist to the destination directory |dest|.""" - # Read the source Info.plist into memory. - fd = open(source, 'r') - lines = fd.read() - fd.close() - - # Insert synthesized key/value pairs (e.g. BuildMachineOSBuild). - plist = plistlib.readPlistFromString(lines) - if keys: - plist = dict(plist.items() + json.loads(keys[0]).items()) - lines = plistlib.writePlistToString(plist) - - # Go through all the environment variables and replace them as variables in - # the file. - IDENT_RE = re.compile(r'[/\s]') - for key in os.environ: - if key.startswith('_'): - continue - evar = '${%s}' % key - evalue = os.environ[key] - lines = string.replace(lines, evar, evalue) - - # Xcode supports various suffices on environment variables, which are - # all undocumented. :rfc1034identifier is used in the standard project - # template these days, and :identifier was used earlier. They are used to - # convert non-url characters into things that look like valid urls -- - # except that the replacement character for :identifier, '_' isn't valid - # in a URL either -- oops, hence :rfc1034identifier was born. - evar = '${%s:identifier}' % key - evalue = IDENT_RE.sub('_', os.environ[key]) - lines = string.replace(lines, evar, evalue) - - evar = '${%s:rfc1034identifier}' % key - evalue = IDENT_RE.sub('-', os.environ[key]) - lines = string.replace(lines, evar, evalue) - - # Remove any keys with values that haven't been replaced. - lines = lines.split('\n') - for i in range(len(lines)): - if lines[i].strip().startswith("${"): - lines[i] = None - lines[i - 1] = None - lines = '\n'.join(filter(lambda x: x is not None, lines)) - - # Write out the file with variables replaced. - fd = open(dest, 'w') - fd.write(lines) - fd.close() - - # Now write out PkgInfo file now that the Info.plist file has been - # "compiled". - self._WritePkgInfo(dest) - - if convert_to_binary == 'True': - self._ConvertToBinary(dest) - - def _WritePkgInfo(self, info_plist): - """This writes the PkgInfo file from the data stored in Info.plist.""" - plist = plistlib.readPlist(info_plist) - if not plist: - return - - # Only create PkgInfo for executable types. - package_type = plist['CFBundlePackageType'] - if package_type != 'APPL': - return - - # The format of PkgInfo is eight characters, representing the bundle type - # and bundle signature, each four characters. If that is missing, four - # '?' characters are used instead. - signature_code = plist.get('CFBundleSignature', '????') - if len(signature_code) != 4: # Wrong length resets everything, too. - signature_code = '?' * 4 - - dest = os.path.join(os.path.dirname(info_plist), 'PkgInfo') - fp = open(dest, 'w') - fp.write('%s%s' % (package_type, signature_code)) - fp.close() - - def ExecFlock(self, lockfile, *cmd_list): - """Emulates the most basic behavior of Linux's flock(1).""" - # Rely on exception handling to report errors. 
- fd = os.open(lockfile, os.O_RDONLY|os.O_NOCTTY|os.O_CREAT, 0o666) - fcntl.flock(fd, fcntl.LOCK_EX) - return subprocess.call(cmd_list) - - def ExecFilterLibtool(self, *cmd_list): - """Calls libtool and filters out '/path/to/libtool: file: foo.o has no - symbols'.""" - libtool_re = re.compile(r'^.*libtool: file: .* has no symbols$') - libtool_re5 = re.compile( - r'^.*libtool: warning for library: ' + - r'.* the table of contents is empty ' + - r'\(no object file members in the library define global symbols\)$') - env = os.environ.copy() - # Ref: - # http://www.opensource.apple.com/source/cctools/cctools-809/misc/libtool.c - # The problem with this flag is that it resets the file mtime on the file to - # epoch=0, e.g. 1970-1-1 or 1969-12-31 depending on timezone. - env['ZERO_AR_DATE'] = '1' - libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env) - _, err = libtoolout.communicate() - for line in err.splitlines(): - if not libtool_re.match(line) and not libtool_re5.match(line): - print >>sys.stderr, line - # Unconditionally touch the output .a file on the command line if present - # and the command succeeded. A bit hacky. - if not libtoolout.returncode: - for i in range(len(cmd_list) - 1): - if cmd_list[i] == "-o" and cmd_list[i+1].endswith('.a'): - os.utime(cmd_list[i+1], None) - break - return libtoolout.returncode - - def ExecPackageFramework(self, framework, version): - """Takes a path to Something.framework and the Current version of that and - sets up all the symlinks.""" - # Find the name of the binary based on the part before the ".framework". - binary = os.path.basename(framework).split('.')[0] - - CURRENT = 'Current' - RESOURCES = 'Resources' - VERSIONS = 'Versions' - - if not os.path.exists(os.path.join(framework, VERSIONS, version, binary)): - # Binary-less frameworks don't seem to contain symlinks (see e.g. - # chromium's out/Debug/org.chromium.Chromium.manifest/ bundle). - return - - # Move into the framework directory to set the symlinks correctly. - pwd = os.getcwd() - os.chdir(framework) - - # Set up the Current version. - self._Relink(version, os.path.join(VERSIONS, CURRENT)) - - # Set up the root symlinks. - self._Relink(os.path.join(VERSIONS, CURRENT, binary), binary) - self._Relink(os.path.join(VERSIONS, CURRENT, RESOURCES), RESOURCES) - - # Back to where we were before! - os.chdir(pwd) - - def _Relink(self, dest, link): - """Creates a symlink to |dest| named |link|. If |link| already exists, - it is overwritten.""" - if os.path.lexists(link): - os.remove(link) - os.symlink(dest, link) - - def ExecCompileXcassets(self, keys, *inputs): - """Compiles multiple .xcassets files into a single .car file. - - This invokes 'actool' to compile all the inputs .xcassets files. The - |keys| arguments is a json-encoded dictionary of extra arguments to - pass to 'actool' when the asset catalogs contains an application icon - or a launch image. - - Note that 'actool' does not create the Assets.car file if the asset - catalogs does not contains imageset. 
- """ - command_line = [ - 'xcrun', 'actool', '--output-format', 'human-readable-text', - '--compress-pngs', '--notices', '--warnings', '--errors', - ] - is_iphone_target = 'IPHONEOS_DEPLOYMENT_TARGET' in os.environ - if is_iphone_target: - platform = os.environ['CONFIGURATION'].split('-')[-1] - if platform not in ('iphoneos', 'iphonesimulator'): - platform = 'iphonesimulator' - command_line.extend([ - '--platform', platform, '--target-device', 'iphone', - '--target-device', 'ipad', '--minimum-deployment-target', - os.environ['IPHONEOS_DEPLOYMENT_TARGET'], '--compile', - os.path.abspath(os.environ['CONTENTS_FOLDER_PATH']), - ]) - else: - command_line.extend([ - '--platform', 'macosx', '--target-device', 'mac', - '--minimum-deployment-target', os.environ['MACOSX_DEPLOYMENT_TARGET'], - '--compile', - os.path.abspath(os.environ['UNLOCALIZED_RESOURCES_FOLDER_PATH']), - ]) - if keys: - keys = json.loads(keys) - for key, value in keys.iteritems(): - arg_name = '--' + key - if isinstance(value, bool): - if value: - command_line.append(arg_name) - elif isinstance(value, list): - for v in value: - command_line.append(arg_name) - command_line.append(str(v)) - else: - command_line.append(arg_name) - command_line.append(str(value)) - # Note: actool crashes if inputs path are relative, so use os.path.abspath - # to get absolute path name for inputs. - command_line.extend(map(os.path.abspath, inputs)) - subprocess.check_call(command_line) - - def ExecMergeInfoPlist(self, output, *inputs): - """Merge multiple .plist files into a single .plist file.""" - merged_plist = {} - for path in inputs: - plist = self._LoadPlistMaybeBinary(path) - self._MergePlist(merged_plist, plist) - plistlib.writePlist(merged_plist, output) - - def ExecCodeSignBundle(self, key, resource_rules, entitlements, provisioning): - """Code sign a bundle. - - This function tries to code sign an iOS bundle, following the same - algorithm as Xcode: - 1. copy ResourceRules.plist from the user or the SDK into the bundle, - 2. pick the provisioning profile that best match the bundle identifier, - and copy it into the bundle as embedded.mobileprovision, - 3. copy Entitlements.plist from user or SDK next to the bundle, - 4. code sign the bundle. - """ - resource_rules_path = self._InstallResourceRules(resource_rules) - substitutions, overrides = self._InstallProvisioningProfile( - provisioning, self._GetCFBundleIdentifier()) - entitlements_path = self._InstallEntitlements( - entitlements, substitutions, overrides) - subprocess.check_call([ - 'codesign', '--force', '--sign', key, '--resource-rules', - resource_rules_path, '--entitlements', entitlements_path, - os.path.join( - os.environ['TARGET_BUILD_DIR'], - os.environ['FULL_PRODUCT_NAME'])]) - - def _InstallResourceRules(self, resource_rules): - """Installs ResourceRules.plist from user or SDK into the bundle. - - Args: - resource_rules: string, optional, path to the ResourceRules.plist file - to use, default to "${SDKROOT}/ResourceRules.plist" - - Returns: - Path to the copy of ResourceRules.plist into the bundle. - """ - source_path = resource_rules - target_path = os.path.join( - os.environ['BUILT_PRODUCTS_DIR'], - os.environ['CONTENTS_FOLDER_PATH'], - 'ResourceRules.plist') - if not source_path: - source_path = os.path.join( - os.environ['SDKROOT'], 'ResourceRules.plist') - shutil.copy2(source_path, target_path) - return target_path - - def _InstallProvisioningProfile(self, profile, bundle_identifier): - """Installs embedded.mobileprovision into the bundle. 
- - Args: - profile: string, optional, short name of the .mobileprovision file - to use, if empty or the file is missing, the best file installed - will be used - bundle_identifier: string, value of CFBundleIdentifier from Info.plist - - Returns: - A tuple containing two dictionary: variables substitutions and values - to overrides when generating the entitlements file. - """ - source_path, provisioning_data, team_id = self._FindProvisioningProfile( - profile, bundle_identifier) - target_path = os.path.join( - os.environ['BUILT_PRODUCTS_DIR'], - os.environ['CONTENTS_FOLDER_PATH'], - 'embedded.mobileprovision') - shutil.copy2(source_path, target_path) - substitutions = self._GetSubstitutions(bundle_identifier, team_id + '.') - return substitutions, provisioning_data['Entitlements'] - - def _FindProvisioningProfile(self, profile, bundle_identifier): - """Finds the .mobileprovision file to use for signing the bundle. - - Checks all the installed provisioning profiles (or if the user specified - the PROVISIONING_PROFILE variable, only consult it) and select the most - specific that correspond to the bundle identifier. - - Args: - profile: string, optional, short name of the .mobileprovision file - to use, if empty or the file is missing, the best file installed - will be used - bundle_identifier: string, value of CFBundleIdentifier from Info.plist - - Returns: - A tuple of the path to the selected provisioning profile, the data of - the embedded plist in the provisioning profile and the team identifier - to use for code signing. - - Raises: - SystemExit: if no .mobileprovision can be used to sign the bundle. - """ - profiles_dir = os.path.join( - os.environ['HOME'], 'Library', 'MobileDevice', 'Provisioning Profiles') - if not os.path.isdir(profiles_dir): - print >>sys.stderr, ( - 'cannot find mobile provisioning for %s' % bundle_identifier) - sys.exit(1) - provisioning_profiles = None - if profile: - profile_path = os.path.join(profiles_dir, profile + '.mobileprovision') - if os.path.exists(profile_path): - provisioning_profiles = [profile_path] - if not provisioning_profiles: - provisioning_profiles = glob.glob( - os.path.join(profiles_dir, '*.mobileprovision')) - valid_provisioning_profiles = {} - for profile_path in provisioning_profiles: - profile_data = self._LoadProvisioningProfile(profile_path) - app_id_pattern = profile_data.get( - 'Entitlements', {}).get('application-identifier', '') - for team_identifier in profile_data.get('TeamIdentifier', []): - app_id = '%s.%s' % (team_identifier, bundle_identifier) - if fnmatch.fnmatch(app_id, app_id_pattern): - valid_provisioning_profiles[app_id_pattern] = ( - profile_path, profile_data, team_identifier) - if not valid_provisioning_profiles: - print >>sys.stderr, ( - 'cannot find mobile provisioning for %s' % bundle_identifier) - sys.exit(1) - # If the user has multiple provisioning profiles installed that can be - # used for ${bundle_identifier}, pick the most specific one (ie. the - # provisioning profile whose pattern is the longest). - selected_key = max(valid_provisioning_profiles, key=lambda v: len(v)) - return valid_provisioning_profiles[selected_key] - - def _LoadProvisioningProfile(self, profile_path): - """Extracts the plist embedded in a provisioning profile. - - Args: - profile_path: string, path to the .mobileprovision file - - Returns: - Content of the plist embedded in the provisioning profile as a dictionary. 
- """ - with tempfile.NamedTemporaryFile() as temp: - subprocess.check_call([ - 'security', 'cms', '-D', '-i', profile_path, '-o', temp.name]) - return self._LoadPlistMaybeBinary(temp.name) - - def _MergePlist(self, merged_plist, plist): - """Merge |plist| into |merged_plist|.""" - for key, value in plist.iteritems(): - if isinstance(value, dict): - merged_value = merged_plist.get(key, {}) - if isinstance(merged_value, dict): - self._MergePlist(merged_value, value) - merged_plist[key] = merged_value - else: - merged_plist[key] = value - else: - merged_plist[key] = value - - def _LoadPlistMaybeBinary(self, plist_path): - """Loads into a memory a plist possibly encoded in binary format. - - This is a wrapper around plistlib.readPlist that tries to convert the - plist to the XML format if it can't be parsed (assuming that it is in - the binary format). - - Args: - plist_path: string, path to a plist file, in XML or binary format - - Returns: - Content of the plist as a dictionary. - """ - try: - # First, try to read the file using plistlib that only supports XML, - # and if an exception is raised, convert a temporary copy to XML and - # load that copy. - return plistlib.readPlist(plist_path) - except: - pass - with tempfile.NamedTemporaryFile() as temp: - shutil.copy2(plist_path, temp.name) - subprocess.check_call(['plutil', '-convert', 'xml1', temp.name]) - return plistlib.readPlist(temp.name) - - def _GetSubstitutions(self, bundle_identifier, app_identifier_prefix): - """Constructs a dictionary of variable substitutions for Entitlements.plist. - - Args: - bundle_identifier: string, value of CFBundleIdentifier from Info.plist - app_identifier_prefix: string, value for AppIdentifierPrefix - - Returns: - Dictionary of substitutions to apply when generating Entitlements.plist. - """ - return { - 'CFBundleIdentifier': bundle_identifier, - 'AppIdentifierPrefix': app_identifier_prefix, - } - - def _GetCFBundleIdentifier(self): - """Extracts CFBundleIdentifier value from Info.plist in the bundle. - - Returns: - Value of CFBundleIdentifier in the Info.plist located in the bundle. - """ - info_plist_path = os.path.join( - os.environ['TARGET_BUILD_DIR'], - os.environ['INFOPLIST_PATH']) - info_plist_data = self._LoadPlistMaybeBinary(info_plist_path) - return info_plist_data['CFBundleIdentifier'] - - def _InstallEntitlements(self, entitlements, substitutions, overrides): - """Generates and install the ${BundleName}.xcent entitlements file. - - Expands variables "$(variable)" pattern in the source entitlements file, - add extra entitlements defined in the .mobileprovision file and the copy - the generated plist to "${BundlePath}.xcent". - - Args: - entitlements: string, optional, path to the Entitlements.plist template - to use, defaults to "${SDKROOT}/Entitlements.plist" - substitutions: dictionary, variable substitutions - overrides: dictionary, values to add to the entitlements - - Returns: - Path to the generated entitlements file. 
- """ - source_path = entitlements - target_path = os.path.join( - os.environ['BUILT_PRODUCTS_DIR'], - os.environ['PRODUCT_NAME'] + '.xcent') - if not source_path: - source_path = os.path.join( - os.environ['SDKROOT'], - 'Entitlements.plist') - shutil.copy2(source_path, target_path) - data = self._LoadPlistMaybeBinary(target_path) - data = self._ExpandVariables(data, substitutions) - if overrides: - for key in overrides: - if key not in data: - data[key] = overrides[key] - plistlib.writePlist(data, target_path) - return target_path - - def _ExpandVariables(self, data, substitutions): - """Expands variables "$(variable)" in data. - - Args: - data: object, can be either string, list or dictionary - substitutions: dictionary, variable substitutions to perform - - Returns: - Copy of data where each references to "$(variable)" has been replaced - by the corresponding value found in substitutions, or left intact if - the key was not found. - """ - if isinstance(data, str): - for key, value in substitutions.iteritems(): - data = data.replace('$(%s)' % key, value) - return data - if isinstance(data, list): - return [self._ExpandVariables(v, substitutions) for v in data] - if isinstance(data, dict): - return {k: self._ExpandVariables(data[k], substitutions) for k in data} - return data - -if __name__ == '__main__': - sys.exit(main(sys.argv[1:])) diff --git a/test/addons-napi/test_conversions/build/test_conversions.target.mk b/test/addons-napi/test_conversions/build/test_conversions.target.mk deleted file mode 100644 index 87273bde635499..00000000000000 --- a/test/addons-napi/test_conversions/build/test_conversions.target.mk +++ /dev/null @@ -1,178 +0,0 @@ -# This file is generated by gyp; do not edit. - -TOOLSET := target -TARGET := test_conversions -DEFS_Debug := \ - '-DNODE_GYP_MODULE_NAME=test_conversions' \ - '-DUSING_UV_SHARED=1' \ - '-DUSING_V8_SHARED=1' \ - '-DV8_DEPRECATION_WARNINGS=1' \ - '-D_DARWIN_USE_64_BIT_INODE=1' \ - '-D_LARGEFILE_SOURCE' \ - '-D_FILE_OFFSET_BITS=64' \ - '-DBUILDING_NODE_EXTENSION' \ - '-DDEBUG' \ - '-D_DEBUG' \ - '-DV8_ENABLE_CHECKS' - -# Flags passed to all source files. -CFLAGS_Debug := \ - -O0 \ - -gdwarf-2 \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -Wall \ - -Wendif-labels \ - -W \ - -Wno-unused-parameter - -# Flags passed to only C files. -CFLAGS_C_Debug := \ - -fno-strict-aliasing - -# Flags passed to only C++ files. -CFLAGS_CC_Debug := \ - -std=gnu++0x \ - -stdlib=libc++ \ - -fno-rtti \ - -fno-exceptions \ - -fno-threadsafe-statics \ - -fno-strict-aliasing - -# Flags passed to only ObjC files. -CFLAGS_OBJC_Debug := - -# Flags passed to only ObjC++ files. -CFLAGS_OBJCC_Debug := - -INCS_Debug := \ - -I/Users/trott/io.js/include/node \ - -I/Users/trott/io.js/src \ - -I/Users/trott/io.js/deps/uv/include \ - -I/Users/trott/io.js/deps/v8/include - -DEFS_Release := \ - '-DNODE_GYP_MODULE_NAME=test_conversions' \ - '-DUSING_UV_SHARED=1' \ - '-DUSING_V8_SHARED=1' \ - '-DV8_DEPRECATION_WARNINGS=1' \ - '-D_DARWIN_USE_64_BIT_INODE=1' \ - '-D_LARGEFILE_SOURCE' \ - '-D_FILE_OFFSET_BITS=64' \ - '-DBUILDING_NODE_EXTENSION' - -# Flags passed to all source files. -CFLAGS_Release := \ - -Os \ - -gdwarf-2 \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -Wall \ - -Wendif-labels \ - -W \ - -Wno-unused-parameter - -# Flags passed to only C files. -CFLAGS_C_Release := \ - -fno-strict-aliasing - -# Flags passed to only C++ files. 
-CFLAGS_CC_Release := \ - -std=gnu++0x \ - -stdlib=libc++ \ - -fno-rtti \ - -fno-exceptions \ - -fno-threadsafe-statics \ - -fno-strict-aliasing - -# Flags passed to only ObjC files. -CFLAGS_OBJC_Release := - -# Flags passed to only ObjC++ files. -CFLAGS_OBJCC_Release := - -INCS_Release := \ - -I/Users/trott/io.js/include/node \ - -I/Users/trott/io.js/src \ - -I/Users/trott/io.js/deps/uv/include \ - -I/Users/trott/io.js/deps/v8/include - -OBJS := \ - $(obj).target/$(TARGET)/test_conversions.o - -# Add to the list of files we specially track dependencies for. -all_deps += $(OBJS) - -# CFLAGS et al overrides must be target-local. -# See "Target-specific Variable Values" in the GNU Make manual. -$(OBJS): TOOLSET := $(TOOLSET) -$(OBJS): GYP_CFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) -$(OBJS): GYP_CXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) -$(OBJS): GYP_OBJCFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) $(CFLAGS_OBJC_$(BUILDTYPE)) -$(OBJS): GYP_OBJCXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) $(CFLAGS_OBJCC_$(BUILDTYPE)) - -# Suffix rules, putting all outputs into $(obj). - -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) - -# Try building from generated source, too. - -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) - -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) - -# End of this set of suffix rules -### Rules for final target. -LDFLAGS_Debug := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -L$(builddir) \ - -stdlib=libc++ - -LIBTOOLFLAGS_Debug := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first - -LDFLAGS_Release := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -L$(builddir) \ - -stdlib=libc++ - -LIBTOOLFLAGS_Release := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first - -LIBS := - -$(builddir)/test_conversions.node: GYP_LDFLAGS := $(LDFLAGS_$(BUILDTYPE)) -$(builddir)/test_conversions.node: LIBS := $(LIBS) -$(builddir)/test_conversions.node: GYP_LIBTOOLFLAGS := $(LIBTOOLFLAGS_$(BUILDTYPE)) -$(builddir)/test_conversions.node: TOOLSET := $(TOOLSET) -$(builddir)/test_conversions.node: $(OBJS) FORCE_DO_CMD - $(call do_cmd,solink_module) - -all_deps += $(builddir)/test_conversions.node -# Add target alias -.PHONY: test_conversions -test_conversions: $(builddir)/test_conversions.node - -# Short alias for building this executable. -.PHONY: test_conversions.node -test_conversions.node: $(builddir)/test_conversions.node - -# Add executable to "all" target. -.PHONY: all -all: $(builddir)/test_conversions.node - diff --git a/test/addons-napi/test_dataview/build/Makefile b/test/addons-napi/test_dataview/build/Makefile deleted file mode 100644 index 9dd4843f8cf222..00000000000000 --- a/test/addons-napi/test_dataview/build/Makefile +++ /dev/null @@ -1,342 +0,0 @@ -# We borrow heavily from the kernel build setup, though we are simpler since -# we don't have Kconfig tweaking settings on us. - -# The implicit make rules have it looking for RCS files, among other things. -# We instead explicitly write all the rules we care about. 
-# It's even quicker (saves ~200ms) to pass -r on the command line. -MAKEFLAGS=-r - -# The source directory tree. -srcdir := .. -abs_srcdir := $(abspath $(srcdir)) - -# The name of the builddir. -builddir_name ?= . - -# The V=1 flag on command line makes us verbosely print command lines. -ifdef V - quiet= -else - quiet=quiet_ -endif - -# Specify BUILDTYPE=Release on the command line for a release build. -BUILDTYPE ?= Release - -# Directory all our build output goes into. -# Note that this must be two directories beneath src/ for unit tests to pass, -# as they reach into the src/ directory for data with relative paths. -builddir ?= $(builddir_name)/$(BUILDTYPE) -abs_builddir := $(abspath $(builddir)) -depsdir := $(builddir)/.deps - -# Object output directory. -obj := $(builddir)/obj -abs_obj := $(abspath $(obj)) - -# We build up a list of every single one of the targets so we can slurp in the -# generated dependency rule Makefiles in one pass. -all_deps := - - - -CC.target ?= $(CC) -CFLAGS.target ?= $(CPPFLAGS) $(CFLAGS) -CXX.target ?= $(CXX) -CXXFLAGS.target ?= $(CPPFLAGS) $(CXXFLAGS) -LINK.target ?= $(LINK) -LDFLAGS.target ?= $(LDFLAGS) -AR.target ?= $(AR) - -# C++ apps need to be linked with g++. -LINK ?= $(CXX.target) - -# TODO(evan): move all cross-compilation logic to gyp-time so we don't need -# to replicate this environment fallback in make as well. -CC.host ?= gcc -CFLAGS.host ?= $(CPPFLAGS_host) $(CFLAGS_host) -CXX.host ?= g++ -CXXFLAGS.host ?= $(CPPFLAGS_host) $(CXXFLAGS_host) -LINK.host ?= $(CXX.host) -LDFLAGS.host ?= -AR.host ?= ar - -# Define a dir function that can handle spaces. -# http://www.gnu.org/software/make/manual/make.html#Syntax-of-Functions -# "leading spaces cannot appear in the text of the first argument as written. -# These characters can be put into the argument value by variable substitution." -empty := -space := $(empty) $(empty) - -# http://stackoverflow.com/questions/1189781/using-make-dir-or-notdir-on-a-path-with-spaces -replace_spaces = $(subst $(space),?,$1) -unreplace_spaces = $(subst ?,$(space),$1) -dirx = $(call unreplace_spaces,$(dir $(call replace_spaces,$1))) - -# Flags to make gcc output dependency info. Note that you need to be -# careful here to use the flags that ccache and distcc can understand. -# We write to a dep file on the side first and then rename at the end -# so we can't end up with a broken dep file. -depfile = $(depsdir)/$(call replace_spaces,$@).d -DEPFLAGS = -MMD -MF $(depfile).raw - -# We have to fixup the deps output in a few ways. -# (1) the file output should mention the proper .o file. -# ccache or distcc lose the path to the target, so we convert a rule of -# the form: -# foobar.o: DEP1 DEP2 -# into -# path/to/foobar.o: DEP1 DEP2 -# (2) we want missing files not to cause us to fail to build. -# We want to rewrite -# foobar.o: DEP1 DEP2 \ -# DEP3 -# to -# DEP1: -# DEP2: -# DEP3: -# so if the files are missing, they're just considered phony rules. -# We have to do some pretty insane escaping to get those backslashes -# and dollar signs past make, the shell, and sed at the same time. -# Doesn't work with spaces, but that's fine: .d files have spaces in -# their names replaced with other characters. -define fixup_dep -# The depfile may not exist if the input file didn't have any #includes. -touch $(depfile).raw -# Fixup path as in (1). -sed -e "s|^$(notdir $@)|$@|" $(depfile).raw >> $(depfile) -# Add extra rules as in (2). 
-# We remove slashes and replace spaces with new lines; -# remove blank lines; -# delete the first line and append a colon to the remaining lines. -sed -e 's|\\||' -e 'y| |\n|' $(depfile).raw |\ - grep -v '^$$' |\ - sed -e 1d -e 's|$$|:|' \ - >> $(depfile) -rm $(depfile).raw -endef - -# Command definitions: -# - cmd_foo is the actual command to run; -# - quiet_cmd_foo is the brief-output summary of the command. - -quiet_cmd_cc = CC($(TOOLSET)) $@ -cmd_cc = $(CC.$(TOOLSET)) $(GYP_CFLAGS) $(DEPFLAGS) $(CFLAGS.$(TOOLSET)) -c -o $@ $< - -quiet_cmd_cxx = CXX($(TOOLSET)) $@ -cmd_cxx = $(CXX.$(TOOLSET)) $(GYP_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< - -quiet_cmd_objc = CXX($(TOOLSET)) $@ -cmd_objc = $(CC.$(TOOLSET)) $(GYP_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $< - -quiet_cmd_objcxx = CXX($(TOOLSET)) $@ -cmd_objcxx = $(CXX.$(TOOLSET)) $(GYP_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $< - -# Commands for precompiled header files. -quiet_cmd_pch_c = CXX($(TOOLSET)) $@ -cmd_pch_c = $(CC.$(TOOLSET)) $(GYP_PCH_CFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< -quiet_cmd_pch_cc = CXX($(TOOLSET)) $@ -cmd_pch_cc = $(CC.$(TOOLSET)) $(GYP_PCH_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< -quiet_cmd_pch_m = CXX($(TOOLSET)) $@ -cmd_pch_m = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $< -quiet_cmd_pch_mm = CXX($(TOOLSET)) $@ -cmd_pch_mm = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $< - -# gyp-mac-tool is written next to the root Makefile by gyp. -# Use $(4) for the command, since $(2) and $(3) are used as flag by do_cmd -# already. -quiet_cmd_mac_tool = MACTOOL $(4) $< -cmd_mac_tool = ./gyp-mac-tool $(4) $< "$@" - -quiet_cmd_mac_package_framework = PACKAGE FRAMEWORK $@ -cmd_mac_package_framework = ./gyp-mac-tool package-framework "$@" $(4) - -quiet_cmd_infoplist = INFOPLIST $@ -cmd_infoplist = $(CC.$(TOOLSET)) -E -P -Wno-trigraphs -x c $(INFOPLIST_DEFINES) "$<" -o "$@" - -quiet_cmd_touch = TOUCH $@ -cmd_touch = touch $@ - -quiet_cmd_copy = COPY $@ -# send stderr to /dev/null to ignore messages when linking directories. -cmd_copy = rm -rf "$@" && cp -af "$<" "$@" - -quiet_cmd_alink = LIBTOOL-STATIC $@ -cmd_alink = rm -f $@ && ./gyp-mac-tool filter-libtool libtool $(GYP_LIBTOOLFLAGS) -static -o $@ $(filter %.o,$^) - -quiet_cmd_link = LINK($(TOOLSET)) $@ -cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS) - -quiet_cmd_solink = SOLINK($(TOOLSET)) $@ -cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS) - -quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@ -cmd_solink_module = $(LINK.$(TOOLSET)) -bundle $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS) - - -# Define an escape_quotes function to escape single quotes. -# This allows us to handle quotes properly as long as we always use -# use single quotes and escape_quotes. -escape_quotes = $(subst ','\'',$(1)) -# This comment is here just to include a ' to unconfuse syntax highlighting. -# Define an escape_vars function to escape '$' variable syntax. -# This allows us to read/write command lines with shell variables (e.g. -# $LD_LIBRARY_PATH), without triggering make substitution. -escape_vars = $(subst $$,$$$$,$(1)) -# Helper that expands to a shell command to echo a string exactly as it is in -# make. This uses printf instead of echo because printf's behaviour with respect -# to escape sequences is more portable than echo's across different shells -# (e.g., dash, bash). 
-exact_echo = printf '%s\n' '$(call escape_quotes,$(1))' - -# Helper to compare the command we're about to run against the command -# we logged the last time we ran the command. Produces an empty -# string (false) when the commands match. -# Tricky point: Make has no string-equality test function. -# The kernel uses the following, but it seems like it would have false -# positives, where one string reordered its arguments. -# arg_check = $(strip $(filter-out $(cmd_$(1)), $(cmd_$@)) \ -# $(filter-out $(cmd_$@), $(cmd_$(1)))) -# We instead substitute each for the empty string into the other, and -# say they're equal if both substitutions produce the empty string. -# .d files contain ? instead of spaces, take that into account. -command_changed = $(or $(subst $(cmd_$(1)),,$(cmd_$(call replace_spaces,$@))),\ - $(subst $(cmd_$(call replace_spaces,$@)),,$(cmd_$(1)))) - -# Helper that is non-empty when a prerequisite changes. -# Normally make does this implicitly, but we force rules to always run -# so we can check their command lines. -# $? -- new prerequisites -# $| -- order-only dependencies -prereq_changed = $(filter-out FORCE_DO_CMD,$(filter-out $|,$?)) - -# Helper that executes all postbuilds until one fails. -define do_postbuilds - @E=0;\ - for p in $(POSTBUILDS); do\ - eval $$p;\ - E=$$?;\ - if [ $$E -ne 0 ]; then\ - break;\ - fi;\ - done;\ - if [ $$E -ne 0 ]; then\ - rm -rf "$@";\ - exit $$E;\ - fi -endef - -# do_cmd: run a command via the above cmd_foo names, if necessary. -# Should always run for a given target to handle command-line changes. -# Second argument, if non-zero, makes it do asm/C/C++ dependency munging. -# Third argument, if non-zero, makes it do POSTBUILDS processing. -# Note: We intentionally do NOT call dirx for depfile, since it contains ? for -# spaces already and dirx strips the ? characters. -define do_cmd -$(if $(or $(command_changed),$(prereq_changed)), - @$(call exact_echo, $($(quiet)cmd_$(1))) - @mkdir -p "$(call dirx,$@)" "$(dir $(depfile))" - $(if $(findstring flock,$(word 2,$(cmd_$1))), - @$(cmd_$(1)) - @echo " $(quiet_cmd_$(1)): Finished", - @$(cmd_$(1)) - ) - @$(call exact_echo,$(call escape_vars,cmd_$(call replace_spaces,$@) := $(cmd_$(1)))) > $(depfile) - @$(if $(2),$(fixup_dep)) - $(if $(and $(3), $(POSTBUILDS)), - $(call do_postbuilds) - ) -) -endef - -# Declare the "all" target first so it is the default, -# even though we don't have the deps yet. -.PHONY: all -all: - -# make looks for ways to re-generate included makefiles, but in our case, we -# don't have a direct way. Explicitly telling make that it has nothing to do -# for them makes it go faster. -%.d: ; - -# Use FORCE_DO_CMD to force a target to run. Should be coupled with -# do_cmd. -.PHONY: FORCE_DO_CMD -FORCE_DO_CMD: - -TOOLSET := target -# Suffix rules, putting all outputs into $(obj). -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.m FORCE_DO_CMD - @$(call do_cmd,objc,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.mm FORCE_DO_CMD - @$(call do_cmd,objcxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.S FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.s FORCE_DO_CMD - @$(call do_cmd,cc,1) - -# Try building from generated source, too. 
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.m FORCE_DO_CMD - @$(call do_cmd,objc,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.mm FORCE_DO_CMD - @$(call do_cmd,objcxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.S FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.s FORCE_DO_CMD - @$(call do_cmd,cc,1) - -$(obj).$(TOOLSET)/%.o: $(obj)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.m FORCE_DO_CMD - @$(call do_cmd,objc,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.mm FORCE_DO_CMD - @$(call do_cmd,objcxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.S FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.s FORCE_DO_CMD - @$(call do_cmd,cc,1) - - -ifeq ($(strip $(foreach prefix,$(NO_LOAD),\ - $(findstring $(join ^,$(prefix)),\ - $(join ^,test_dataview.target.mk)))),) - include test_dataview.target.mk -endif - -quiet_cmd_regen_makefile = ACTION Regenerating $@ -cmd_regen_makefile = cd $(srcdir); /Users/trott/io.js/deps/npm/node_modules/node-gyp/gyp/gyp_main.py -fmake --ignore-environment "--toplevel-dir=." -I/Users/trott/io.js/test/addons-napi/test_dataview/build/config.gypi -I/Users/trott/io.js/deps/npm/node_modules/node-gyp/addon.gypi -I/Users/trott/io.js/common.gypi "--depth=." "-Goutput_dir=." "--generator-output=build" "-Dlibrary=shared_library" "-Dvisibility=default" "-Dnode_root_dir=/Users/trott/io.js" "-Dnode_gyp_dir=/Users/trott/io.js/deps/npm/node_modules/node-gyp" "-Dnode_lib_file=/Users/trott/io.js/$(Configuration)/node.lib" "-Dmodule_root_dir=/Users/trott/io.js/test/addons-napi/test_dataview" "-Dnode_engine=v8" binding.gyp -Makefile: $(srcdir)/../../../deps/npm/node_modules/node-gyp/addon.gypi $(srcdir)/build/config.gypi $(srcdir)/binding.gyp $(srcdir)/../../../common.gypi - $(call do_cmd,regen_makefile) - -# "all" is a concatenation of the "all" targets from all the included -# sub-makefiles. This is just here to clarify. -all: - -# Add in dependency-tracking rules. $(all_deps) is the list of every single -# target in our tree. Only consider the ones with .d (dependency) info: -d_files := $(wildcard $(foreach f,$(all_deps),$(depsdir)/$(f).d)) -ifneq ($(d_files),) - include $(d_files) -endif diff --git a/test/addons-napi/test_dataview/build/binding.Makefile b/test/addons-napi/test_dataview/build/binding.Makefile deleted file mode 100644 index 9fa814f4408abd..00000000000000 --- a/test/addons-napi/test_dataview/build/binding.Makefile +++ /dev/null @@ -1,6 +0,0 @@ -# This file is generated by gyp; do not edit. - -export builddir_name ?= ./build/. -.PHONY: all -all: - $(MAKE) test_dataview diff --git a/test/addons-napi/test_dataview/build/config.gypi b/test/addons-napi/test_dataview/build/config.gypi deleted file mode 100644 index 6a35cfccbb772b..00000000000000 --- a/test/addons-napi/test_dataview/build/config.gypi +++ /dev/null @@ -1,70 +0,0 @@ -# Do not edit. 
File was generated by node-gyp's "configure" step -{ - "target_defaults": { - "cflags": [], - "default_configuration": "Release", - "defines": [], - "include_dirs": [], - "libraries": [] - }, - "variables": { - "asan": 0, - "coverage": "false", - "debug_devtools": "node", - "debug_http2": "false", - "debug_nghttp2": "false", - "force_dynamic_crt": 0, - "host_arch": "x64", - "icu_data_file": "icudt59l.dat", - "icu_data_in": "../../deps/icu-small/source/data/in/icudt59l.dat", - "icu_endianness": "l", - "icu_gyp_path": "tools/icu/icu-generic.gyp", - "icu_locales": "en,root", - "icu_path": "deps/icu-small", - "icu_small": "true", - "icu_ver_major": "59", - "llvm_version": 0, - "node_byteorder": "little", - "node_enable_d8": "false", - "node_enable_v8_vtunejit": "false", - "node_install_npm": "true", - "node_module_version": 58, - "node_no_browser_globals": "false", - "node_prefix": "/usr/local", - "node_release_urlbase": "", - "node_shared": "false", - "node_shared_cares": "false", - "node_shared_http_parser": "false", - "node_shared_libuv": "false", - "node_shared_openssl": "false", - "node_shared_zlib": "false", - "node_tag": "", - "node_use_bundled_v8": "true", - "node_use_dtrace": "true", - "node_use_etw": "false", - "node_use_lttng": "false", - "node_use_openssl": "true", - "node_use_perfctr": "false", - "node_use_v8_platform": "true", - "node_without_node_options": "false", - "openssl_fips": "", - "openssl_no_asm": 0, - "shlib_suffix": "58.dylib", - "target_arch": "x64", - "uv_parent_path": "/deps/uv/", - "uv_use_dtrace": "true", - "v8_enable_gdbjit": 0, - "v8_enable_i18n_support": 1, - "v8_enable_inspector": 1, - "v8_no_strict_aliasing": 1, - "v8_optimized_debug": 0, - "v8_promise_internal_field_count": 1, - "v8_random_seed": 0, - "v8_trace_maps": 0, - "v8_use_snapshot": "true", - "want_separate_host_toolset": 0, - "xcode_version": "8.0", - "nodedir": "/Users/trott/io.js", - "standalone_static_library": 1 - } -} diff --git a/test/addons-napi/test_dataview/build/gyp-mac-tool b/test/addons-napi/test_dataview/build/gyp-mac-tool deleted file mode 100755 index 8ef02b0493a003..00000000000000 --- a/test/addons-napi/test_dataview/build/gyp-mac-tool +++ /dev/null @@ -1,611 +0,0 @@ -#!/usr/bin/env python -# Generated by gyp. Do not edit. -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Utility functions to perform Xcode-style build steps. - -These functions are executed via gyp-mac-tool when using the Makefile generator. -""" - -import fcntl -import fnmatch -import glob -import json -import os -import plistlib -import re -import shutil -import string -import subprocess -import sys -import tempfile - - -def main(args): - executor = MacTool() - exit_code = executor.Dispatch(args) - if exit_code is not None: - sys.exit(exit_code) - - -class MacTool(object): - """This class performs all the Mac tooling steps. 
The methods can either be - executed directly, or dispatched from an argument list.""" - - def Dispatch(self, args): - """Dispatches a string command to a method.""" - if len(args) < 1: - raise Exception("Not enough arguments") - - method = "Exec%s" % self._CommandifyName(args[0]) - return getattr(self, method)(*args[1:]) - - def _CommandifyName(self, name_string): - """Transforms a tool name like copy-info-plist to CopyInfoPlist""" - return name_string.title().replace('-', '') - - def ExecCopyBundleResource(self, source, dest, convert_to_binary): - """Copies a resource file to the bundle/Resources directory, performing any - necessary compilation on each resource.""" - extension = os.path.splitext(source)[1].lower() - if os.path.isdir(source): - # Copy tree. - # TODO(thakis): This copies file attributes like mtime, while the - # single-file branch below doesn't. This should probably be changed to - # be consistent with the single-file branch. - if os.path.exists(dest): - shutil.rmtree(dest) - shutil.copytree(source, dest) - elif extension == '.xib': - return self._CopyXIBFile(source, dest) - elif extension == '.storyboard': - return self._CopyXIBFile(source, dest) - elif extension == '.strings': - self._CopyStringsFile(source, dest, convert_to_binary) - else: - shutil.copy(source, dest) - - def _CopyXIBFile(self, source, dest): - """Compiles a XIB file with ibtool into a binary plist in the bundle.""" - - # ibtool sometimes crashes with relative paths. See crbug.com/314728. - base = os.path.dirname(os.path.realpath(__file__)) - if os.path.relpath(source): - source = os.path.join(base, source) - if os.path.relpath(dest): - dest = os.path.join(base, dest) - - args = ['xcrun', 'ibtool', '--errors', '--warnings', '--notices', - '--output-format', 'human-readable-text', '--compile', dest, source] - ibtool_section_re = re.compile(r'/\*.*\*/') - ibtool_re = re.compile(r'.*note:.*is clipping its content') - ibtoolout = subprocess.Popen(args, stdout=subprocess.PIPE) - current_section_header = None - for line in ibtoolout.stdout: - if ibtool_section_re.match(line): - current_section_header = line - elif not ibtool_re.match(line): - if current_section_header: - sys.stdout.write(current_section_header) - current_section_header = None - sys.stdout.write(line) - return ibtoolout.returncode - - def _ConvertToBinary(self, dest): - subprocess.check_call([ - 'xcrun', 'plutil', '-convert', 'binary1', '-o', dest, dest]) - - def _CopyStringsFile(self, source, dest, convert_to_binary): - """Copies a .strings file using iconv to reconvert the input into UTF-16.""" - input_code = self._DetectInputEncoding(source) or "UTF-8" - - # Xcode's CpyCopyStringsFile / builtin-copyStrings seems to call - # CFPropertyListCreateFromXMLData() behind the scenes; at least it prints - # CFPropertyListCreateFromXMLData(): Old-style plist parser: missing - # semicolon in dictionary. - # on invalid files. Do the same kind of validation. - import CoreFoundation - s = open(source, 'rb').read() - d = CoreFoundation.CFDataCreate(None, s, len(s)) - _, error = CoreFoundation.CFPropertyListCreateFromXMLData(None, d, 0, None) - if error: - return - - fp = open(dest, 'wb') - fp.write(s.decode(input_code).encode('UTF-16')) - fp.close() - - if convert_to_binary == 'True': - self._ConvertToBinary(dest) - - def _DetectInputEncoding(self, file_name): - """Reads the first few bytes from file_name and tries to guess the text - encoding. 
Returns None as a guess if it can't detect it.""" - fp = open(file_name, 'rb') - try: - header = fp.read(3) - except e: - fp.close() - return None - fp.close() - if header.startswith("\xFE\xFF"): - return "UTF-16" - elif header.startswith("\xFF\xFE"): - return "UTF-16" - elif header.startswith("\xEF\xBB\xBF"): - return "UTF-8" - else: - return None - - def ExecCopyInfoPlist(self, source, dest, convert_to_binary, *keys): - """Copies the |source| Info.plist to the destination directory |dest|.""" - # Read the source Info.plist into memory. - fd = open(source, 'r') - lines = fd.read() - fd.close() - - # Insert synthesized key/value pairs (e.g. BuildMachineOSBuild). - plist = plistlib.readPlistFromString(lines) - if keys: - plist = dict(plist.items() + json.loads(keys[0]).items()) - lines = plistlib.writePlistToString(plist) - - # Go through all the environment variables and replace them as variables in - # the file. - IDENT_RE = re.compile(r'[/\s]') - for key in os.environ: - if key.startswith('_'): - continue - evar = '${%s}' % key - evalue = os.environ[key] - lines = string.replace(lines, evar, evalue) - - # Xcode supports various suffices on environment variables, which are - # all undocumented. :rfc1034identifier is used in the standard project - # template these days, and :identifier was used earlier. They are used to - # convert non-url characters into things that look like valid urls -- - # except that the replacement character for :identifier, '_' isn't valid - # in a URL either -- oops, hence :rfc1034identifier was born. - evar = '${%s:identifier}' % key - evalue = IDENT_RE.sub('_', os.environ[key]) - lines = string.replace(lines, evar, evalue) - - evar = '${%s:rfc1034identifier}' % key - evalue = IDENT_RE.sub('-', os.environ[key]) - lines = string.replace(lines, evar, evalue) - - # Remove any keys with values that haven't been replaced. - lines = lines.split('\n') - for i in range(len(lines)): - if lines[i].strip().startswith("${"): - lines[i] = None - lines[i - 1] = None - lines = '\n'.join(filter(lambda x: x is not None, lines)) - - # Write out the file with variables replaced. - fd = open(dest, 'w') - fd.write(lines) - fd.close() - - # Now write out PkgInfo file now that the Info.plist file has been - # "compiled". - self._WritePkgInfo(dest) - - if convert_to_binary == 'True': - self._ConvertToBinary(dest) - - def _WritePkgInfo(self, info_plist): - """This writes the PkgInfo file from the data stored in Info.plist.""" - plist = plistlib.readPlist(info_plist) - if not plist: - return - - # Only create PkgInfo for executable types. - package_type = plist['CFBundlePackageType'] - if package_type != 'APPL': - return - - # The format of PkgInfo is eight characters, representing the bundle type - # and bundle signature, each four characters. If that is missing, four - # '?' characters are used instead. - signature_code = plist.get('CFBundleSignature', '????') - if len(signature_code) != 4: # Wrong length resets everything, too. - signature_code = '?' * 4 - - dest = os.path.join(os.path.dirname(info_plist), 'PkgInfo') - fp = open(dest, 'w') - fp.write('%s%s' % (package_type, signature_code)) - fp.close() - - def ExecFlock(self, lockfile, *cmd_list): - """Emulates the most basic behavior of Linux's flock(1).""" - # Rely on exception handling to report errors. 
- fd = os.open(lockfile, os.O_RDONLY|os.O_NOCTTY|os.O_CREAT, 0o666) - fcntl.flock(fd, fcntl.LOCK_EX) - return subprocess.call(cmd_list) - - def ExecFilterLibtool(self, *cmd_list): - """Calls libtool and filters out '/path/to/libtool: file: foo.o has no - symbols'.""" - libtool_re = re.compile(r'^.*libtool: file: .* has no symbols$') - libtool_re5 = re.compile( - r'^.*libtool: warning for library: ' + - r'.* the table of contents is empty ' + - r'\(no object file members in the library define global symbols\)$') - env = os.environ.copy() - # Ref: - # http://www.opensource.apple.com/source/cctools/cctools-809/misc/libtool.c - # The problem with this flag is that it resets the file mtime on the file to - # epoch=0, e.g. 1970-1-1 or 1969-12-31 depending on timezone. - env['ZERO_AR_DATE'] = '1' - libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env) - _, err = libtoolout.communicate() - for line in err.splitlines(): - if not libtool_re.match(line) and not libtool_re5.match(line): - print >>sys.stderr, line - # Unconditionally touch the output .a file on the command line if present - # and the command succeeded. A bit hacky. - if not libtoolout.returncode: - for i in range(len(cmd_list) - 1): - if cmd_list[i] == "-o" and cmd_list[i+1].endswith('.a'): - os.utime(cmd_list[i+1], None) - break - return libtoolout.returncode - - def ExecPackageFramework(self, framework, version): - """Takes a path to Something.framework and the Current version of that and - sets up all the symlinks.""" - # Find the name of the binary based on the part before the ".framework". - binary = os.path.basename(framework).split('.')[0] - - CURRENT = 'Current' - RESOURCES = 'Resources' - VERSIONS = 'Versions' - - if not os.path.exists(os.path.join(framework, VERSIONS, version, binary)): - # Binary-less frameworks don't seem to contain symlinks (see e.g. - # chromium's out/Debug/org.chromium.Chromium.manifest/ bundle). - return - - # Move into the framework directory to set the symlinks correctly. - pwd = os.getcwd() - os.chdir(framework) - - # Set up the Current version. - self._Relink(version, os.path.join(VERSIONS, CURRENT)) - - # Set up the root symlinks. - self._Relink(os.path.join(VERSIONS, CURRENT, binary), binary) - self._Relink(os.path.join(VERSIONS, CURRENT, RESOURCES), RESOURCES) - - # Back to where we were before! - os.chdir(pwd) - - def _Relink(self, dest, link): - """Creates a symlink to |dest| named |link|. If |link| already exists, - it is overwritten.""" - if os.path.lexists(link): - os.remove(link) - os.symlink(dest, link) - - def ExecCompileXcassets(self, keys, *inputs): - """Compiles multiple .xcassets files into a single .car file. - - This invokes 'actool' to compile all the inputs .xcassets files. The - |keys| arguments is a json-encoded dictionary of extra arguments to - pass to 'actool' when the asset catalogs contains an application icon - or a launch image. - - Note that 'actool' does not create the Assets.car file if the asset - catalogs does not contains imageset. 
- """ - command_line = [ - 'xcrun', 'actool', '--output-format', 'human-readable-text', - '--compress-pngs', '--notices', '--warnings', '--errors', - ] - is_iphone_target = 'IPHONEOS_DEPLOYMENT_TARGET' in os.environ - if is_iphone_target: - platform = os.environ['CONFIGURATION'].split('-')[-1] - if platform not in ('iphoneos', 'iphonesimulator'): - platform = 'iphonesimulator' - command_line.extend([ - '--platform', platform, '--target-device', 'iphone', - '--target-device', 'ipad', '--minimum-deployment-target', - os.environ['IPHONEOS_DEPLOYMENT_TARGET'], '--compile', - os.path.abspath(os.environ['CONTENTS_FOLDER_PATH']), - ]) - else: - command_line.extend([ - '--platform', 'macosx', '--target-device', 'mac', - '--minimum-deployment-target', os.environ['MACOSX_DEPLOYMENT_TARGET'], - '--compile', - os.path.abspath(os.environ['UNLOCALIZED_RESOURCES_FOLDER_PATH']), - ]) - if keys: - keys = json.loads(keys) - for key, value in keys.iteritems(): - arg_name = '--' + key - if isinstance(value, bool): - if value: - command_line.append(arg_name) - elif isinstance(value, list): - for v in value: - command_line.append(arg_name) - command_line.append(str(v)) - else: - command_line.append(arg_name) - command_line.append(str(value)) - # Note: actool crashes if inputs path are relative, so use os.path.abspath - # to get absolute path name for inputs. - command_line.extend(map(os.path.abspath, inputs)) - subprocess.check_call(command_line) - - def ExecMergeInfoPlist(self, output, *inputs): - """Merge multiple .plist files into a single .plist file.""" - merged_plist = {} - for path in inputs: - plist = self._LoadPlistMaybeBinary(path) - self._MergePlist(merged_plist, plist) - plistlib.writePlist(merged_plist, output) - - def ExecCodeSignBundle(self, key, resource_rules, entitlements, provisioning): - """Code sign a bundle. - - This function tries to code sign an iOS bundle, following the same - algorithm as Xcode: - 1. copy ResourceRules.plist from the user or the SDK into the bundle, - 2. pick the provisioning profile that best match the bundle identifier, - and copy it into the bundle as embedded.mobileprovision, - 3. copy Entitlements.plist from user or SDK next to the bundle, - 4. code sign the bundle. - """ - resource_rules_path = self._InstallResourceRules(resource_rules) - substitutions, overrides = self._InstallProvisioningProfile( - provisioning, self._GetCFBundleIdentifier()) - entitlements_path = self._InstallEntitlements( - entitlements, substitutions, overrides) - subprocess.check_call([ - 'codesign', '--force', '--sign', key, '--resource-rules', - resource_rules_path, '--entitlements', entitlements_path, - os.path.join( - os.environ['TARGET_BUILD_DIR'], - os.environ['FULL_PRODUCT_NAME'])]) - - def _InstallResourceRules(self, resource_rules): - """Installs ResourceRules.plist from user or SDK into the bundle. - - Args: - resource_rules: string, optional, path to the ResourceRules.plist file - to use, default to "${SDKROOT}/ResourceRules.plist" - - Returns: - Path to the copy of ResourceRules.plist into the bundle. - """ - source_path = resource_rules - target_path = os.path.join( - os.environ['BUILT_PRODUCTS_DIR'], - os.environ['CONTENTS_FOLDER_PATH'], - 'ResourceRules.plist') - if not source_path: - source_path = os.path.join( - os.environ['SDKROOT'], 'ResourceRules.plist') - shutil.copy2(source_path, target_path) - return target_path - - def _InstallProvisioningProfile(self, profile, bundle_identifier): - """Installs embedded.mobileprovision into the bundle. 
- - Args: - profile: string, optional, short name of the .mobileprovision file - to use, if empty or the file is missing, the best file installed - will be used - bundle_identifier: string, value of CFBundleIdentifier from Info.plist - - Returns: - A tuple containing two dictionary: variables substitutions and values - to overrides when generating the entitlements file. - """ - source_path, provisioning_data, team_id = self._FindProvisioningProfile( - profile, bundle_identifier) - target_path = os.path.join( - os.environ['BUILT_PRODUCTS_DIR'], - os.environ['CONTENTS_FOLDER_PATH'], - 'embedded.mobileprovision') - shutil.copy2(source_path, target_path) - substitutions = self._GetSubstitutions(bundle_identifier, team_id + '.') - return substitutions, provisioning_data['Entitlements'] - - def _FindProvisioningProfile(self, profile, bundle_identifier): - """Finds the .mobileprovision file to use for signing the bundle. - - Checks all the installed provisioning profiles (or if the user specified - the PROVISIONING_PROFILE variable, only consult it) and select the most - specific that correspond to the bundle identifier. - - Args: - profile: string, optional, short name of the .mobileprovision file - to use, if empty or the file is missing, the best file installed - will be used - bundle_identifier: string, value of CFBundleIdentifier from Info.plist - - Returns: - A tuple of the path to the selected provisioning profile, the data of - the embedded plist in the provisioning profile and the team identifier - to use for code signing. - - Raises: - SystemExit: if no .mobileprovision can be used to sign the bundle. - """ - profiles_dir = os.path.join( - os.environ['HOME'], 'Library', 'MobileDevice', 'Provisioning Profiles') - if not os.path.isdir(profiles_dir): - print >>sys.stderr, ( - 'cannot find mobile provisioning for %s' % bundle_identifier) - sys.exit(1) - provisioning_profiles = None - if profile: - profile_path = os.path.join(profiles_dir, profile + '.mobileprovision') - if os.path.exists(profile_path): - provisioning_profiles = [profile_path] - if not provisioning_profiles: - provisioning_profiles = glob.glob( - os.path.join(profiles_dir, '*.mobileprovision')) - valid_provisioning_profiles = {} - for profile_path in provisioning_profiles: - profile_data = self._LoadProvisioningProfile(profile_path) - app_id_pattern = profile_data.get( - 'Entitlements', {}).get('application-identifier', '') - for team_identifier in profile_data.get('TeamIdentifier', []): - app_id = '%s.%s' % (team_identifier, bundle_identifier) - if fnmatch.fnmatch(app_id, app_id_pattern): - valid_provisioning_profiles[app_id_pattern] = ( - profile_path, profile_data, team_identifier) - if not valid_provisioning_profiles: - print >>sys.stderr, ( - 'cannot find mobile provisioning for %s' % bundle_identifier) - sys.exit(1) - # If the user has multiple provisioning profiles installed that can be - # used for ${bundle_identifier}, pick the most specific one (ie. the - # provisioning profile whose pattern is the longest). - selected_key = max(valid_provisioning_profiles, key=lambda v: len(v)) - return valid_provisioning_profiles[selected_key] - - def _LoadProvisioningProfile(self, profile_path): - """Extracts the plist embedded in a provisioning profile. - - Args: - profile_path: string, path to the .mobileprovision file - - Returns: - Content of the plist embedded in the provisioning profile as a dictionary. 
- """ - with tempfile.NamedTemporaryFile() as temp: - subprocess.check_call([ - 'security', 'cms', '-D', '-i', profile_path, '-o', temp.name]) - return self._LoadPlistMaybeBinary(temp.name) - - def _MergePlist(self, merged_plist, plist): - """Merge |plist| into |merged_plist|.""" - for key, value in plist.iteritems(): - if isinstance(value, dict): - merged_value = merged_plist.get(key, {}) - if isinstance(merged_value, dict): - self._MergePlist(merged_value, value) - merged_plist[key] = merged_value - else: - merged_plist[key] = value - else: - merged_plist[key] = value - - def _LoadPlistMaybeBinary(self, plist_path): - """Loads into a memory a plist possibly encoded in binary format. - - This is a wrapper around plistlib.readPlist that tries to convert the - plist to the XML format if it can't be parsed (assuming that it is in - the binary format). - - Args: - plist_path: string, path to a plist file, in XML or binary format - - Returns: - Content of the plist as a dictionary. - """ - try: - # First, try to read the file using plistlib that only supports XML, - # and if an exception is raised, convert a temporary copy to XML and - # load that copy. - return plistlib.readPlist(plist_path) - except: - pass - with tempfile.NamedTemporaryFile() as temp: - shutil.copy2(plist_path, temp.name) - subprocess.check_call(['plutil', '-convert', 'xml1', temp.name]) - return plistlib.readPlist(temp.name) - - def _GetSubstitutions(self, bundle_identifier, app_identifier_prefix): - """Constructs a dictionary of variable substitutions for Entitlements.plist. - - Args: - bundle_identifier: string, value of CFBundleIdentifier from Info.plist - app_identifier_prefix: string, value for AppIdentifierPrefix - - Returns: - Dictionary of substitutions to apply when generating Entitlements.plist. - """ - return { - 'CFBundleIdentifier': bundle_identifier, - 'AppIdentifierPrefix': app_identifier_prefix, - } - - def _GetCFBundleIdentifier(self): - """Extracts CFBundleIdentifier value from Info.plist in the bundle. - - Returns: - Value of CFBundleIdentifier in the Info.plist located in the bundle. - """ - info_plist_path = os.path.join( - os.environ['TARGET_BUILD_DIR'], - os.environ['INFOPLIST_PATH']) - info_plist_data = self._LoadPlistMaybeBinary(info_plist_path) - return info_plist_data['CFBundleIdentifier'] - - def _InstallEntitlements(self, entitlements, substitutions, overrides): - """Generates and install the ${BundleName}.xcent entitlements file. - - Expands variables "$(variable)" pattern in the source entitlements file, - add extra entitlements defined in the .mobileprovision file and the copy - the generated plist to "${BundlePath}.xcent". - - Args: - entitlements: string, optional, path to the Entitlements.plist template - to use, defaults to "${SDKROOT}/Entitlements.plist" - substitutions: dictionary, variable substitutions - overrides: dictionary, values to add to the entitlements - - Returns: - Path to the generated entitlements file. 
- """ - source_path = entitlements - target_path = os.path.join( - os.environ['BUILT_PRODUCTS_DIR'], - os.environ['PRODUCT_NAME'] + '.xcent') - if not source_path: - source_path = os.path.join( - os.environ['SDKROOT'], - 'Entitlements.plist') - shutil.copy2(source_path, target_path) - data = self._LoadPlistMaybeBinary(target_path) - data = self._ExpandVariables(data, substitutions) - if overrides: - for key in overrides: - if key not in data: - data[key] = overrides[key] - plistlib.writePlist(data, target_path) - return target_path - - def _ExpandVariables(self, data, substitutions): - """Expands variables "$(variable)" in data. - - Args: - data: object, can be either string, list or dictionary - substitutions: dictionary, variable substitutions to perform - - Returns: - Copy of data where each references to "$(variable)" has been replaced - by the corresponding value found in substitutions, or left intact if - the key was not found. - """ - if isinstance(data, str): - for key, value in substitutions.iteritems(): - data = data.replace('$(%s)' % key, value) - return data - if isinstance(data, list): - return [self._ExpandVariables(v, substitutions) for v in data] - if isinstance(data, dict): - return {k: self._ExpandVariables(data[k], substitutions) for k in data} - return data - -if __name__ == '__main__': - sys.exit(main(sys.argv[1:])) diff --git a/test/addons-napi/test_dataview/build/test_dataview.target.mk b/test/addons-napi/test_dataview/build/test_dataview.target.mk deleted file mode 100644 index 1a2bb312a489ba..00000000000000 --- a/test/addons-napi/test_dataview/build/test_dataview.target.mk +++ /dev/null @@ -1,178 +0,0 @@ -# This file is generated by gyp; do not edit. - -TOOLSET := target -TARGET := test_dataview -DEFS_Debug := \ - '-DNODE_GYP_MODULE_NAME=test_dataview' \ - '-DUSING_UV_SHARED=1' \ - '-DUSING_V8_SHARED=1' \ - '-DV8_DEPRECATION_WARNINGS=1' \ - '-D_DARWIN_USE_64_BIT_INODE=1' \ - '-D_LARGEFILE_SOURCE' \ - '-D_FILE_OFFSET_BITS=64' \ - '-DBUILDING_NODE_EXTENSION' \ - '-DDEBUG' \ - '-D_DEBUG' \ - '-DV8_ENABLE_CHECKS' - -# Flags passed to all source files. -CFLAGS_Debug := \ - -O0 \ - -gdwarf-2 \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -Wall \ - -Wendif-labels \ - -W \ - -Wno-unused-parameter - -# Flags passed to only C files. -CFLAGS_C_Debug := \ - -fno-strict-aliasing - -# Flags passed to only C++ files. -CFLAGS_CC_Debug := \ - -std=gnu++0x \ - -stdlib=libc++ \ - -fno-rtti \ - -fno-exceptions \ - -fno-threadsafe-statics \ - -fno-strict-aliasing - -# Flags passed to only ObjC files. -CFLAGS_OBJC_Debug := - -# Flags passed to only ObjC++ files. -CFLAGS_OBJCC_Debug := - -INCS_Debug := \ - -I/Users/trott/io.js/include/node \ - -I/Users/trott/io.js/src \ - -I/Users/trott/io.js/deps/uv/include \ - -I/Users/trott/io.js/deps/v8/include - -DEFS_Release := \ - '-DNODE_GYP_MODULE_NAME=test_dataview' \ - '-DUSING_UV_SHARED=1' \ - '-DUSING_V8_SHARED=1' \ - '-DV8_DEPRECATION_WARNINGS=1' \ - '-D_DARWIN_USE_64_BIT_INODE=1' \ - '-D_LARGEFILE_SOURCE' \ - '-D_FILE_OFFSET_BITS=64' \ - '-DBUILDING_NODE_EXTENSION' - -# Flags passed to all source files. -CFLAGS_Release := \ - -Os \ - -gdwarf-2 \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -Wall \ - -Wendif-labels \ - -W \ - -Wno-unused-parameter - -# Flags passed to only C files. -CFLAGS_C_Release := \ - -fno-strict-aliasing - -# Flags passed to only C++ files. 
-CFLAGS_CC_Release := \ - -std=gnu++0x \ - -stdlib=libc++ \ - -fno-rtti \ - -fno-exceptions \ - -fno-threadsafe-statics \ - -fno-strict-aliasing - -# Flags passed to only ObjC files. -CFLAGS_OBJC_Release := - -# Flags passed to only ObjC++ files. -CFLAGS_OBJCC_Release := - -INCS_Release := \ - -I/Users/trott/io.js/include/node \ - -I/Users/trott/io.js/src \ - -I/Users/trott/io.js/deps/uv/include \ - -I/Users/trott/io.js/deps/v8/include - -OBJS := \ - $(obj).target/$(TARGET)/test_dataview.o - -# Add to the list of files we specially track dependencies for. -all_deps += $(OBJS) - -# CFLAGS et al overrides must be target-local. -# See "Target-specific Variable Values" in the GNU Make manual. -$(OBJS): TOOLSET := $(TOOLSET) -$(OBJS): GYP_CFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) -$(OBJS): GYP_CXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) -$(OBJS): GYP_OBJCFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) $(CFLAGS_OBJC_$(BUILDTYPE)) -$(OBJS): GYP_OBJCXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) $(CFLAGS_OBJCC_$(BUILDTYPE)) - -# Suffix rules, putting all outputs into $(obj). - -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) - -# Try building from generated source, too. - -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) - -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) - -# End of this set of suffix rules -### Rules for final target. -LDFLAGS_Debug := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -L$(builddir) \ - -stdlib=libc++ - -LIBTOOLFLAGS_Debug := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first - -LDFLAGS_Release := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -L$(builddir) \ - -stdlib=libc++ - -LIBTOOLFLAGS_Release := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first - -LIBS := - -$(builddir)/test_dataview.node: GYP_LDFLAGS := $(LDFLAGS_$(BUILDTYPE)) -$(builddir)/test_dataview.node: LIBS := $(LIBS) -$(builddir)/test_dataview.node: GYP_LIBTOOLFLAGS := $(LIBTOOLFLAGS_$(BUILDTYPE)) -$(builddir)/test_dataview.node: TOOLSET := $(TOOLSET) -$(builddir)/test_dataview.node: $(OBJS) FORCE_DO_CMD - $(call do_cmd,solink_module) - -all_deps += $(builddir)/test_dataview.node -# Add target alias -.PHONY: test_dataview -test_dataview: $(builddir)/test_dataview.node - -# Short alias for building this executable. -.PHONY: test_dataview.node -test_dataview.node: $(builddir)/test_dataview.node - -# Add executable to "all" target. -.PHONY: all -all: $(builddir)/test_dataview.node - diff --git a/test/addons-napi/test_env_sharing/build/Makefile b/test/addons-napi/test_env_sharing/build/Makefile deleted file mode 100644 index b8333ee4553e88..00000000000000 --- a/test/addons-napi/test_env_sharing/build/Makefile +++ /dev/null @@ -1,347 +0,0 @@ -# We borrow heavily from the kernel build setup, though we are simpler since -# we don't have Kconfig tweaking settings on us. - -# The implicit make rules have it looking for RCS files, among other things. -# We instead explicitly write all the rules we care about. -# It's even quicker (saves ~200ms) to pass -r on the command line. 
-MAKEFLAGS=-r - -# The source directory tree. -srcdir := .. -abs_srcdir := $(abspath $(srcdir)) - -# The name of the builddir. -builddir_name ?= . - -# The V=1 flag on command line makes us verbosely print command lines. -ifdef V - quiet= -else - quiet=quiet_ -endif - -# Specify BUILDTYPE=Release on the command line for a release build. -BUILDTYPE ?= Release - -# Directory all our build output goes into. -# Note that this must be two directories beneath src/ for unit tests to pass, -# as they reach into the src/ directory for data with relative paths. -builddir ?= $(builddir_name)/$(BUILDTYPE) -abs_builddir := $(abspath $(builddir)) -depsdir := $(builddir)/.deps - -# Object output directory. -obj := $(builddir)/obj -abs_obj := $(abspath $(obj)) - -# We build up a list of every single one of the targets so we can slurp in the -# generated dependency rule Makefiles in one pass. -all_deps := - - - -CC.target ?= $(CC) -CFLAGS.target ?= $(CPPFLAGS) $(CFLAGS) -CXX.target ?= $(CXX) -CXXFLAGS.target ?= $(CPPFLAGS) $(CXXFLAGS) -LINK.target ?= $(LINK) -LDFLAGS.target ?= $(LDFLAGS) -AR.target ?= $(AR) - -# C++ apps need to be linked with g++. -LINK ?= $(CXX.target) - -# TODO(evan): move all cross-compilation logic to gyp-time so we don't need -# to replicate this environment fallback in make as well. -CC.host ?= gcc -CFLAGS.host ?= $(CPPFLAGS_host) $(CFLAGS_host) -CXX.host ?= g++ -CXXFLAGS.host ?= $(CPPFLAGS_host) $(CXXFLAGS_host) -LINK.host ?= $(CXX.host) -LDFLAGS.host ?= -AR.host ?= ar - -# Define a dir function that can handle spaces. -# http://www.gnu.org/software/make/manual/make.html#Syntax-of-Functions -# "leading spaces cannot appear in the text of the first argument as written. -# These characters can be put into the argument value by variable substitution." -empty := -space := $(empty) $(empty) - -# http://stackoverflow.com/questions/1189781/using-make-dir-or-notdir-on-a-path-with-spaces -replace_spaces = $(subst $(space),?,$1) -unreplace_spaces = $(subst ?,$(space),$1) -dirx = $(call unreplace_spaces,$(dir $(call replace_spaces,$1))) - -# Flags to make gcc output dependency info. Note that you need to be -# careful here to use the flags that ccache and distcc can understand. -# We write to a dep file on the side first and then rename at the end -# so we can't end up with a broken dep file. -depfile = $(depsdir)/$(call replace_spaces,$@).d -DEPFLAGS = -MMD -MF $(depfile).raw - -# We have to fixup the deps output in a few ways. -# (1) the file output should mention the proper .o file. -# ccache or distcc lose the path to the target, so we convert a rule of -# the form: -# foobar.o: DEP1 DEP2 -# into -# path/to/foobar.o: DEP1 DEP2 -# (2) we want missing files not to cause us to fail to build. -# We want to rewrite -# foobar.o: DEP1 DEP2 \ -# DEP3 -# to -# DEP1: -# DEP2: -# DEP3: -# so if the files are missing, they're just considered phony rules. -# We have to do some pretty insane escaping to get those backslashes -# and dollar signs past make, the shell, and sed at the same time. -# Doesn't work with spaces, but that's fine: .d files have spaces in -# their names replaced with other characters. -define fixup_dep -# The depfile may not exist if the input file didn't have any #includes. -touch $(depfile).raw -# Fixup path as in (1). -sed -e "s|^$(notdir $@)|$@|" $(depfile).raw >> $(depfile) -# Add extra rules as in (2). -# We remove slashes and replace spaces with new lines; -# remove blank lines; -# delete the first line and append a colon to the remaining lines. 
-sed -e 's|\\||' -e 'y| |\n|' $(depfile).raw |\ - grep -v '^$$' |\ - sed -e 1d -e 's|$$|:|' \ - >> $(depfile) -rm $(depfile).raw -endef - -# Command definitions: -# - cmd_foo is the actual command to run; -# - quiet_cmd_foo is the brief-output summary of the command. - -quiet_cmd_cc = CC($(TOOLSET)) $@ -cmd_cc = $(CC.$(TOOLSET)) $(GYP_CFLAGS) $(DEPFLAGS) $(CFLAGS.$(TOOLSET)) -c -o $@ $< - -quiet_cmd_cxx = CXX($(TOOLSET)) $@ -cmd_cxx = $(CXX.$(TOOLSET)) $(GYP_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< - -quiet_cmd_objc = CXX($(TOOLSET)) $@ -cmd_objc = $(CC.$(TOOLSET)) $(GYP_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $< - -quiet_cmd_objcxx = CXX($(TOOLSET)) $@ -cmd_objcxx = $(CXX.$(TOOLSET)) $(GYP_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $< - -# Commands for precompiled header files. -quiet_cmd_pch_c = CXX($(TOOLSET)) $@ -cmd_pch_c = $(CC.$(TOOLSET)) $(GYP_PCH_CFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< -quiet_cmd_pch_cc = CXX($(TOOLSET)) $@ -cmd_pch_cc = $(CC.$(TOOLSET)) $(GYP_PCH_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< -quiet_cmd_pch_m = CXX($(TOOLSET)) $@ -cmd_pch_m = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $< -quiet_cmd_pch_mm = CXX($(TOOLSET)) $@ -cmd_pch_mm = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $< - -# gyp-mac-tool is written next to the root Makefile by gyp. -# Use $(4) for the command, since $(2) and $(3) are used as flag by do_cmd -# already. -quiet_cmd_mac_tool = MACTOOL $(4) $< -cmd_mac_tool = ./gyp-mac-tool $(4) $< "$@" - -quiet_cmd_mac_package_framework = PACKAGE FRAMEWORK $@ -cmd_mac_package_framework = ./gyp-mac-tool package-framework "$@" $(4) - -quiet_cmd_infoplist = INFOPLIST $@ -cmd_infoplist = $(CC.$(TOOLSET)) -E -P -Wno-trigraphs -x c $(INFOPLIST_DEFINES) "$<" -o "$@" - -quiet_cmd_touch = TOUCH $@ -cmd_touch = touch $@ - -quiet_cmd_copy = COPY $@ -# send stderr to /dev/null to ignore messages when linking directories. -cmd_copy = rm -rf "$@" && cp -af "$<" "$@" - -quiet_cmd_alink = LIBTOOL-STATIC $@ -cmd_alink = rm -f $@ && ./gyp-mac-tool filter-libtool libtool $(GYP_LIBTOOLFLAGS) -static -o $@ $(filter %.o,$^) - -quiet_cmd_link = LINK($(TOOLSET)) $@ -cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS) - -quiet_cmd_solink = SOLINK($(TOOLSET)) $@ -cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS) - -quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@ -cmd_solink_module = $(LINK.$(TOOLSET)) -bundle $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS) - - -# Define an escape_quotes function to escape single quotes. -# This allows us to handle quotes properly as long as we always use -# use single quotes and escape_quotes. -escape_quotes = $(subst ','\'',$(1)) -# This comment is here just to include a ' to unconfuse syntax highlighting. -# Define an escape_vars function to escape '$' variable syntax. -# This allows us to read/write command lines with shell variables (e.g. -# $LD_LIBRARY_PATH), without triggering make substitution. -escape_vars = $(subst $$,$$$$,$(1)) -# Helper that expands to a shell command to echo a string exactly as it is in -# make. This uses printf instead of echo because printf's behaviour with respect -# to escape sequences is more portable than echo's across different shells -# (e.g., dash, bash). 
-exact_echo = printf '%s\n' '$(call escape_quotes,$(1))' - -# Helper to compare the command we're about to run against the command -# we logged the last time we ran the command. Produces an empty -# string (false) when the commands match. -# Tricky point: Make has no string-equality test function. -# The kernel uses the following, but it seems like it would have false -# positives, where one string reordered its arguments. -# arg_check = $(strip $(filter-out $(cmd_$(1)), $(cmd_$@)) \ -# $(filter-out $(cmd_$@), $(cmd_$(1)))) -# We instead substitute each for the empty string into the other, and -# say they're equal if both substitutions produce the empty string. -# .d files contain ? instead of spaces, take that into account. -command_changed = $(or $(subst $(cmd_$(1)),,$(cmd_$(call replace_spaces,$@))),\ - $(subst $(cmd_$(call replace_spaces,$@)),,$(cmd_$(1)))) - -# Helper that is non-empty when a prerequisite changes. -# Normally make does this implicitly, but we force rules to always run -# so we can check their command lines. -# $? -- new prerequisites -# $| -- order-only dependencies -prereq_changed = $(filter-out FORCE_DO_CMD,$(filter-out $|,$?)) - -# Helper that executes all postbuilds until one fails. -define do_postbuilds - @E=0;\ - for p in $(POSTBUILDS); do\ - eval $$p;\ - E=$$?;\ - if [ $$E -ne 0 ]; then\ - break;\ - fi;\ - done;\ - if [ $$E -ne 0 ]; then\ - rm -rf "$@";\ - exit $$E;\ - fi -endef - -# do_cmd: run a command via the above cmd_foo names, if necessary. -# Should always run for a given target to handle command-line changes. -# Second argument, if non-zero, makes it do asm/C/C++ dependency munging. -# Third argument, if non-zero, makes it do POSTBUILDS processing. -# Note: We intentionally do NOT call dirx for depfile, since it contains ? for -# spaces already and dirx strips the ? characters. -define do_cmd -$(if $(or $(command_changed),$(prereq_changed)), - @$(call exact_echo, $($(quiet)cmd_$(1))) - @mkdir -p "$(call dirx,$@)" "$(dir $(depfile))" - $(if $(findstring flock,$(word 2,$(cmd_$1))), - @$(cmd_$(1)) - @echo " $(quiet_cmd_$(1)): Finished", - @$(cmd_$(1)) - ) - @$(call exact_echo,$(call escape_vars,cmd_$(call replace_spaces,$@) := $(cmd_$(1)))) > $(depfile) - @$(if $(2),$(fixup_dep)) - $(if $(and $(3), $(POSTBUILDS)), - $(call do_postbuilds) - ) -) -endef - -# Declare the "all" target first so it is the default, -# even though we don't have the deps yet. -.PHONY: all -all: - -# make looks for ways to re-generate included makefiles, but in our case, we -# don't have a direct way. Explicitly telling make that it has nothing to do -# for them makes it go faster. -%.d: ; - -# Use FORCE_DO_CMD to force a target to run. Should be coupled with -# do_cmd. -.PHONY: FORCE_DO_CMD -FORCE_DO_CMD: - -TOOLSET := target -# Suffix rules, putting all outputs into $(obj). -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.m FORCE_DO_CMD - @$(call do_cmd,objc,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.mm FORCE_DO_CMD - @$(call do_cmd,objcxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.S FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.s FORCE_DO_CMD - @$(call do_cmd,cc,1) - -# Try building from generated source, too. 
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.m FORCE_DO_CMD - @$(call do_cmd,objc,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.mm FORCE_DO_CMD - @$(call do_cmd,objcxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.S FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.s FORCE_DO_CMD - @$(call do_cmd,cc,1) - -$(obj).$(TOOLSET)/%.o: $(obj)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.m FORCE_DO_CMD - @$(call do_cmd,objc,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.mm FORCE_DO_CMD - @$(call do_cmd,objcxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.S FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.s FORCE_DO_CMD - @$(call do_cmd,cc,1) - - -ifeq ($(strip $(foreach prefix,$(NO_LOAD),\ - $(findstring $(join ^,$(prefix)),\ - $(join ^,compare_env.target.mk)))),) - include compare_env.target.mk -endif -ifeq ($(strip $(foreach prefix,$(NO_LOAD),\ - $(findstring $(join ^,$(prefix)),\ - $(join ^,store_env.target.mk)))),) - include store_env.target.mk -endif - -quiet_cmd_regen_makefile = ACTION Regenerating $@ -cmd_regen_makefile = cd $(srcdir); /Users/trott/io.js/deps/npm/node_modules/node-gyp/gyp/gyp_main.py -fmake --ignore-environment "--toplevel-dir=." -I/Users/trott/io.js/test/addons-napi/test_env_sharing/build/config.gypi -I/Users/trott/io.js/deps/npm/node_modules/node-gyp/addon.gypi -I/Users/trott/io.js/common.gypi "--depth=." "-Goutput_dir=." "--generator-output=build" "-Dlibrary=shared_library" "-Dvisibility=default" "-Dnode_root_dir=/Users/trott/io.js" "-Dnode_gyp_dir=/Users/trott/io.js/deps/npm/node_modules/node-gyp" "-Dnode_lib_file=/Users/trott/io.js/$(Configuration)/node.lib" "-Dmodule_root_dir=/Users/trott/io.js/test/addons-napi/test_env_sharing" "-Dnode_engine=v8" binding.gyp -Makefile: $(srcdir)/../../../deps/npm/node_modules/node-gyp/addon.gypi $(srcdir)/build/config.gypi $(srcdir)/binding.gyp $(srcdir)/../../../common.gypi - $(call do_cmd,regen_makefile) - -# "all" is a concatenation of the "all" targets from all the included -# sub-makefiles. This is just here to clarify. -all: - -# Add in dependency-tracking rules. $(all_deps) is the list of every single -# target in our tree. Only consider the ones with .d (dependency) info: -d_files := $(wildcard $(foreach f,$(all_deps),$(depsdir)/$(f).d)) -ifneq ($(d_files),) - include $(d_files) -endif diff --git a/test/addons-napi/test_env_sharing/build/binding.Makefile b/test/addons-napi/test_env_sharing/build/binding.Makefile deleted file mode 100644 index df6645aac06baf..00000000000000 --- a/test/addons-napi/test_env_sharing/build/binding.Makefile +++ /dev/null @@ -1,6 +0,0 @@ -# This file is generated by gyp; do not edit. - -export builddir_name ?= ./build/. 
-.PHONY: all -all: - $(MAKE) compare_env store_env diff --git a/test/addons-napi/test_env_sharing/build/compare_env.target.mk b/test/addons-napi/test_env_sharing/build/compare_env.target.mk deleted file mode 100644 index 961db4f5157bcc..00000000000000 --- a/test/addons-napi/test_env_sharing/build/compare_env.target.mk +++ /dev/null @@ -1,178 +0,0 @@ -# This file is generated by gyp; do not edit. - -TOOLSET := target -TARGET := compare_env -DEFS_Debug := \ - '-DNODE_GYP_MODULE_NAME=compare_env' \ - '-DUSING_UV_SHARED=1' \ - '-DUSING_V8_SHARED=1' \ - '-DV8_DEPRECATION_WARNINGS=1' \ - '-D_DARWIN_USE_64_BIT_INODE=1' \ - '-D_LARGEFILE_SOURCE' \ - '-D_FILE_OFFSET_BITS=64' \ - '-DBUILDING_NODE_EXTENSION' \ - '-DDEBUG' \ - '-D_DEBUG' \ - '-DV8_ENABLE_CHECKS' - -# Flags passed to all source files. -CFLAGS_Debug := \ - -O0 \ - -gdwarf-2 \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -Wall \ - -Wendif-labels \ - -W \ - -Wno-unused-parameter - -# Flags passed to only C files. -CFLAGS_C_Debug := \ - -fno-strict-aliasing - -# Flags passed to only C++ files. -CFLAGS_CC_Debug := \ - -std=gnu++0x \ - -stdlib=libc++ \ - -fno-rtti \ - -fno-exceptions \ - -fno-threadsafe-statics \ - -fno-strict-aliasing - -# Flags passed to only ObjC files. -CFLAGS_OBJC_Debug := - -# Flags passed to only ObjC++ files. -CFLAGS_OBJCC_Debug := - -INCS_Debug := \ - -I/Users/trott/io.js/include/node \ - -I/Users/trott/io.js/src \ - -I/Users/trott/io.js/deps/uv/include \ - -I/Users/trott/io.js/deps/v8/include - -DEFS_Release := \ - '-DNODE_GYP_MODULE_NAME=compare_env' \ - '-DUSING_UV_SHARED=1' \ - '-DUSING_V8_SHARED=1' \ - '-DV8_DEPRECATION_WARNINGS=1' \ - '-D_DARWIN_USE_64_BIT_INODE=1' \ - '-D_LARGEFILE_SOURCE' \ - '-D_FILE_OFFSET_BITS=64' \ - '-DBUILDING_NODE_EXTENSION' - -# Flags passed to all source files. -CFLAGS_Release := \ - -Os \ - -gdwarf-2 \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -Wall \ - -Wendif-labels \ - -W \ - -Wno-unused-parameter - -# Flags passed to only C files. -CFLAGS_C_Release := \ - -fno-strict-aliasing - -# Flags passed to only C++ files. -CFLAGS_CC_Release := \ - -std=gnu++0x \ - -stdlib=libc++ \ - -fno-rtti \ - -fno-exceptions \ - -fno-threadsafe-statics \ - -fno-strict-aliasing - -# Flags passed to only ObjC files. -CFLAGS_OBJC_Release := - -# Flags passed to only ObjC++ files. -CFLAGS_OBJCC_Release := - -INCS_Release := \ - -I/Users/trott/io.js/include/node \ - -I/Users/trott/io.js/src \ - -I/Users/trott/io.js/deps/uv/include \ - -I/Users/trott/io.js/deps/v8/include - -OBJS := \ - $(obj).target/$(TARGET)/compare_env.o - -# Add to the list of files we specially track dependencies for. -all_deps += $(OBJS) - -# CFLAGS et al overrides must be target-local. -# See "Target-specific Variable Values" in the GNU Make manual. -$(OBJS): TOOLSET := $(TOOLSET) -$(OBJS): GYP_CFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) -$(OBJS): GYP_CXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) -$(OBJS): GYP_OBJCFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) $(CFLAGS_OBJC_$(BUILDTYPE)) -$(OBJS): GYP_OBJCXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) $(CFLAGS_OBJCC_$(BUILDTYPE)) - -# Suffix rules, putting all outputs into $(obj). - -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) - -# Try building from generated source, too. 
- -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) - -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) - -# End of this set of suffix rules -### Rules for final target. -LDFLAGS_Debug := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -L$(builddir) \ - -stdlib=libc++ - -LIBTOOLFLAGS_Debug := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first - -LDFLAGS_Release := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -L$(builddir) \ - -stdlib=libc++ - -LIBTOOLFLAGS_Release := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first - -LIBS := - -$(builddir)/compare_env.node: GYP_LDFLAGS := $(LDFLAGS_$(BUILDTYPE)) -$(builddir)/compare_env.node: LIBS := $(LIBS) -$(builddir)/compare_env.node: GYP_LIBTOOLFLAGS := $(LIBTOOLFLAGS_$(BUILDTYPE)) -$(builddir)/compare_env.node: TOOLSET := $(TOOLSET) -$(builddir)/compare_env.node: $(OBJS) FORCE_DO_CMD - $(call do_cmd,solink_module) - -all_deps += $(builddir)/compare_env.node -# Add target alias -.PHONY: compare_env -compare_env: $(builddir)/compare_env.node - -# Short alias for building this executable. -.PHONY: compare_env.node -compare_env.node: $(builddir)/compare_env.node - -# Add executable to "all" target. -.PHONY: all -all: $(builddir)/compare_env.node - diff --git a/test/addons-napi/test_env_sharing/build/config.gypi b/test/addons-napi/test_env_sharing/build/config.gypi deleted file mode 100644 index 6a35cfccbb772b..00000000000000 --- a/test/addons-napi/test_env_sharing/build/config.gypi +++ /dev/null @@ -1,70 +0,0 @@ -# Do not edit. File was generated by node-gyp's "configure" step -{ - "target_defaults": { - "cflags": [], - "default_configuration": "Release", - "defines": [], - "include_dirs": [], - "libraries": [] - }, - "variables": { - "asan": 0, - "coverage": "false", - "debug_devtools": "node", - "debug_http2": "false", - "debug_nghttp2": "false", - "force_dynamic_crt": 0, - "host_arch": "x64", - "icu_data_file": "icudt59l.dat", - "icu_data_in": "../../deps/icu-small/source/data/in/icudt59l.dat", - "icu_endianness": "l", - "icu_gyp_path": "tools/icu/icu-generic.gyp", - "icu_locales": "en,root", - "icu_path": "deps/icu-small", - "icu_small": "true", - "icu_ver_major": "59", - "llvm_version": 0, - "node_byteorder": "little", - "node_enable_d8": "false", - "node_enable_v8_vtunejit": "false", - "node_install_npm": "true", - "node_module_version": 58, - "node_no_browser_globals": "false", - "node_prefix": "/usr/local", - "node_release_urlbase": "", - "node_shared": "false", - "node_shared_cares": "false", - "node_shared_http_parser": "false", - "node_shared_libuv": "false", - "node_shared_openssl": "false", - "node_shared_zlib": "false", - "node_tag": "", - "node_use_bundled_v8": "true", - "node_use_dtrace": "true", - "node_use_etw": "false", - "node_use_lttng": "false", - "node_use_openssl": "true", - "node_use_perfctr": "false", - "node_use_v8_platform": "true", - "node_without_node_options": "false", - "openssl_fips": "", - "openssl_no_asm": 0, - "shlib_suffix": "58.dylib", - "target_arch": "x64", - "uv_parent_path": "/deps/uv/", - "uv_use_dtrace": "true", - "v8_enable_gdbjit": 0, - "v8_enable_i18n_support": 1, - "v8_enable_inspector": 1, - "v8_no_strict_aliasing": 1, - "v8_optimized_debug": 0, - "v8_promise_internal_field_count": 1, - "v8_random_seed": 0, - "v8_trace_maps": 0, - 
"v8_use_snapshot": "true", - "want_separate_host_toolset": 0, - "xcode_version": "8.0", - "nodedir": "/Users/trott/io.js", - "standalone_static_library": 1 - } -} diff --git a/test/addons-napi/test_env_sharing/build/gyp-mac-tool b/test/addons-napi/test_env_sharing/build/gyp-mac-tool deleted file mode 100755 index 8ef02b0493a003..00000000000000 --- a/test/addons-napi/test_env_sharing/build/gyp-mac-tool +++ /dev/null @@ -1,611 +0,0 @@ -#!/usr/bin/env python -# Generated by gyp. Do not edit. -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Utility functions to perform Xcode-style build steps. - -These functions are executed via gyp-mac-tool when using the Makefile generator. -""" - -import fcntl -import fnmatch -import glob -import json -import os -import plistlib -import re -import shutil -import string -import subprocess -import sys -import tempfile - - -def main(args): - executor = MacTool() - exit_code = executor.Dispatch(args) - if exit_code is not None: - sys.exit(exit_code) - - -class MacTool(object): - """This class performs all the Mac tooling steps. The methods can either be - executed directly, or dispatched from an argument list.""" - - def Dispatch(self, args): - """Dispatches a string command to a method.""" - if len(args) < 1: - raise Exception("Not enough arguments") - - method = "Exec%s" % self._CommandifyName(args[0]) - return getattr(self, method)(*args[1:]) - - def _CommandifyName(self, name_string): - """Transforms a tool name like copy-info-plist to CopyInfoPlist""" - return name_string.title().replace('-', '') - - def ExecCopyBundleResource(self, source, dest, convert_to_binary): - """Copies a resource file to the bundle/Resources directory, performing any - necessary compilation on each resource.""" - extension = os.path.splitext(source)[1].lower() - if os.path.isdir(source): - # Copy tree. - # TODO(thakis): This copies file attributes like mtime, while the - # single-file branch below doesn't. This should probably be changed to - # be consistent with the single-file branch. - if os.path.exists(dest): - shutil.rmtree(dest) - shutil.copytree(source, dest) - elif extension == '.xib': - return self._CopyXIBFile(source, dest) - elif extension == '.storyboard': - return self._CopyXIBFile(source, dest) - elif extension == '.strings': - self._CopyStringsFile(source, dest, convert_to_binary) - else: - shutil.copy(source, dest) - - def _CopyXIBFile(self, source, dest): - """Compiles a XIB file with ibtool into a binary plist in the bundle.""" - - # ibtool sometimes crashes with relative paths. See crbug.com/314728. 
- base = os.path.dirname(os.path.realpath(__file__)) - if os.path.relpath(source): - source = os.path.join(base, source) - if os.path.relpath(dest): - dest = os.path.join(base, dest) - - args = ['xcrun', 'ibtool', '--errors', '--warnings', '--notices', - '--output-format', 'human-readable-text', '--compile', dest, source] - ibtool_section_re = re.compile(r'/\*.*\*/') - ibtool_re = re.compile(r'.*note:.*is clipping its content') - ibtoolout = subprocess.Popen(args, stdout=subprocess.PIPE) - current_section_header = None - for line in ibtoolout.stdout: - if ibtool_section_re.match(line): - current_section_header = line - elif not ibtool_re.match(line): - if current_section_header: - sys.stdout.write(current_section_header) - current_section_header = None - sys.stdout.write(line) - return ibtoolout.returncode - - def _ConvertToBinary(self, dest): - subprocess.check_call([ - 'xcrun', 'plutil', '-convert', 'binary1', '-o', dest, dest]) - - def _CopyStringsFile(self, source, dest, convert_to_binary): - """Copies a .strings file using iconv to reconvert the input into UTF-16.""" - input_code = self._DetectInputEncoding(source) or "UTF-8" - - # Xcode's CpyCopyStringsFile / builtin-copyStrings seems to call - # CFPropertyListCreateFromXMLData() behind the scenes; at least it prints - # CFPropertyListCreateFromXMLData(): Old-style plist parser: missing - # semicolon in dictionary. - # on invalid files. Do the same kind of validation. - import CoreFoundation - s = open(source, 'rb').read() - d = CoreFoundation.CFDataCreate(None, s, len(s)) - _, error = CoreFoundation.CFPropertyListCreateFromXMLData(None, d, 0, None) - if error: - return - - fp = open(dest, 'wb') - fp.write(s.decode(input_code).encode('UTF-16')) - fp.close() - - if convert_to_binary == 'True': - self._ConvertToBinary(dest) - - def _DetectInputEncoding(self, file_name): - """Reads the first few bytes from file_name and tries to guess the text - encoding. Returns None as a guess if it can't detect it.""" - fp = open(file_name, 'rb') - try: - header = fp.read(3) - except e: - fp.close() - return None - fp.close() - if header.startswith("\xFE\xFF"): - return "UTF-16" - elif header.startswith("\xFF\xFE"): - return "UTF-16" - elif header.startswith("\xEF\xBB\xBF"): - return "UTF-8" - else: - return None - - def ExecCopyInfoPlist(self, source, dest, convert_to_binary, *keys): - """Copies the |source| Info.plist to the destination directory |dest|.""" - # Read the source Info.plist into memory. - fd = open(source, 'r') - lines = fd.read() - fd.close() - - # Insert synthesized key/value pairs (e.g. BuildMachineOSBuild). - plist = plistlib.readPlistFromString(lines) - if keys: - plist = dict(plist.items() + json.loads(keys[0]).items()) - lines = plistlib.writePlistToString(plist) - - # Go through all the environment variables and replace them as variables in - # the file. - IDENT_RE = re.compile(r'[/\s]') - for key in os.environ: - if key.startswith('_'): - continue - evar = '${%s}' % key - evalue = os.environ[key] - lines = string.replace(lines, evar, evalue) - - # Xcode supports various suffices on environment variables, which are - # all undocumented. :rfc1034identifier is used in the standard project - # template these days, and :identifier was used earlier. They are used to - # convert non-url characters into things that look like valid urls -- - # except that the replacement character for :identifier, '_' isn't valid - # in a URL either -- oops, hence :rfc1034identifier was born. 
- evar = '${%s:identifier}' % key - evalue = IDENT_RE.sub('_', os.environ[key]) - lines = string.replace(lines, evar, evalue) - - evar = '${%s:rfc1034identifier}' % key - evalue = IDENT_RE.sub('-', os.environ[key]) - lines = string.replace(lines, evar, evalue) - - # Remove any keys with values that haven't been replaced. - lines = lines.split('\n') - for i in range(len(lines)): - if lines[i].strip().startswith("${"): - lines[i] = None - lines[i - 1] = None - lines = '\n'.join(filter(lambda x: x is not None, lines)) - - # Write out the file with variables replaced. - fd = open(dest, 'w') - fd.write(lines) - fd.close() - - # Now write out PkgInfo file now that the Info.plist file has been - # "compiled". - self._WritePkgInfo(dest) - - if convert_to_binary == 'True': - self._ConvertToBinary(dest) - - def _WritePkgInfo(self, info_plist): - """This writes the PkgInfo file from the data stored in Info.plist.""" - plist = plistlib.readPlist(info_plist) - if not plist: - return - - # Only create PkgInfo for executable types. - package_type = plist['CFBundlePackageType'] - if package_type != 'APPL': - return - - # The format of PkgInfo is eight characters, representing the bundle type - # and bundle signature, each four characters. If that is missing, four - # '?' characters are used instead. - signature_code = plist.get('CFBundleSignature', '????') - if len(signature_code) != 4: # Wrong length resets everything, too. - signature_code = '?' * 4 - - dest = os.path.join(os.path.dirname(info_plist), 'PkgInfo') - fp = open(dest, 'w') - fp.write('%s%s' % (package_type, signature_code)) - fp.close() - - def ExecFlock(self, lockfile, *cmd_list): - """Emulates the most basic behavior of Linux's flock(1).""" - # Rely on exception handling to report errors. - fd = os.open(lockfile, os.O_RDONLY|os.O_NOCTTY|os.O_CREAT, 0o666) - fcntl.flock(fd, fcntl.LOCK_EX) - return subprocess.call(cmd_list) - - def ExecFilterLibtool(self, *cmd_list): - """Calls libtool and filters out '/path/to/libtool: file: foo.o has no - symbols'.""" - libtool_re = re.compile(r'^.*libtool: file: .* has no symbols$') - libtool_re5 = re.compile( - r'^.*libtool: warning for library: ' + - r'.* the table of contents is empty ' + - r'\(no object file members in the library define global symbols\)$') - env = os.environ.copy() - # Ref: - # http://www.opensource.apple.com/source/cctools/cctools-809/misc/libtool.c - # The problem with this flag is that it resets the file mtime on the file to - # epoch=0, e.g. 1970-1-1 or 1969-12-31 depending on timezone. - env['ZERO_AR_DATE'] = '1' - libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env) - _, err = libtoolout.communicate() - for line in err.splitlines(): - if not libtool_re.match(line) and not libtool_re5.match(line): - print >>sys.stderr, line - # Unconditionally touch the output .a file on the command line if present - # and the command succeeded. A bit hacky. - if not libtoolout.returncode: - for i in range(len(cmd_list) - 1): - if cmd_list[i] == "-o" and cmd_list[i+1].endswith('.a'): - os.utime(cmd_list[i+1], None) - break - return libtoolout.returncode - - def ExecPackageFramework(self, framework, version): - """Takes a path to Something.framework and the Current version of that and - sets up all the symlinks.""" - # Find the name of the binary based on the part before the ".framework". 
- binary = os.path.basename(framework).split('.')[0] - - CURRENT = 'Current' - RESOURCES = 'Resources' - VERSIONS = 'Versions' - - if not os.path.exists(os.path.join(framework, VERSIONS, version, binary)): - # Binary-less frameworks don't seem to contain symlinks (see e.g. - # chromium's out/Debug/org.chromium.Chromium.manifest/ bundle). - return - - # Move into the framework directory to set the symlinks correctly. - pwd = os.getcwd() - os.chdir(framework) - - # Set up the Current version. - self._Relink(version, os.path.join(VERSIONS, CURRENT)) - - # Set up the root symlinks. - self._Relink(os.path.join(VERSIONS, CURRENT, binary), binary) - self._Relink(os.path.join(VERSIONS, CURRENT, RESOURCES), RESOURCES) - - # Back to where we were before! - os.chdir(pwd) - - def _Relink(self, dest, link): - """Creates a symlink to |dest| named |link|. If |link| already exists, - it is overwritten.""" - if os.path.lexists(link): - os.remove(link) - os.symlink(dest, link) - - def ExecCompileXcassets(self, keys, *inputs): - """Compiles multiple .xcassets files into a single .car file. - - This invokes 'actool' to compile all the inputs .xcassets files. The - |keys| arguments is a json-encoded dictionary of extra arguments to - pass to 'actool' when the asset catalogs contains an application icon - or a launch image. - - Note that 'actool' does not create the Assets.car file if the asset - catalogs does not contains imageset. - """ - command_line = [ - 'xcrun', 'actool', '--output-format', 'human-readable-text', - '--compress-pngs', '--notices', '--warnings', '--errors', - ] - is_iphone_target = 'IPHONEOS_DEPLOYMENT_TARGET' in os.environ - if is_iphone_target: - platform = os.environ['CONFIGURATION'].split('-')[-1] - if platform not in ('iphoneos', 'iphonesimulator'): - platform = 'iphonesimulator' - command_line.extend([ - '--platform', platform, '--target-device', 'iphone', - '--target-device', 'ipad', '--minimum-deployment-target', - os.environ['IPHONEOS_DEPLOYMENT_TARGET'], '--compile', - os.path.abspath(os.environ['CONTENTS_FOLDER_PATH']), - ]) - else: - command_line.extend([ - '--platform', 'macosx', '--target-device', 'mac', - '--minimum-deployment-target', os.environ['MACOSX_DEPLOYMENT_TARGET'], - '--compile', - os.path.abspath(os.environ['UNLOCALIZED_RESOURCES_FOLDER_PATH']), - ]) - if keys: - keys = json.loads(keys) - for key, value in keys.iteritems(): - arg_name = '--' + key - if isinstance(value, bool): - if value: - command_line.append(arg_name) - elif isinstance(value, list): - for v in value: - command_line.append(arg_name) - command_line.append(str(v)) - else: - command_line.append(arg_name) - command_line.append(str(value)) - # Note: actool crashes if inputs path are relative, so use os.path.abspath - # to get absolute path name for inputs. - command_line.extend(map(os.path.abspath, inputs)) - subprocess.check_call(command_line) - - def ExecMergeInfoPlist(self, output, *inputs): - """Merge multiple .plist files into a single .plist file.""" - merged_plist = {} - for path in inputs: - plist = self._LoadPlistMaybeBinary(path) - self._MergePlist(merged_plist, plist) - plistlib.writePlist(merged_plist, output) - - def ExecCodeSignBundle(self, key, resource_rules, entitlements, provisioning): - """Code sign a bundle. - - This function tries to code sign an iOS bundle, following the same - algorithm as Xcode: - 1. copy ResourceRules.plist from the user or the SDK into the bundle, - 2. 
pick the provisioning profile that best match the bundle identifier, - and copy it into the bundle as embedded.mobileprovision, - 3. copy Entitlements.plist from user or SDK next to the bundle, - 4. code sign the bundle. - """ - resource_rules_path = self._InstallResourceRules(resource_rules) - substitutions, overrides = self._InstallProvisioningProfile( - provisioning, self._GetCFBundleIdentifier()) - entitlements_path = self._InstallEntitlements( - entitlements, substitutions, overrides) - subprocess.check_call([ - 'codesign', '--force', '--sign', key, '--resource-rules', - resource_rules_path, '--entitlements', entitlements_path, - os.path.join( - os.environ['TARGET_BUILD_DIR'], - os.environ['FULL_PRODUCT_NAME'])]) - - def _InstallResourceRules(self, resource_rules): - """Installs ResourceRules.plist from user or SDK into the bundle. - - Args: - resource_rules: string, optional, path to the ResourceRules.plist file - to use, default to "${SDKROOT}/ResourceRules.plist" - - Returns: - Path to the copy of ResourceRules.plist into the bundle. - """ - source_path = resource_rules - target_path = os.path.join( - os.environ['BUILT_PRODUCTS_DIR'], - os.environ['CONTENTS_FOLDER_PATH'], - 'ResourceRules.plist') - if not source_path: - source_path = os.path.join( - os.environ['SDKROOT'], 'ResourceRules.plist') - shutil.copy2(source_path, target_path) - return target_path - - def _InstallProvisioningProfile(self, profile, bundle_identifier): - """Installs embedded.mobileprovision into the bundle. - - Args: - profile: string, optional, short name of the .mobileprovision file - to use, if empty or the file is missing, the best file installed - will be used - bundle_identifier: string, value of CFBundleIdentifier from Info.plist - - Returns: - A tuple containing two dictionary: variables substitutions and values - to overrides when generating the entitlements file. - """ - source_path, provisioning_data, team_id = self._FindProvisioningProfile( - profile, bundle_identifier) - target_path = os.path.join( - os.environ['BUILT_PRODUCTS_DIR'], - os.environ['CONTENTS_FOLDER_PATH'], - 'embedded.mobileprovision') - shutil.copy2(source_path, target_path) - substitutions = self._GetSubstitutions(bundle_identifier, team_id + '.') - return substitutions, provisioning_data['Entitlements'] - - def _FindProvisioningProfile(self, profile, bundle_identifier): - """Finds the .mobileprovision file to use for signing the bundle. - - Checks all the installed provisioning profiles (or if the user specified - the PROVISIONING_PROFILE variable, only consult it) and select the most - specific that correspond to the bundle identifier. - - Args: - profile: string, optional, short name of the .mobileprovision file - to use, if empty or the file is missing, the best file installed - will be used - bundle_identifier: string, value of CFBundleIdentifier from Info.plist - - Returns: - A tuple of the path to the selected provisioning profile, the data of - the embedded plist in the provisioning profile and the team identifier - to use for code signing. - - Raises: - SystemExit: if no .mobileprovision can be used to sign the bundle. 
- """ - profiles_dir = os.path.join( - os.environ['HOME'], 'Library', 'MobileDevice', 'Provisioning Profiles') - if not os.path.isdir(profiles_dir): - print >>sys.stderr, ( - 'cannot find mobile provisioning for %s' % bundle_identifier) - sys.exit(1) - provisioning_profiles = None - if profile: - profile_path = os.path.join(profiles_dir, profile + '.mobileprovision') - if os.path.exists(profile_path): - provisioning_profiles = [profile_path] - if not provisioning_profiles: - provisioning_profiles = glob.glob( - os.path.join(profiles_dir, '*.mobileprovision')) - valid_provisioning_profiles = {} - for profile_path in provisioning_profiles: - profile_data = self._LoadProvisioningProfile(profile_path) - app_id_pattern = profile_data.get( - 'Entitlements', {}).get('application-identifier', '') - for team_identifier in profile_data.get('TeamIdentifier', []): - app_id = '%s.%s' % (team_identifier, bundle_identifier) - if fnmatch.fnmatch(app_id, app_id_pattern): - valid_provisioning_profiles[app_id_pattern] = ( - profile_path, profile_data, team_identifier) - if not valid_provisioning_profiles: - print >>sys.stderr, ( - 'cannot find mobile provisioning for %s' % bundle_identifier) - sys.exit(1) - # If the user has multiple provisioning profiles installed that can be - # used for ${bundle_identifier}, pick the most specific one (ie. the - # provisioning profile whose pattern is the longest). - selected_key = max(valid_provisioning_profiles, key=lambda v: len(v)) - return valid_provisioning_profiles[selected_key] - - def _LoadProvisioningProfile(self, profile_path): - """Extracts the plist embedded in a provisioning profile. - - Args: - profile_path: string, path to the .mobileprovision file - - Returns: - Content of the plist embedded in the provisioning profile as a dictionary. - """ - with tempfile.NamedTemporaryFile() as temp: - subprocess.check_call([ - 'security', 'cms', '-D', '-i', profile_path, '-o', temp.name]) - return self._LoadPlistMaybeBinary(temp.name) - - def _MergePlist(self, merged_plist, plist): - """Merge |plist| into |merged_plist|.""" - for key, value in plist.iteritems(): - if isinstance(value, dict): - merged_value = merged_plist.get(key, {}) - if isinstance(merged_value, dict): - self._MergePlist(merged_value, value) - merged_plist[key] = merged_value - else: - merged_plist[key] = value - else: - merged_plist[key] = value - - def _LoadPlistMaybeBinary(self, plist_path): - """Loads into a memory a plist possibly encoded in binary format. - - This is a wrapper around plistlib.readPlist that tries to convert the - plist to the XML format if it can't be parsed (assuming that it is in - the binary format). - - Args: - plist_path: string, path to a plist file, in XML or binary format - - Returns: - Content of the plist as a dictionary. - """ - try: - # First, try to read the file using plistlib that only supports XML, - # and if an exception is raised, convert a temporary copy to XML and - # load that copy. - return plistlib.readPlist(plist_path) - except: - pass - with tempfile.NamedTemporaryFile() as temp: - shutil.copy2(plist_path, temp.name) - subprocess.check_call(['plutil', '-convert', 'xml1', temp.name]) - return plistlib.readPlist(temp.name) - - def _GetSubstitutions(self, bundle_identifier, app_identifier_prefix): - """Constructs a dictionary of variable substitutions for Entitlements.plist. 
- - Args: - bundle_identifier: string, value of CFBundleIdentifier from Info.plist - app_identifier_prefix: string, value for AppIdentifierPrefix - - Returns: - Dictionary of substitutions to apply when generating Entitlements.plist. - """ - return { - 'CFBundleIdentifier': bundle_identifier, - 'AppIdentifierPrefix': app_identifier_prefix, - } - - def _GetCFBundleIdentifier(self): - """Extracts CFBundleIdentifier value from Info.plist in the bundle. - - Returns: - Value of CFBundleIdentifier in the Info.plist located in the bundle. - """ - info_plist_path = os.path.join( - os.environ['TARGET_BUILD_DIR'], - os.environ['INFOPLIST_PATH']) - info_plist_data = self._LoadPlistMaybeBinary(info_plist_path) - return info_plist_data['CFBundleIdentifier'] - - def _InstallEntitlements(self, entitlements, substitutions, overrides): - """Generates and install the ${BundleName}.xcent entitlements file. - - Expands variables "$(variable)" pattern in the source entitlements file, - add extra entitlements defined in the .mobileprovision file and the copy - the generated plist to "${BundlePath}.xcent". - - Args: - entitlements: string, optional, path to the Entitlements.plist template - to use, defaults to "${SDKROOT}/Entitlements.plist" - substitutions: dictionary, variable substitutions - overrides: dictionary, values to add to the entitlements - - Returns: - Path to the generated entitlements file. - """ - source_path = entitlements - target_path = os.path.join( - os.environ['BUILT_PRODUCTS_DIR'], - os.environ['PRODUCT_NAME'] + '.xcent') - if not source_path: - source_path = os.path.join( - os.environ['SDKROOT'], - 'Entitlements.plist') - shutil.copy2(source_path, target_path) - data = self._LoadPlistMaybeBinary(target_path) - data = self._ExpandVariables(data, substitutions) - if overrides: - for key in overrides: - if key not in data: - data[key] = overrides[key] - plistlib.writePlist(data, target_path) - return target_path - - def _ExpandVariables(self, data, substitutions): - """Expands variables "$(variable)" in data. - - Args: - data: object, can be either string, list or dictionary - substitutions: dictionary, variable substitutions to perform - - Returns: - Copy of data where each references to "$(variable)" has been replaced - by the corresponding value found in substitutions, or left intact if - the key was not found. - """ - if isinstance(data, str): - for key, value in substitutions.iteritems(): - data = data.replace('$(%s)' % key, value) - return data - if isinstance(data, list): - return [self._ExpandVariables(v, substitutions) for v in data] - if isinstance(data, dict): - return {k: self._ExpandVariables(data[k], substitutions) for k in data} - return data - -if __name__ == '__main__': - sys.exit(main(sys.argv[1:])) diff --git a/test/addons-napi/test_env_sharing/build/store_env.target.mk b/test/addons-napi/test_env_sharing/build/store_env.target.mk deleted file mode 100644 index 68930824a7c5e1..00000000000000 --- a/test/addons-napi/test_env_sharing/build/store_env.target.mk +++ /dev/null @@ -1,178 +0,0 @@ -# This file is generated by gyp; do not edit. - -TOOLSET := target -TARGET := store_env -DEFS_Debug := \ - '-DNODE_GYP_MODULE_NAME=store_env' \ - '-DUSING_UV_SHARED=1' \ - '-DUSING_V8_SHARED=1' \ - '-DV8_DEPRECATION_WARNINGS=1' \ - '-D_DARWIN_USE_64_BIT_INODE=1' \ - '-D_LARGEFILE_SOURCE' \ - '-D_FILE_OFFSET_BITS=64' \ - '-DBUILDING_NODE_EXTENSION' \ - '-DDEBUG' \ - '-D_DEBUG' \ - '-DV8_ENABLE_CHECKS' - -# Flags passed to all source files. 
-CFLAGS_Debug := \ - -O0 \ - -gdwarf-2 \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -Wall \ - -Wendif-labels \ - -W \ - -Wno-unused-parameter - -# Flags passed to only C files. -CFLAGS_C_Debug := \ - -fno-strict-aliasing - -# Flags passed to only C++ files. -CFLAGS_CC_Debug := \ - -std=gnu++0x \ - -stdlib=libc++ \ - -fno-rtti \ - -fno-exceptions \ - -fno-threadsafe-statics \ - -fno-strict-aliasing - -# Flags passed to only ObjC files. -CFLAGS_OBJC_Debug := - -# Flags passed to only ObjC++ files. -CFLAGS_OBJCC_Debug := - -INCS_Debug := \ - -I/Users/trott/io.js/include/node \ - -I/Users/trott/io.js/src \ - -I/Users/trott/io.js/deps/uv/include \ - -I/Users/trott/io.js/deps/v8/include - -DEFS_Release := \ - '-DNODE_GYP_MODULE_NAME=store_env' \ - '-DUSING_UV_SHARED=1' \ - '-DUSING_V8_SHARED=1' \ - '-DV8_DEPRECATION_WARNINGS=1' \ - '-D_DARWIN_USE_64_BIT_INODE=1' \ - '-D_LARGEFILE_SOURCE' \ - '-D_FILE_OFFSET_BITS=64' \ - '-DBUILDING_NODE_EXTENSION' - -# Flags passed to all source files. -CFLAGS_Release := \ - -Os \ - -gdwarf-2 \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -Wall \ - -Wendif-labels \ - -W \ - -Wno-unused-parameter - -# Flags passed to only C files. -CFLAGS_C_Release := \ - -fno-strict-aliasing - -# Flags passed to only C++ files. -CFLAGS_CC_Release := \ - -std=gnu++0x \ - -stdlib=libc++ \ - -fno-rtti \ - -fno-exceptions \ - -fno-threadsafe-statics \ - -fno-strict-aliasing - -# Flags passed to only ObjC files. -CFLAGS_OBJC_Release := - -# Flags passed to only ObjC++ files. -CFLAGS_OBJCC_Release := - -INCS_Release := \ - -I/Users/trott/io.js/include/node \ - -I/Users/trott/io.js/src \ - -I/Users/trott/io.js/deps/uv/include \ - -I/Users/trott/io.js/deps/v8/include - -OBJS := \ - $(obj).target/$(TARGET)/store_env.o - -# Add to the list of files we specially track dependencies for. -all_deps += $(OBJS) - -# CFLAGS et al overrides must be target-local. -# See "Target-specific Variable Values" in the GNU Make manual. -$(OBJS): TOOLSET := $(TOOLSET) -$(OBJS): GYP_CFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) -$(OBJS): GYP_CXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) -$(OBJS): GYP_OBJCFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) $(CFLAGS_OBJC_$(BUILDTYPE)) -$(OBJS): GYP_OBJCXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) $(CFLAGS_OBJCC_$(BUILDTYPE)) - -# Suffix rules, putting all outputs into $(obj). - -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) - -# Try building from generated source, too. - -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) - -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) - -# End of this set of suffix rules -### Rules for final target. 
-LDFLAGS_Debug := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -L$(builddir) \ - -stdlib=libc++ - -LIBTOOLFLAGS_Debug := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first - -LDFLAGS_Release := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -L$(builddir) \ - -stdlib=libc++ - -LIBTOOLFLAGS_Release := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first - -LIBS := - -$(builddir)/store_env.node: GYP_LDFLAGS := $(LDFLAGS_$(BUILDTYPE)) -$(builddir)/store_env.node: LIBS := $(LIBS) -$(builddir)/store_env.node: GYP_LIBTOOLFLAGS := $(LIBTOOLFLAGS_$(BUILDTYPE)) -$(builddir)/store_env.node: TOOLSET := $(TOOLSET) -$(builddir)/store_env.node: $(OBJS) FORCE_DO_CMD - $(call do_cmd,solink_module) - -all_deps += $(builddir)/store_env.node -# Add target alias -.PHONY: store_env -store_env: $(builddir)/store_env.node - -# Short alias for building this executable. -.PHONY: store_env.node -store_env.node: $(builddir)/store_env.node - -# Add executable to "all" target. -.PHONY: all -all: $(builddir)/store_env.node - diff --git a/test/addons-napi/test_error/build/Makefile b/test/addons-napi/test_error/build/Makefile deleted file mode 100644 index e004646841f8f6..00000000000000 --- a/test/addons-napi/test_error/build/Makefile +++ /dev/null @@ -1,342 +0,0 @@ -# We borrow heavily from the kernel build setup, though we are simpler since -# we don't have Kconfig tweaking settings on us. - -# The implicit make rules have it looking for RCS files, among other things. -# We instead explicitly write all the rules we care about. -# It's even quicker (saves ~200ms) to pass -r on the command line. -MAKEFLAGS=-r - -# The source directory tree. -srcdir := .. -abs_srcdir := $(abspath $(srcdir)) - -# The name of the builddir. -builddir_name ?= . - -# The V=1 flag on command line makes us verbosely print command lines. -ifdef V - quiet= -else - quiet=quiet_ -endif - -# Specify BUILDTYPE=Release on the command line for a release build. -BUILDTYPE ?= Release - -# Directory all our build output goes into. -# Note that this must be two directories beneath src/ for unit tests to pass, -# as they reach into the src/ directory for data with relative paths. -builddir ?= $(builddir_name)/$(BUILDTYPE) -abs_builddir := $(abspath $(builddir)) -depsdir := $(builddir)/.deps - -# Object output directory. -obj := $(builddir)/obj -abs_obj := $(abspath $(obj)) - -# We build up a list of every single one of the targets so we can slurp in the -# generated dependency rule Makefiles in one pass. -all_deps := - - - -CC.target ?= $(CC) -CFLAGS.target ?= $(CPPFLAGS) $(CFLAGS) -CXX.target ?= $(CXX) -CXXFLAGS.target ?= $(CPPFLAGS) $(CXXFLAGS) -LINK.target ?= $(LINK) -LDFLAGS.target ?= $(LDFLAGS) -AR.target ?= $(AR) - -# C++ apps need to be linked with g++. -LINK ?= $(CXX.target) - -# TODO(evan): move all cross-compilation logic to gyp-time so we don't need -# to replicate this environment fallback in make as well. -CC.host ?= gcc -CFLAGS.host ?= $(CPPFLAGS_host) $(CFLAGS_host) -CXX.host ?= g++ -CXXFLAGS.host ?= $(CPPFLAGS_host) $(CXXFLAGS_host) -LINK.host ?= $(CXX.host) -LDFLAGS.host ?= -AR.host ?= ar - -# Define a dir function that can handle spaces. -# http://www.gnu.org/software/make/manual/make.html#Syntax-of-Functions -# "leading spaces cannot appear in the text of the first argument as written. 
-# These characters can be put into the argument value by variable substitution." -empty := -space := $(empty) $(empty) - -# http://stackoverflow.com/questions/1189781/using-make-dir-or-notdir-on-a-path-with-spaces -replace_spaces = $(subst $(space),?,$1) -unreplace_spaces = $(subst ?,$(space),$1) -dirx = $(call unreplace_spaces,$(dir $(call replace_spaces,$1))) - -# Flags to make gcc output dependency info. Note that you need to be -# careful here to use the flags that ccache and distcc can understand. -# We write to a dep file on the side first and then rename at the end -# so we can't end up with a broken dep file. -depfile = $(depsdir)/$(call replace_spaces,$@).d -DEPFLAGS = -MMD -MF $(depfile).raw - -# We have to fixup the deps output in a few ways. -# (1) the file output should mention the proper .o file. -# ccache or distcc lose the path to the target, so we convert a rule of -# the form: -# foobar.o: DEP1 DEP2 -# into -# path/to/foobar.o: DEP1 DEP2 -# (2) we want missing files not to cause us to fail to build. -# We want to rewrite -# foobar.o: DEP1 DEP2 \ -# DEP3 -# to -# DEP1: -# DEP2: -# DEP3: -# so if the files are missing, they're just considered phony rules. -# We have to do some pretty insane escaping to get those backslashes -# and dollar signs past make, the shell, and sed at the same time. -# Doesn't work with spaces, but that's fine: .d files have spaces in -# their names replaced with other characters. -define fixup_dep -# The depfile may not exist if the input file didn't have any #includes. -touch $(depfile).raw -# Fixup path as in (1). -sed -e "s|^$(notdir $@)|$@|" $(depfile).raw >> $(depfile) -# Add extra rules as in (2). -# We remove slashes and replace spaces with new lines; -# remove blank lines; -# delete the first line and append a colon to the remaining lines. -sed -e 's|\\||' -e 'y| |\n|' $(depfile).raw |\ - grep -v '^$$' |\ - sed -e 1d -e 's|$$|:|' \ - >> $(depfile) -rm $(depfile).raw -endef - -# Command definitions: -# - cmd_foo is the actual command to run; -# - quiet_cmd_foo is the brief-output summary of the command. - -quiet_cmd_cc = CC($(TOOLSET)) $@ -cmd_cc = $(CC.$(TOOLSET)) $(GYP_CFLAGS) $(DEPFLAGS) $(CFLAGS.$(TOOLSET)) -c -o $@ $< - -quiet_cmd_cxx = CXX($(TOOLSET)) $@ -cmd_cxx = $(CXX.$(TOOLSET)) $(GYP_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< - -quiet_cmd_objc = CXX($(TOOLSET)) $@ -cmd_objc = $(CC.$(TOOLSET)) $(GYP_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $< - -quiet_cmd_objcxx = CXX($(TOOLSET)) $@ -cmd_objcxx = $(CXX.$(TOOLSET)) $(GYP_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $< - -# Commands for precompiled header files. -quiet_cmd_pch_c = CXX($(TOOLSET)) $@ -cmd_pch_c = $(CC.$(TOOLSET)) $(GYP_PCH_CFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< -quiet_cmd_pch_cc = CXX($(TOOLSET)) $@ -cmd_pch_cc = $(CC.$(TOOLSET)) $(GYP_PCH_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< -quiet_cmd_pch_m = CXX($(TOOLSET)) $@ -cmd_pch_m = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $< -quiet_cmd_pch_mm = CXX($(TOOLSET)) $@ -cmd_pch_mm = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $< - -# gyp-mac-tool is written next to the root Makefile by gyp. -# Use $(4) for the command, since $(2) and $(3) are used as flag by do_cmd -# already. 
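
The fixup_dep recipe above is the trickiest part of these generated makefiles, so here is what its two sed passes amount to, sketched in Python with a made-up depfile (the real recipe streams through sed/grep in the shell):

    def fixup_dep(raw, target):
        """Sketch of the depfile fixup described above, in Python instead of sed.

        Step 1 restores the full target path that ccache/distcc may drop;
        step 2 adds an empty rule per prerequisite so a header that has been
        deleted or renamed does not abort the build.
        """
        basename = target.rsplit("/", 1)[-1]
        # Step 1: "foobar.o: DEP1 DEP2" -> "path/to/foobar.o: DEP1 DEP2"
        fixed = target + raw[len(basename):] if raw.startswith(basename) else raw
        # Step 2: one "DEP:" line per prerequisite (backslash continuations folded).
        deps = fixed.split(":", 1)[1].replace("\\\n", " ").split()
        return "\n".join([fixed.rstrip()] + [d + ":" for d in deps]) + "\n"

    # Hypothetical input as ccache might emit it:
    print(fixup_dep("foobar.o: src/foo.h src/bar.h\n", "path/to/foobar.o"))
    # path/to/foobar.o: src/foo.h src/bar.h
    # src/foo.h:
    # src/bar.h:
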
-quiet_cmd_mac_tool = MACTOOL $(4) $< -cmd_mac_tool = ./gyp-mac-tool $(4) $< "$@" - -quiet_cmd_mac_package_framework = PACKAGE FRAMEWORK $@ -cmd_mac_package_framework = ./gyp-mac-tool package-framework "$@" $(4) - -quiet_cmd_infoplist = INFOPLIST $@ -cmd_infoplist = $(CC.$(TOOLSET)) -E -P -Wno-trigraphs -x c $(INFOPLIST_DEFINES) "$<" -o "$@" - -quiet_cmd_touch = TOUCH $@ -cmd_touch = touch $@ - -quiet_cmd_copy = COPY $@ -# send stderr to /dev/null to ignore messages when linking directories. -cmd_copy = rm -rf "$@" && cp -af "$<" "$@" - -quiet_cmd_alink = LIBTOOL-STATIC $@ -cmd_alink = rm -f $@ && ./gyp-mac-tool filter-libtool libtool $(GYP_LIBTOOLFLAGS) -static -o $@ $(filter %.o,$^) - -quiet_cmd_link = LINK($(TOOLSET)) $@ -cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS) - -quiet_cmd_solink = SOLINK($(TOOLSET)) $@ -cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS) - -quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@ -cmd_solink_module = $(LINK.$(TOOLSET)) -bundle $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS) - - -# Define an escape_quotes function to escape single quotes. -# This allows us to handle quotes properly as long as we always use -# use single quotes and escape_quotes. -escape_quotes = $(subst ','\'',$(1)) -# This comment is here just to include a ' to unconfuse syntax highlighting. -# Define an escape_vars function to escape '$' variable syntax. -# This allows us to read/write command lines with shell variables (e.g. -# $LD_LIBRARY_PATH), without triggering make substitution. -escape_vars = $(subst $$,$$$$,$(1)) -# Helper that expands to a shell command to echo a string exactly as it is in -# make. This uses printf instead of echo because printf's behaviour with respect -# to escape sequences is more portable than echo's across different shells -# (e.g., dash, bash). -exact_echo = printf '%s\n' '$(call escape_quotes,$(1))' - -# Helper to compare the command we're about to run against the command -# we logged the last time we ran the command. Produces an empty -# string (false) when the commands match. -# Tricky point: Make has no string-equality test function. -# The kernel uses the following, but it seems like it would have false -# positives, where one string reordered its arguments. -# arg_check = $(strip $(filter-out $(cmd_$(1)), $(cmd_$@)) \ -# $(filter-out $(cmd_$@), $(cmd_$(1)))) -# We instead substitute each for the empty string into the other, and -# say they're equal if both substitutions produce the empty string. -# .d files contain ? instead of spaces, take that into account. -command_changed = $(or $(subst $(cmd_$(1)),,$(cmd_$(call replace_spaces,$@))),\ - $(subst $(cmd_$(call replace_spaces,$@)),,$(cmd_$(1)))) - -# Helper that is non-empty when a prerequisite changes. -# Normally make does this implicitly, but we force rules to always run -# so we can check their command lines. -# $? -- new prerequisites -# $| -- order-only dependencies -prereq_changed = $(filter-out FORCE_DO_CMD,$(filter-out $|,$?)) - -# Helper that executes all postbuilds until one fails. -define do_postbuilds - @E=0;\ - for p in $(POSTBUILDS); do\ - eval $$p;\ - E=$$?;\ - if [ $$E -ne 0 ]; then\ - break;\ - fi;\ - done;\ - if [ $$E -ne 0 ]; then\ - rm -rf "$@";\ - exit $$E;\ - fi -endef - -# do_cmd: run a command via the above cmd_foo names, if necessary. -# Should always run for a given target to handle command-line changes. 
-# Second argument, if non-zero, makes it do asm/C/C++ dependency munging. -# Third argument, if non-zero, makes it do POSTBUILDS processing. -# Note: We intentionally do NOT call dirx for depfile, since it contains ? for -# spaces already and dirx strips the ? characters. -define do_cmd -$(if $(or $(command_changed),$(prereq_changed)), - @$(call exact_echo, $($(quiet)cmd_$(1))) - @mkdir -p "$(call dirx,$@)" "$(dir $(depfile))" - $(if $(findstring flock,$(word 2,$(cmd_$1))), - @$(cmd_$(1)) - @echo " $(quiet_cmd_$(1)): Finished", - @$(cmd_$(1)) - ) - @$(call exact_echo,$(call escape_vars,cmd_$(call replace_spaces,$@) := $(cmd_$(1)))) > $(depfile) - @$(if $(2),$(fixup_dep)) - $(if $(and $(3), $(POSTBUILDS)), - $(call do_postbuilds) - ) -) -endef - -# Declare the "all" target first so it is the default, -# even though we don't have the deps yet. -.PHONY: all -all: - -# make looks for ways to re-generate included makefiles, but in our case, we -# don't have a direct way. Explicitly telling make that it has nothing to do -# for them makes it go faster. -%.d: ; - -# Use FORCE_DO_CMD to force a target to run. Should be coupled with -# do_cmd. -.PHONY: FORCE_DO_CMD -FORCE_DO_CMD: - -TOOLSET := target -# Suffix rules, putting all outputs into $(obj). -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.m FORCE_DO_CMD - @$(call do_cmd,objc,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.mm FORCE_DO_CMD - @$(call do_cmd,objcxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.S FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.s FORCE_DO_CMD - @$(call do_cmd,cc,1) - -# Try building from generated source, too. -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.m FORCE_DO_CMD - @$(call do_cmd,objc,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.mm FORCE_DO_CMD - @$(call do_cmd,objcxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.S FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.s FORCE_DO_CMD - @$(call do_cmd,cc,1) - -$(obj).$(TOOLSET)/%.o: $(obj)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.m FORCE_DO_CMD - @$(call do_cmd,objc,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.mm FORCE_DO_CMD - @$(call do_cmd,objcxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.S FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.s FORCE_DO_CMD - @$(call do_cmd,cc,1) - - -ifeq ($(strip $(foreach prefix,$(NO_LOAD),\ - $(findstring $(join ^,$(prefix)),\ - $(join ^,test_error.target.mk)))),) - include test_error.target.mk -endif - -quiet_cmd_regen_makefile = ACTION Regenerating $@ -cmd_regen_makefile = cd $(srcdir); /Users/trott/io.js/deps/npm/node_modules/node-gyp/gyp/gyp_main.py -fmake --ignore-environment "--toplevel-dir=." 
-I/Users/trott/io.js/test/addons-napi/test_error/build/config.gypi -I/Users/trott/io.js/deps/npm/node_modules/node-gyp/addon.gypi -I/Users/trott/io.js/common.gypi "--depth=." "-Goutput_dir=." "--generator-output=build" "-Dlibrary=shared_library" "-Dvisibility=default" "-Dnode_root_dir=/Users/trott/io.js" "-Dnode_gyp_dir=/Users/trott/io.js/deps/npm/node_modules/node-gyp" "-Dnode_lib_file=/Users/trott/io.js/$(Configuration)/node.lib" "-Dmodule_root_dir=/Users/trott/io.js/test/addons-napi/test_error" "-Dnode_engine=v8" binding.gyp -Makefile: $(srcdir)/../../../deps/npm/node_modules/node-gyp/addon.gypi $(srcdir)/build/config.gypi $(srcdir)/binding.gyp $(srcdir)/../../../common.gypi - $(call do_cmd,regen_makefile) - -# "all" is a concatenation of the "all" targets from all the included -# sub-makefiles. This is just here to clarify. -all: - -# Add in dependency-tracking rules. $(all_deps) is the list of every single -# target in our tree. Only consider the ones with .d (dependency) info: -d_files := $(wildcard $(foreach f,$(all_deps),$(depsdir)/$(f).d)) -ifneq ($(d_files),) - include $(d_files) -endif diff --git a/test/addons-napi/test_error/build/binding.Makefile b/test/addons-napi/test_error/build/binding.Makefile deleted file mode 100644 index 30f57e354eb130..00000000000000 --- a/test/addons-napi/test_error/build/binding.Makefile +++ /dev/null @@ -1,6 +0,0 @@ -# This file is generated by gyp; do not edit. - -export builddir_name ?= ./build/. -.PHONY: all -all: - $(MAKE) test_error diff --git a/test/addons-napi/test_error/build/config.gypi b/test/addons-napi/test_error/build/config.gypi deleted file mode 100644 index 6a35cfccbb772b..00000000000000 --- a/test/addons-napi/test_error/build/config.gypi +++ /dev/null @@ -1,70 +0,0 @@ -# Do not edit. File was generated by node-gyp's "configure" step -{ - "target_defaults": { - "cflags": [], - "default_configuration": "Release", - "defines": [], - "include_dirs": [], - "libraries": [] - }, - "variables": { - "asan": 0, - "coverage": "false", - "debug_devtools": "node", - "debug_http2": "false", - "debug_nghttp2": "false", - "force_dynamic_crt": 0, - "host_arch": "x64", - "icu_data_file": "icudt59l.dat", - "icu_data_in": "../../deps/icu-small/source/data/in/icudt59l.dat", - "icu_endianness": "l", - "icu_gyp_path": "tools/icu/icu-generic.gyp", - "icu_locales": "en,root", - "icu_path": "deps/icu-small", - "icu_small": "true", - "icu_ver_major": "59", - "llvm_version": 0, - "node_byteorder": "little", - "node_enable_d8": "false", - "node_enable_v8_vtunejit": "false", - "node_install_npm": "true", - "node_module_version": 58, - "node_no_browser_globals": "false", - "node_prefix": "/usr/local", - "node_release_urlbase": "", - "node_shared": "false", - "node_shared_cares": "false", - "node_shared_http_parser": "false", - "node_shared_libuv": "false", - "node_shared_openssl": "false", - "node_shared_zlib": "false", - "node_tag": "", - "node_use_bundled_v8": "true", - "node_use_dtrace": "true", - "node_use_etw": "false", - "node_use_lttng": "false", - "node_use_openssl": "true", - "node_use_perfctr": "false", - "node_use_v8_platform": "true", - "node_without_node_options": "false", - "openssl_fips": "", - "openssl_no_asm": 0, - "shlib_suffix": "58.dylib", - "target_arch": "x64", - "uv_parent_path": "/deps/uv/", - "uv_use_dtrace": "true", - "v8_enable_gdbjit": 0, - "v8_enable_i18n_support": 1, - "v8_enable_inspector": 1, - "v8_no_strict_aliasing": 1, - "v8_optimized_debug": 0, - "v8_promise_internal_field_count": 1, - "v8_random_seed": 0, - "v8_trace_maps": 
0, - "v8_use_snapshot": "true", - "want_separate_host_toolset": 0, - "xcode_version": "8.0", - "nodedir": "/Users/trott/io.js", - "standalone_static_library": 1 - } -} diff --git a/test/addons-napi/test_error/build/gyp-mac-tool b/test/addons-napi/test_error/build/gyp-mac-tool deleted file mode 100755 index 8ef02b0493a003..00000000000000 --- a/test/addons-napi/test_error/build/gyp-mac-tool +++ /dev/null @@ -1,611 +0,0 @@ -#!/usr/bin/env python -# Generated by gyp. Do not edit. -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Utility functions to perform Xcode-style build steps. - -These functions are executed via gyp-mac-tool when using the Makefile generator. -""" - -import fcntl -import fnmatch -import glob -import json -import os -import plistlib -import re -import shutil -import string -import subprocess -import sys -import tempfile - - -def main(args): - executor = MacTool() - exit_code = executor.Dispatch(args) - if exit_code is not None: - sys.exit(exit_code) - - -class MacTool(object): - """This class performs all the Mac tooling steps. The methods can either be - executed directly, or dispatched from an argument list.""" - - def Dispatch(self, args): - """Dispatches a string command to a method.""" - if len(args) < 1: - raise Exception("Not enough arguments") - - method = "Exec%s" % self._CommandifyName(args[0]) - return getattr(self, method)(*args[1:]) - - def _CommandifyName(self, name_string): - """Transforms a tool name like copy-info-plist to CopyInfoPlist""" - return name_string.title().replace('-', '') - - def ExecCopyBundleResource(self, source, dest, convert_to_binary): - """Copies a resource file to the bundle/Resources directory, performing any - necessary compilation on each resource.""" - extension = os.path.splitext(source)[1].lower() - if os.path.isdir(source): - # Copy tree. - # TODO(thakis): This copies file attributes like mtime, while the - # single-file branch below doesn't. This should probably be changed to - # be consistent with the single-file branch. - if os.path.exists(dest): - shutil.rmtree(dest) - shutil.copytree(source, dest) - elif extension == '.xib': - return self._CopyXIBFile(source, dest) - elif extension == '.storyboard': - return self._CopyXIBFile(source, dest) - elif extension == '.strings': - self._CopyStringsFile(source, dest, convert_to_binary) - else: - shutil.copy(source, dest) - - def _CopyXIBFile(self, source, dest): - """Compiles a XIB file with ibtool into a binary plist in the bundle.""" - - # ibtool sometimes crashes with relative paths. See crbug.com/314728. 
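
The dispatch in MacTool above is purely name-based: the first argument passed by the MACTOOL make rule is turned into a method name and looked up with getattr. A minimal, self-contained sketch of that mapping (the Tool class and file names here are made up):

    # Sketch of gyp-mac-tool's dispatch: "copy-info-plist" -> ExecCopyInfoPlist.
    def commandify(name):
        # Same transform as _CommandifyName above: title-case, drop hyphens.
        return name.title().replace("-", "")

    class Tool(object):
        def Dispatch(self, args):
            method = "Exec%s" % commandify(args[0])
            return getattr(self, method)(*args[1:])

        def ExecCopyInfoPlist(self, source, dest):
            print("would copy %s to %s" % (source, dest))

    Tool().Dispatch(["copy-info-plist", "Info.plist", "out/Info.plist"])
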
- base = os.path.dirname(os.path.realpath(__file__)) - if os.path.relpath(source): - source = os.path.join(base, source) - if os.path.relpath(dest): - dest = os.path.join(base, dest) - - args = ['xcrun', 'ibtool', '--errors', '--warnings', '--notices', - '--output-format', 'human-readable-text', '--compile', dest, source] - ibtool_section_re = re.compile(r'/\*.*\*/') - ibtool_re = re.compile(r'.*note:.*is clipping its content') - ibtoolout = subprocess.Popen(args, stdout=subprocess.PIPE) - current_section_header = None - for line in ibtoolout.stdout: - if ibtool_section_re.match(line): - current_section_header = line - elif not ibtool_re.match(line): - if current_section_header: - sys.stdout.write(current_section_header) - current_section_header = None - sys.stdout.write(line) - return ibtoolout.returncode - - def _ConvertToBinary(self, dest): - subprocess.check_call([ - 'xcrun', 'plutil', '-convert', 'binary1', '-o', dest, dest]) - - def _CopyStringsFile(self, source, dest, convert_to_binary): - """Copies a .strings file using iconv to reconvert the input into UTF-16.""" - input_code = self._DetectInputEncoding(source) or "UTF-8" - - # Xcode's CpyCopyStringsFile / builtin-copyStrings seems to call - # CFPropertyListCreateFromXMLData() behind the scenes; at least it prints - # CFPropertyListCreateFromXMLData(): Old-style plist parser: missing - # semicolon in dictionary. - # on invalid files. Do the same kind of validation. - import CoreFoundation - s = open(source, 'rb').read() - d = CoreFoundation.CFDataCreate(None, s, len(s)) - _, error = CoreFoundation.CFPropertyListCreateFromXMLData(None, d, 0, None) - if error: - return - - fp = open(dest, 'wb') - fp.write(s.decode(input_code).encode('UTF-16')) - fp.close() - - if convert_to_binary == 'True': - self._ConvertToBinary(dest) - - def _DetectInputEncoding(self, file_name): - """Reads the first few bytes from file_name and tries to guess the text - encoding. Returns None as a guess if it can't detect it.""" - fp = open(file_name, 'rb') - try: - header = fp.read(3) - except e: - fp.close() - return None - fp.close() - if header.startswith("\xFE\xFF"): - return "UTF-16" - elif header.startswith("\xFF\xFE"): - return "UTF-16" - elif header.startswith("\xEF\xBB\xBF"): - return "UTF-8" - else: - return None - - def ExecCopyInfoPlist(self, source, dest, convert_to_binary, *keys): - """Copies the |source| Info.plist to the destination directory |dest|.""" - # Read the source Info.plist into memory. - fd = open(source, 'r') - lines = fd.read() - fd.close() - - # Insert synthesized key/value pairs (e.g. BuildMachineOSBuild). - plist = plistlib.readPlistFromString(lines) - if keys: - plist = dict(plist.items() + json.loads(keys[0]).items()) - lines = plistlib.writePlistToString(plist) - - # Go through all the environment variables and replace them as variables in - # the file. - IDENT_RE = re.compile(r'[/\s]') - for key in os.environ: - if key.startswith('_'): - continue - evar = '${%s}' % key - evalue = os.environ[key] - lines = string.replace(lines, evar, evalue) - - # Xcode supports various suffices on environment variables, which are - # all undocumented. :rfc1034identifier is used in the standard project - # template these days, and :identifier was used earlier. They are used to - # convert non-url characters into things that look like valid urls -- - # except that the replacement character for :identifier, '_' isn't valid - # in a URL either -- oops, hence :rfc1034identifier was born. 
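
The :identifier / :rfc1034identifier handling described just above only replaces slashes and whitespace in the environment value; everything else passes through. A tiny sketch with a hypothetical PRODUCT_NAME value:

    import re

    # Sketch of the ${VAR:identifier} / ${VAR:rfc1034identifier} expansion above.
    IDENT_RE = re.compile(r'[/\s]')

    value = "My App/Beta"               # hypothetical environment value
    print(IDENT_RE.sub('_', value))     # ${PRODUCT_NAME:identifier}        -> My_App_Beta
    print(IDENT_RE.sub('-', value))     # ${PRODUCT_NAME:rfc1034identifier} -> My-App-Beta
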
- evar = '${%s:identifier}' % key - evalue = IDENT_RE.sub('_', os.environ[key]) - lines = string.replace(lines, evar, evalue) - - evar = '${%s:rfc1034identifier}' % key - evalue = IDENT_RE.sub('-', os.environ[key]) - lines = string.replace(lines, evar, evalue) - - # Remove any keys with values that haven't been replaced. - lines = lines.split('\n') - for i in range(len(lines)): - if lines[i].strip().startswith("${"): - lines[i] = None - lines[i - 1] = None - lines = '\n'.join(filter(lambda x: x is not None, lines)) - - # Write out the file with variables replaced. - fd = open(dest, 'w') - fd.write(lines) - fd.close() - - # Now write out PkgInfo file now that the Info.plist file has been - # "compiled". - self._WritePkgInfo(dest) - - if convert_to_binary == 'True': - self._ConvertToBinary(dest) - - def _WritePkgInfo(self, info_plist): - """This writes the PkgInfo file from the data stored in Info.plist.""" - plist = plistlib.readPlist(info_plist) - if not plist: - return - - # Only create PkgInfo for executable types. - package_type = plist['CFBundlePackageType'] - if package_type != 'APPL': - return - - # The format of PkgInfo is eight characters, representing the bundle type - # and bundle signature, each four characters. If that is missing, four - # '?' characters are used instead. - signature_code = plist.get('CFBundleSignature', '????') - if len(signature_code) != 4: # Wrong length resets everything, too. - signature_code = '?' * 4 - - dest = os.path.join(os.path.dirname(info_plist), 'PkgInfo') - fp = open(dest, 'w') - fp.write('%s%s' % (package_type, signature_code)) - fp.close() - - def ExecFlock(self, lockfile, *cmd_list): - """Emulates the most basic behavior of Linux's flock(1).""" - # Rely on exception handling to report errors. - fd = os.open(lockfile, os.O_RDONLY|os.O_NOCTTY|os.O_CREAT, 0o666) - fcntl.flock(fd, fcntl.LOCK_EX) - return subprocess.call(cmd_list) - - def ExecFilterLibtool(self, *cmd_list): - """Calls libtool and filters out '/path/to/libtool: file: foo.o has no - symbols'.""" - libtool_re = re.compile(r'^.*libtool: file: .* has no symbols$') - libtool_re5 = re.compile( - r'^.*libtool: warning for library: ' + - r'.* the table of contents is empty ' + - r'\(no object file members in the library define global symbols\)$') - env = os.environ.copy() - # Ref: - # http://www.opensource.apple.com/source/cctools/cctools-809/misc/libtool.c - # The problem with this flag is that it resets the file mtime on the file to - # epoch=0, e.g. 1970-1-1 or 1969-12-31 depending on timezone. - env['ZERO_AR_DATE'] = '1' - libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env) - _, err = libtoolout.communicate() - for line in err.splitlines(): - if not libtool_re.match(line) and not libtool_re5.match(line): - print >>sys.stderr, line - # Unconditionally touch the output .a file on the command line if present - # and the command succeeded. A bit hacky. - if not libtoolout.returncode: - for i in range(len(cmd_list) - 1): - if cmd_list[i] == "-o" and cmd_list[i+1].endswith('.a'): - os.utime(cmd_list[i+1], None) - break - return libtoolout.returncode - - def ExecPackageFramework(self, framework, version): - """Takes a path to Something.framework and the Current version of that and - sets up all the symlinks.""" - # Find the name of the binary based on the part before the ".framework". 
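
_WritePkgInfo above packs the bundle type and signature into the fixed eight-character PkgInfo format, falling back to question marks for a missing or malformed signature. A short sketch of that encoding with hypothetical plist values:

    # Sketch of the PkgInfo encoding used by _WritePkgInfo above:
    # four characters of CFBundlePackageType plus four of CFBundleSignature.
    def pkginfo(plist):
        package_type = plist['CFBundlePackageType']       # e.g. 'APPL'
        signature = plist.get('CFBundleSignature', '????')
        if len(signature) != 4:                            # wrong length resets it
            signature = '?' * 4
        return package_type + signature

    print(pkginfo({'CFBundlePackageType': 'APPL'}))                               # APPL????
    print(pkginfo({'CFBundlePackageType': 'APPL', 'CFBundleSignature': 'hook'}))  # APPLhook
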
- binary = os.path.basename(framework).split('.')[0] - - CURRENT = 'Current' - RESOURCES = 'Resources' - VERSIONS = 'Versions' - - if not os.path.exists(os.path.join(framework, VERSIONS, version, binary)): - # Binary-less frameworks don't seem to contain symlinks (see e.g. - # chromium's out/Debug/org.chromium.Chromium.manifest/ bundle). - return - - # Move into the framework directory to set the symlinks correctly. - pwd = os.getcwd() - os.chdir(framework) - - # Set up the Current version. - self._Relink(version, os.path.join(VERSIONS, CURRENT)) - - # Set up the root symlinks. - self._Relink(os.path.join(VERSIONS, CURRENT, binary), binary) - self._Relink(os.path.join(VERSIONS, CURRENT, RESOURCES), RESOURCES) - - # Back to where we were before! - os.chdir(pwd) - - def _Relink(self, dest, link): - """Creates a symlink to |dest| named |link|. If |link| already exists, - it is overwritten.""" - if os.path.lexists(link): - os.remove(link) - os.symlink(dest, link) - - def ExecCompileXcassets(self, keys, *inputs): - """Compiles multiple .xcassets files into a single .car file. - - This invokes 'actool' to compile all the inputs .xcassets files. The - |keys| arguments is a json-encoded dictionary of extra arguments to - pass to 'actool' when the asset catalogs contains an application icon - or a launch image. - - Note that 'actool' does not create the Assets.car file if the asset - catalogs does not contains imageset. - """ - command_line = [ - 'xcrun', 'actool', '--output-format', 'human-readable-text', - '--compress-pngs', '--notices', '--warnings', '--errors', - ] - is_iphone_target = 'IPHONEOS_DEPLOYMENT_TARGET' in os.environ - if is_iphone_target: - platform = os.environ['CONFIGURATION'].split('-')[-1] - if platform not in ('iphoneos', 'iphonesimulator'): - platform = 'iphonesimulator' - command_line.extend([ - '--platform', platform, '--target-device', 'iphone', - '--target-device', 'ipad', '--minimum-deployment-target', - os.environ['IPHONEOS_DEPLOYMENT_TARGET'], '--compile', - os.path.abspath(os.environ['CONTENTS_FOLDER_PATH']), - ]) - else: - command_line.extend([ - '--platform', 'macosx', '--target-device', 'mac', - '--minimum-deployment-target', os.environ['MACOSX_DEPLOYMENT_TARGET'], - '--compile', - os.path.abspath(os.environ['UNLOCALIZED_RESOURCES_FOLDER_PATH']), - ]) - if keys: - keys = json.loads(keys) - for key, value in keys.iteritems(): - arg_name = '--' + key - if isinstance(value, bool): - if value: - command_line.append(arg_name) - elif isinstance(value, list): - for v in value: - command_line.append(arg_name) - command_line.append(str(v)) - else: - command_line.append(arg_name) - command_line.append(str(value)) - # Note: actool crashes if inputs path are relative, so use os.path.abspath - # to get absolute path name for inputs. - command_line.extend(map(os.path.abspath, inputs)) - subprocess.check_call(command_line) - - def ExecMergeInfoPlist(self, output, *inputs): - """Merge multiple .plist files into a single .plist file.""" - merged_plist = {} - for path in inputs: - plist = self._LoadPlistMaybeBinary(path) - self._MergePlist(merged_plist, plist) - plistlib.writePlist(merged_plist, output) - - def ExecCodeSignBundle(self, key, resource_rules, entitlements, provisioning): - """Code sign a bundle. - - This function tries to code sign an iOS bundle, following the same - algorithm as Xcode: - 1. copy ResourceRules.plist from the user or the SDK into the bundle, - 2. 
pick the provisioning profile that best match the bundle identifier, - and copy it into the bundle as embedded.mobileprovision, - 3. copy Entitlements.plist from user or SDK next to the bundle, - 4. code sign the bundle. - """ - resource_rules_path = self._InstallResourceRules(resource_rules) - substitutions, overrides = self._InstallProvisioningProfile( - provisioning, self._GetCFBundleIdentifier()) - entitlements_path = self._InstallEntitlements( - entitlements, substitutions, overrides) - subprocess.check_call([ - 'codesign', '--force', '--sign', key, '--resource-rules', - resource_rules_path, '--entitlements', entitlements_path, - os.path.join( - os.environ['TARGET_BUILD_DIR'], - os.environ['FULL_PRODUCT_NAME'])]) - - def _InstallResourceRules(self, resource_rules): - """Installs ResourceRules.plist from user or SDK into the bundle. - - Args: - resource_rules: string, optional, path to the ResourceRules.plist file - to use, default to "${SDKROOT}/ResourceRules.plist" - - Returns: - Path to the copy of ResourceRules.plist into the bundle. - """ - source_path = resource_rules - target_path = os.path.join( - os.environ['BUILT_PRODUCTS_DIR'], - os.environ['CONTENTS_FOLDER_PATH'], - 'ResourceRules.plist') - if not source_path: - source_path = os.path.join( - os.environ['SDKROOT'], 'ResourceRules.plist') - shutil.copy2(source_path, target_path) - return target_path - - def _InstallProvisioningProfile(self, profile, bundle_identifier): - """Installs embedded.mobileprovision into the bundle. - - Args: - profile: string, optional, short name of the .mobileprovision file - to use, if empty or the file is missing, the best file installed - will be used - bundle_identifier: string, value of CFBundleIdentifier from Info.plist - - Returns: - A tuple containing two dictionary: variables substitutions and values - to overrides when generating the entitlements file. - """ - source_path, provisioning_data, team_id = self._FindProvisioningProfile( - profile, bundle_identifier) - target_path = os.path.join( - os.environ['BUILT_PRODUCTS_DIR'], - os.environ['CONTENTS_FOLDER_PATH'], - 'embedded.mobileprovision') - shutil.copy2(source_path, target_path) - substitutions = self._GetSubstitutions(bundle_identifier, team_id + '.') - return substitutions, provisioning_data['Entitlements'] - - def _FindProvisioningProfile(self, profile, bundle_identifier): - """Finds the .mobileprovision file to use for signing the bundle. - - Checks all the installed provisioning profiles (or if the user specified - the PROVISIONING_PROFILE variable, only consult it) and select the most - specific that correspond to the bundle identifier. - - Args: - profile: string, optional, short name of the .mobileprovision file - to use, if empty or the file is missing, the best file installed - will be used - bundle_identifier: string, value of CFBundleIdentifier from Info.plist - - Returns: - A tuple of the path to the selected provisioning profile, the data of - the embedded plist in the provisioning profile and the team identifier - to use for code signing. - - Raises: - SystemExit: if no .mobileprovision can be used to sign the bundle. 
- """ - profiles_dir = os.path.join( - os.environ['HOME'], 'Library', 'MobileDevice', 'Provisioning Profiles') - if not os.path.isdir(profiles_dir): - print >>sys.stderr, ( - 'cannot find mobile provisioning for %s' % bundle_identifier) - sys.exit(1) - provisioning_profiles = None - if profile: - profile_path = os.path.join(profiles_dir, profile + '.mobileprovision') - if os.path.exists(profile_path): - provisioning_profiles = [profile_path] - if not provisioning_profiles: - provisioning_profiles = glob.glob( - os.path.join(profiles_dir, '*.mobileprovision')) - valid_provisioning_profiles = {} - for profile_path in provisioning_profiles: - profile_data = self._LoadProvisioningProfile(profile_path) - app_id_pattern = profile_data.get( - 'Entitlements', {}).get('application-identifier', '') - for team_identifier in profile_data.get('TeamIdentifier', []): - app_id = '%s.%s' % (team_identifier, bundle_identifier) - if fnmatch.fnmatch(app_id, app_id_pattern): - valid_provisioning_profiles[app_id_pattern] = ( - profile_path, profile_data, team_identifier) - if not valid_provisioning_profiles: - print >>sys.stderr, ( - 'cannot find mobile provisioning for %s' % bundle_identifier) - sys.exit(1) - # If the user has multiple provisioning profiles installed that can be - # used for ${bundle_identifier}, pick the most specific one (ie. the - # provisioning profile whose pattern is the longest). - selected_key = max(valid_provisioning_profiles, key=lambda v: len(v)) - return valid_provisioning_profiles[selected_key] - - def _LoadProvisioningProfile(self, profile_path): - """Extracts the plist embedded in a provisioning profile. - - Args: - profile_path: string, path to the .mobileprovision file - - Returns: - Content of the plist embedded in the provisioning profile as a dictionary. - """ - with tempfile.NamedTemporaryFile() as temp: - subprocess.check_call([ - 'security', 'cms', '-D', '-i', profile_path, '-o', temp.name]) - return self._LoadPlistMaybeBinary(temp.name) - - def _MergePlist(self, merged_plist, plist): - """Merge |plist| into |merged_plist|.""" - for key, value in plist.iteritems(): - if isinstance(value, dict): - merged_value = merged_plist.get(key, {}) - if isinstance(merged_value, dict): - self._MergePlist(merged_value, value) - merged_plist[key] = merged_value - else: - merged_plist[key] = value - else: - merged_plist[key] = value - - def _LoadPlistMaybeBinary(self, plist_path): - """Loads into a memory a plist possibly encoded in binary format. - - This is a wrapper around plistlib.readPlist that tries to convert the - plist to the XML format if it can't be parsed (assuming that it is in - the binary format). - - Args: - plist_path: string, path to a plist file, in XML or binary format - - Returns: - Content of the plist as a dictionary. - """ - try: - # First, try to read the file using plistlib that only supports XML, - # and if an exception is raised, convert a temporary copy to XML and - # load that copy. - return plistlib.readPlist(plist_path) - except: - pass - with tempfile.NamedTemporaryFile() as temp: - shutil.copy2(plist_path, temp.name) - subprocess.check_call(['plutil', '-convert', 'xml1', temp.name]) - return plistlib.readPlist(temp.name) - - def _GetSubstitutions(self, bundle_identifier, app_identifier_prefix): - """Constructs a dictionary of variable substitutions for Entitlements.plist. 
- - Args: - bundle_identifier: string, value of CFBundleIdentifier from Info.plist - app_identifier_prefix: string, value for AppIdentifierPrefix - - Returns: - Dictionary of substitutions to apply when generating Entitlements.plist. - """ - return { - 'CFBundleIdentifier': bundle_identifier, - 'AppIdentifierPrefix': app_identifier_prefix, - } - - def _GetCFBundleIdentifier(self): - """Extracts CFBundleIdentifier value from Info.plist in the bundle. - - Returns: - Value of CFBundleIdentifier in the Info.plist located in the bundle. - """ - info_plist_path = os.path.join( - os.environ['TARGET_BUILD_DIR'], - os.environ['INFOPLIST_PATH']) - info_plist_data = self._LoadPlistMaybeBinary(info_plist_path) - return info_plist_data['CFBundleIdentifier'] - - def _InstallEntitlements(self, entitlements, substitutions, overrides): - """Generates and install the ${BundleName}.xcent entitlements file. - - Expands variables "$(variable)" pattern in the source entitlements file, - add extra entitlements defined in the .mobileprovision file and the copy - the generated plist to "${BundlePath}.xcent". - - Args: - entitlements: string, optional, path to the Entitlements.plist template - to use, defaults to "${SDKROOT}/Entitlements.plist" - substitutions: dictionary, variable substitutions - overrides: dictionary, values to add to the entitlements - - Returns: - Path to the generated entitlements file. - """ - source_path = entitlements - target_path = os.path.join( - os.environ['BUILT_PRODUCTS_DIR'], - os.environ['PRODUCT_NAME'] + '.xcent') - if not source_path: - source_path = os.path.join( - os.environ['SDKROOT'], - 'Entitlements.plist') - shutil.copy2(source_path, target_path) - data = self._LoadPlistMaybeBinary(target_path) - data = self._ExpandVariables(data, substitutions) - if overrides: - for key in overrides: - if key not in data: - data[key] = overrides[key] - plistlib.writePlist(data, target_path) - return target_path - - def _ExpandVariables(self, data, substitutions): - """Expands variables "$(variable)" in data. - - Args: - data: object, can be either string, list or dictionary - substitutions: dictionary, variable substitutions to perform - - Returns: - Copy of data where each references to "$(variable)" has been replaced - by the corresponding value found in substitutions, or left intact if - the key was not found. - """ - if isinstance(data, str): - for key, value in substitutions.iteritems(): - data = data.replace('$(%s)' % key, value) - return data - if isinstance(data, list): - return [self._ExpandVariables(v, substitutions) for v in data] - if isinstance(data, dict): - return {k: self._ExpandVariables(data[k], substitutions) for k in data} - return data - -if __name__ == '__main__': - sys.exit(main(sys.argv[1:])) diff --git a/test/addons-napi/test_error/build/test_error.target.mk b/test/addons-napi/test_error/build/test_error.target.mk deleted file mode 100644 index 6cf5bbf77d3aa5..00000000000000 --- a/test/addons-napi/test_error/build/test_error.target.mk +++ /dev/null @@ -1,178 +0,0 @@ -# This file is generated by gyp; do not edit. - -TOOLSET := target -TARGET := test_error -DEFS_Debug := \ - '-DNODE_GYP_MODULE_NAME=test_error' \ - '-DUSING_UV_SHARED=1' \ - '-DUSING_V8_SHARED=1' \ - '-DV8_DEPRECATION_WARNINGS=1' \ - '-D_DARWIN_USE_64_BIT_INODE=1' \ - '-D_LARGEFILE_SOURCE' \ - '-D_FILE_OFFSET_BITS=64' \ - '-DBUILDING_NODE_EXTENSION' \ - '-DDEBUG' \ - '-D_DEBUG' \ - '-DV8_ENABLE_CHECKS' - -# Flags passed to all source files. 
-CFLAGS_Debug := \ - -O0 \ - -gdwarf-2 \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -Wall \ - -Wendif-labels \ - -W \ - -Wno-unused-parameter - -# Flags passed to only C files. -CFLAGS_C_Debug := \ - -fno-strict-aliasing - -# Flags passed to only C++ files. -CFLAGS_CC_Debug := \ - -std=gnu++0x \ - -stdlib=libc++ \ - -fno-rtti \ - -fno-exceptions \ - -fno-threadsafe-statics \ - -fno-strict-aliasing - -# Flags passed to only ObjC files. -CFLAGS_OBJC_Debug := - -# Flags passed to only ObjC++ files. -CFLAGS_OBJCC_Debug := - -INCS_Debug := \ - -I/Users/trott/io.js/include/node \ - -I/Users/trott/io.js/src \ - -I/Users/trott/io.js/deps/uv/include \ - -I/Users/trott/io.js/deps/v8/include - -DEFS_Release := \ - '-DNODE_GYP_MODULE_NAME=test_error' \ - '-DUSING_UV_SHARED=1' \ - '-DUSING_V8_SHARED=1' \ - '-DV8_DEPRECATION_WARNINGS=1' \ - '-D_DARWIN_USE_64_BIT_INODE=1' \ - '-D_LARGEFILE_SOURCE' \ - '-D_FILE_OFFSET_BITS=64' \ - '-DBUILDING_NODE_EXTENSION' - -# Flags passed to all source files. -CFLAGS_Release := \ - -Os \ - -gdwarf-2 \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -Wall \ - -Wendif-labels \ - -W \ - -Wno-unused-parameter - -# Flags passed to only C files. -CFLAGS_C_Release := \ - -fno-strict-aliasing - -# Flags passed to only C++ files. -CFLAGS_CC_Release := \ - -std=gnu++0x \ - -stdlib=libc++ \ - -fno-rtti \ - -fno-exceptions \ - -fno-threadsafe-statics \ - -fno-strict-aliasing - -# Flags passed to only ObjC files. -CFLAGS_OBJC_Release := - -# Flags passed to only ObjC++ files. -CFLAGS_OBJCC_Release := - -INCS_Release := \ - -I/Users/trott/io.js/include/node \ - -I/Users/trott/io.js/src \ - -I/Users/trott/io.js/deps/uv/include \ - -I/Users/trott/io.js/deps/v8/include - -OBJS := \ - $(obj).target/$(TARGET)/test_error.o - -# Add to the list of files we specially track dependencies for. -all_deps += $(OBJS) - -# CFLAGS et al overrides must be target-local. -# See "Target-specific Variable Values" in the GNU Make manual. -$(OBJS): TOOLSET := $(TOOLSET) -$(OBJS): GYP_CFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) -$(OBJS): GYP_CXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) -$(OBJS): GYP_OBJCFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) $(CFLAGS_OBJC_$(BUILDTYPE)) -$(OBJS): GYP_OBJCXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) $(CFLAGS_OBJCC_$(BUILDTYPE)) - -# Suffix rules, putting all outputs into $(obj). - -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) - -# Try building from generated source, too. - -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) - -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) - -# End of this set of suffix rules -### Rules for final target. 
-LDFLAGS_Debug := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -L$(builddir) \ - -stdlib=libc++ - -LIBTOOLFLAGS_Debug := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first - -LDFLAGS_Release := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -L$(builddir) \ - -stdlib=libc++ - -LIBTOOLFLAGS_Release := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first - -LIBS := - -$(builddir)/test_error.node: GYP_LDFLAGS := $(LDFLAGS_$(BUILDTYPE)) -$(builddir)/test_error.node: LIBS := $(LIBS) -$(builddir)/test_error.node: GYP_LIBTOOLFLAGS := $(LIBTOOLFLAGS_$(BUILDTYPE)) -$(builddir)/test_error.node: TOOLSET := $(TOOLSET) -$(builddir)/test_error.node: $(OBJS) FORCE_DO_CMD - $(call do_cmd,solink_module) - -all_deps += $(builddir)/test_error.node -# Add target alias -.PHONY: test_error -test_error: $(builddir)/test_error.node - -# Short alias for building this executable. -.PHONY: test_error.node -test_error.node: $(builddir)/test_error.node - -# Add executable to "all" target. -.PHONY: all -all: $(builddir)/test_error.node - diff --git a/test/addons-napi/test_exception/build/Makefile b/test/addons-napi/test_exception/build/Makefile deleted file mode 100644 index a21781273bbc21..00000000000000 --- a/test/addons-napi/test_exception/build/Makefile +++ /dev/null @@ -1,342 +0,0 @@ -# We borrow heavily from the kernel build setup, though we are simpler since -# we don't have Kconfig tweaking settings on us. - -# The implicit make rules have it looking for RCS files, among other things. -# We instead explicitly write all the rules we care about. -# It's even quicker (saves ~200ms) to pass -r on the command line. -MAKEFLAGS=-r - -# The source directory tree. -srcdir := .. -abs_srcdir := $(abspath $(srcdir)) - -# The name of the builddir. -builddir_name ?= . - -# The V=1 flag on command line makes us verbosely print command lines. -ifdef V - quiet= -else - quiet=quiet_ -endif - -# Specify BUILDTYPE=Release on the command line for a release build. -BUILDTYPE ?= Release - -# Directory all our build output goes into. -# Note that this must be two directories beneath src/ for unit tests to pass, -# as they reach into the src/ directory for data with relative paths. -builddir ?= $(builddir_name)/$(BUILDTYPE) -abs_builddir := $(abspath $(builddir)) -depsdir := $(builddir)/.deps - -# Object output directory. -obj := $(builddir)/obj -abs_obj := $(abspath $(obj)) - -# We build up a list of every single one of the targets so we can slurp in the -# generated dependency rule Makefiles in one pass. -all_deps := - - - -CC.target ?= $(CC) -CFLAGS.target ?= $(CPPFLAGS) $(CFLAGS) -CXX.target ?= $(CXX) -CXXFLAGS.target ?= $(CPPFLAGS) $(CXXFLAGS) -LINK.target ?= $(LINK) -LDFLAGS.target ?= $(LDFLAGS) -AR.target ?= $(AR) - -# C++ apps need to be linked with g++. -LINK ?= $(CXX.target) - -# TODO(evan): move all cross-compilation logic to gyp-time so we don't need -# to replicate this environment fallback in make as well. -CC.host ?= gcc -CFLAGS.host ?= $(CPPFLAGS_host) $(CFLAGS_host) -CXX.host ?= g++ -CXXFLAGS.host ?= $(CPPFLAGS_host) $(CXXFLAGS_host) -LINK.host ?= $(CXX.host) -LDFLAGS.host ?= -AR.host ?= ar - -# Define a dir function that can handle spaces. -# http://www.gnu.org/software/make/manual/make.html#Syntax-of-Functions -# "leading spaces cannot appear in the text of the first argument as written. 
-# These characters can be put into the argument value by variable substitution." -empty := -space := $(empty) $(empty) - -# http://stackoverflow.com/questions/1189781/using-make-dir-or-notdir-on-a-path-with-spaces -replace_spaces = $(subst $(space),?,$1) -unreplace_spaces = $(subst ?,$(space),$1) -dirx = $(call unreplace_spaces,$(dir $(call replace_spaces,$1))) - -# Flags to make gcc output dependency info. Note that you need to be -# careful here to use the flags that ccache and distcc can understand. -# We write to a dep file on the side first and then rename at the end -# so we can't end up with a broken dep file. -depfile = $(depsdir)/$(call replace_spaces,$@).d -DEPFLAGS = -MMD -MF $(depfile).raw - -# We have to fixup the deps output in a few ways. -# (1) the file output should mention the proper .o file. -# ccache or distcc lose the path to the target, so we convert a rule of -# the form: -# foobar.o: DEP1 DEP2 -# into -# path/to/foobar.o: DEP1 DEP2 -# (2) we want missing files not to cause us to fail to build. -# We want to rewrite -# foobar.o: DEP1 DEP2 \ -# DEP3 -# to -# DEP1: -# DEP2: -# DEP3: -# so if the files are missing, they're just considered phony rules. -# We have to do some pretty insane escaping to get those backslashes -# and dollar signs past make, the shell, and sed at the same time. -# Doesn't work with spaces, but that's fine: .d files have spaces in -# their names replaced with other characters. -define fixup_dep -# The depfile may not exist if the input file didn't have any #includes. -touch $(depfile).raw -# Fixup path as in (1). -sed -e "s|^$(notdir $@)|$@|" $(depfile).raw >> $(depfile) -# Add extra rules as in (2). -# We remove slashes and replace spaces with new lines; -# remove blank lines; -# delete the first line and append a colon to the remaining lines. -sed -e 's|\\||' -e 'y| |\n|' $(depfile).raw |\ - grep -v '^$$' |\ - sed -e 1d -e 's|$$|:|' \ - >> $(depfile) -rm $(depfile).raw -endef - -# Command definitions: -# - cmd_foo is the actual command to run; -# - quiet_cmd_foo is the brief-output summary of the command. - -quiet_cmd_cc = CC($(TOOLSET)) $@ -cmd_cc = $(CC.$(TOOLSET)) $(GYP_CFLAGS) $(DEPFLAGS) $(CFLAGS.$(TOOLSET)) -c -o $@ $< - -quiet_cmd_cxx = CXX($(TOOLSET)) $@ -cmd_cxx = $(CXX.$(TOOLSET)) $(GYP_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< - -quiet_cmd_objc = CXX($(TOOLSET)) $@ -cmd_objc = $(CC.$(TOOLSET)) $(GYP_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $< - -quiet_cmd_objcxx = CXX($(TOOLSET)) $@ -cmd_objcxx = $(CXX.$(TOOLSET)) $(GYP_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $< - -# Commands for precompiled header files. -quiet_cmd_pch_c = CXX($(TOOLSET)) $@ -cmd_pch_c = $(CC.$(TOOLSET)) $(GYP_PCH_CFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< -quiet_cmd_pch_cc = CXX($(TOOLSET)) $@ -cmd_pch_cc = $(CC.$(TOOLSET)) $(GYP_PCH_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< -quiet_cmd_pch_m = CXX($(TOOLSET)) $@ -cmd_pch_m = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $< -quiet_cmd_pch_mm = CXX($(TOOLSET)) $@ -cmd_pch_mm = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $< - -# gyp-mac-tool is written next to the root Makefile by gyp. -# Use $(4) for the command, since $(2) and $(3) are used as flag by do_cmd -# already. 
-quiet_cmd_mac_tool = MACTOOL $(4) $< -cmd_mac_tool = ./gyp-mac-tool $(4) $< "$@" - -quiet_cmd_mac_package_framework = PACKAGE FRAMEWORK $@ -cmd_mac_package_framework = ./gyp-mac-tool package-framework "$@" $(4) - -quiet_cmd_infoplist = INFOPLIST $@ -cmd_infoplist = $(CC.$(TOOLSET)) -E -P -Wno-trigraphs -x c $(INFOPLIST_DEFINES) "$<" -o "$@" - -quiet_cmd_touch = TOUCH $@ -cmd_touch = touch $@ - -quiet_cmd_copy = COPY $@ -# send stderr to /dev/null to ignore messages when linking directories. -cmd_copy = rm -rf "$@" && cp -af "$<" "$@" - -quiet_cmd_alink = LIBTOOL-STATIC $@ -cmd_alink = rm -f $@ && ./gyp-mac-tool filter-libtool libtool $(GYP_LIBTOOLFLAGS) -static -o $@ $(filter %.o,$^) - -quiet_cmd_link = LINK($(TOOLSET)) $@ -cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS) - -quiet_cmd_solink = SOLINK($(TOOLSET)) $@ -cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS) - -quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@ -cmd_solink_module = $(LINK.$(TOOLSET)) -bundle $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS) - - -# Define an escape_quotes function to escape single quotes. -# This allows us to handle quotes properly as long as we always use -# use single quotes and escape_quotes. -escape_quotes = $(subst ','\'',$(1)) -# This comment is here just to include a ' to unconfuse syntax highlighting. -# Define an escape_vars function to escape '$' variable syntax. -# This allows us to read/write command lines with shell variables (e.g. -# $LD_LIBRARY_PATH), without triggering make substitution. -escape_vars = $(subst $$,$$$$,$(1)) -# Helper that expands to a shell command to echo a string exactly as it is in -# make. This uses printf instead of echo because printf's behaviour with respect -# to escape sequences is more portable than echo's across different shells -# (e.g., dash, bash). -exact_echo = printf '%s\n' '$(call escape_quotes,$(1))' - -# Helper to compare the command we're about to run against the command -# we logged the last time we ran the command. Produces an empty -# string (false) when the commands match. -# Tricky point: Make has no string-equality test function. -# The kernel uses the following, but it seems like it would have false -# positives, where one string reordered its arguments. -# arg_check = $(strip $(filter-out $(cmd_$(1)), $(cmd_$@)) \ -# $(filter-out $(cmd_$@), $(cmd_$(1)))) -# We instead substitute each for the empty string into the other, and -# say they're equal if both substitutions produce the empty string. -# .d files contain ? instead of spaces, take that into account. -command_changed = $(or $(subst $(cmd_$(1)),,$(cmd_$(call replace_spaces,$@))),\ - $(subst $(cmd_$(call replace_spaces,$@)),,$(cmd_$(1)))) - -# Helper that is non-empty when a prerequisite changes. -# Normally make does this implicitly, but we force rules to always run -# so we can check their command lines. -# $? -- new prerequisites -# $| -- order-only dependencies -prereq_changed = $(filter-out FORCE_DO_CMD,$(filter-out $|,$?)) - -# Helper that executes all postbuilds until one fails. -define do_postbuilds - @E=0;\ - for p in $(POSTBUILDS); do\ - eval $$p;\ - E=$$?;\ - if [ $$E -ne 0 ]; then\ - break;\ - fi;\ - done;\ - if [ $$E -ne 0 ]; then\ - rm -rf "$@";\ - exit $$E;\ - fi -endef - -# do_cmd: run a command via the above cmd_foo names, if necessary. -# Should always run for a given target to handle command-line changes. 
-# Second argument, if non-zero, makes it do asm/C/C++ dependency munging. -# Third argument, if non-zero, makes it do POSTBUILDS processing. -# Note: We intentionally do NOT call dirx for depfile, since it contains ? for -# spaces already and dirx strips the ? characters. -define do_cmd -$(if $(or $(command_changed),$(prereq_changed)), - @$(call exact_echo, $($(quiet)cmd_$(1))) - @mkdir -p "$(call dirx,$@)" "$(dir $(depfile))" - $(if $(findstring flock,$(word 2,$(cmd_$1))), - @$(cmd_$(1)) - @echo " $(quiet_cmd_$(1)): Finished", - @$(cmd_$(1)) - ) - @$(call exact_echo,$(call escape_vars,cmd_$(call replace_spaces,$@) := $(cmd_$(1)))) > $(depfile) - @$(if $(2),$(fixup_dep)) - $(if $(and $(3), $(POSTBUILDS)), - $(call do_postbuilds) - ) -) -endef - -# Declare the "all" target first so it is the default, -# even though we don't have the deps yet. -.PHONY: all -all: - -# make looks for ways to re-generate included makefiles, but in our case, we -# don't have a direct way. Explicitly telling make that it has nothing to do -# for them makes it go faster. -%.d: ; - -# Use FORCE_DO_CMD to force a target to run. Should be coupled with -# do_cmd. -.PHONY: FORCE_DO_CMD -FORCE_DO_CMD: - -TOOLSET := target -# Suffix rules, putting all outputs into $(obj). -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.m FORCE_DO_CMD - @$(call do_cmd,objc,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.mm FORCE_DO_CMD - @$(call do_cmd,objcxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.S FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.s FORCE_DO_CMD - @$(call do_cmd,cc,1) - -# Try building from generated source, too. -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.m FORCE_DO_CMD - @$(call do_cmd,objc,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.mm FORCE_DO_CMD - @$(call do_cmd,objcxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.S FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.s FORCE_DO_CMD - @$(call do_cmd,cc,1) - -$(obj).$(TOOLSET)/%.o: $(obj)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.m FORCE_DO_CMD - @$(call do_cmd,objc,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.mm FORCE_DO_CMD - @$(call do_cmd,objcxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.S FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.s FORCE_DO_CMD - @$(call do_cmd,cc,1) - - -ifeq ($(strip $(foreach prefix,$(NO_LOAD),\ - $(findstring $(join ^,$(prefix)),\ - $(join ^,test_exception.target.mk)))),) - include test_exception.target.mk -endif - -quiet_cmd_regen_makefile = ACTION Regenerating $@ -cmd_regen_makefile = cd $(srcdir); /Users/trott/io.js/deps/npm/node_modules/node-gyp/gyp/gyp_main.py -fmake --ignore-environment "--toplevel-dir=." 
-I/Users/trott/io.js/test/addons-napi/test_exception/build/config.gypi -I/Users/trott/io.js/deps/npm/node_modules/node-gyp/addon.gypi -I/Users/trott/io.js/common.gypi "--depth=." "-Goutput_dir=." "--generator-output=build" "-Dlibrary=shared_library" "-Dvisibility=default" "-Dnode_root_dir=/Users/trott/io.js" "-Dnode_gyp_dir=/Users/trott/io.js/deps/npm/node_modules/node-gyp" "-Dnode_lib_file=/Users/trott/io.js/$(Configuration)/node.lib" "-Dmodule_root_dir=/Users/trott/io.js/test/addons-napi/test_exception" "-Dnode_engine=v8" binding.gyp -Makefile: $(srcdir)/../../../deps/npm/node_modules/node-gyp/addon.gypi $(srcdir)/build/config.gypi $(srcdir)/binding.gyp $(srcdir)/../../../common.gypi - $(call do_cmd,regen_makefile) - -# "all" is a concatenation of the "all" targets from all the included -# sub-makefiles. This is just here to clarify. -all: - -# Add in dependency-tracking rules. $(all_deps) is the list of every single -# target in our tree. Only consider the ones with .d (dependency) info: -d_files := $(wildcard $(foreach f,$(all_deps),$(depsdir)/$(f).d)) -ifneq ($(d_files),) - include $(d_files) -endif diff --git a/test/addons-napi/test_exception/build/binding.Makefile b/test/addons-napi/test_exception/build/binding.Makefile deleted file mode 100644 index 276e036c1017ca..00000000000000 --- a/test/addons-napi/test_exception/build/binding.Makefile +++ /dev/null @@ -1,6 +0,0 @@ -# This file is generated by gyp; do not edit. - -export builddir_name ?= ./build/. -.PHONY: all -all: - $(MAKE) test_exception diff --git a/test/addons-napi/test_exception/build/config.gypi b/test/addons-napi/test_exception/build/config.gypi deleted file mode 100644 index 6a35cfccbb772b..00000000000000 --- a/test/addons-napi/test_exception/build/config.gypi +++ /dev/null @@ -1,70 +0,0 @@ -# Do not edit. 
File was generated by node-gyp's "configure" step -{ - "target_defaults": { - "cflags": [], - "default_configuration": "Release", - "defines": [], - "include_dirs": [], - "libraries": [] - }, - "variables": { - "asan": 0, - "coverage": "false", - "debug_devtools": "node", - "debug_http2": "false", - "debug_nghttp2": "false", - "force_dynamic_crt": 0, - "host_arch": "x64", - "icu_data_file": "icudt59l.dat", - "icu_data_in": "../../deps/icu-small/source/data/in/icudt59l.dat", - "icu_endianness": "l", - "icu_gyp_path": "tools/icu/icu-generic.gyp", - "icu_locales": "en,root", - "icu_path": "deps/icu-small", - "icu_small": "true", - "icu_ver_major": "59", - "llvm_version": 0, - "node_byteorder": "little", - "node_enable_d8": "false", - "node_enable_v8_vtunejit": "false", - "node_install_npm": "true", - "node_module_version": 58, - "node_no_browser_globals": "false", - "node_prefix": "/usr/local", - "node_release_urlbase": "", - "node_shared": "false", - "node_shared_cares": "false", - "node_shared_http_parser": "false", - "node_shared_libuv": "false", - "node_shared_openssl": "false", - "node_shared_zlib": "false", - "node_tag": "", - "node_use_bundled_v8": "true", - "node_use_dtrace": "true", - "node_use_etw": "false", - "node_use_lttng": "false", - "node_use_openssl": "true", - "node_use_perfctr": "false", - "node_use_v8_platform": "true", - "node_without_node_options": "false", - "openssl_fips": "", - "openssl_no_asm": 0, - "shlib_suffix": "58.dylib", - "target_arch": "x64", - "uv_parent_path": "/deps/uv/", - "uv_use_dtrace": "true", - "v8_enable_gdbjit": 0, - "v8_enable_i18n_support": 1, - "v8_enable_inspector": 1, - "v8_no_strict_aliasing": 1, - "v8_optimized_debug": 0, - "v8_promise_internal_field_count": 1, - "v8_random_seed": 0, - "v8_trace_maps": 0, - "v8_use_snapshot": "true", - "want_separate_host_toolset": 0, - "xcode_version": "8.0", - "nodedir": "/Users/trott/io.js", - "standalone_static_library": 1 - } -} diff --git a/test/addons-napi/test_exception/build/gyp-mac-tool b/test/addons-napi/test_exception/build/gyp-mac-tool deleted file mode 100755 index 8ef02b0493a003..00000000000000 --- a/test/addons-napi/test_exception/build/gyp-mac-tool +++ /dev/null @@ -1,611 +0,0 @@ -#!/usr/bin/env python -# Generated by gyp. Do not edit. -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Utility functions to perform Xcode-style build steps. - -These functions are executed via gyp-mac-tool when using the Makefile generator. -""" - -import fcntl -import fnmatch -import glob -import json -import os -import plistlib -import re -import shutil -import string -import subprocess -import sys -import tempfile - - -def main(args): - executor = MacTool() - exit_code = executor.Dispatch(args) - if exit_code is not None: - sys.exit(exit_code) - - -class MacTool(object): - """This class performs all the Mac tooling steps. 
The methods can either be - executed directly, or dispatched from an argument list.""" - - def Dispatch(self, args): - """Dispatches a string command to a method.""" - if len(args) < 1: - raise Exception("Not enough arguments") - - method = "Exec%s" % self._CommandifyName(args[0]) - return getattr(self, method)(*args[1:]) - - def _CommandifyName(self, name_string): - """Transforms a tool name like copy-info-plist to CopyInfoPlist""" - return name_string.title().replace('-', '') - - def ExecCopyBundleResource(self, source, dest, convert_to_binary): - """Copies a resource file to the bundle/Resources directory, performing any - necessary compilation on each resource.""" - extension = os.path.splitext(source)[1].lower() - if os.path.isdir(source): - # Copy tree. - # TODO(thakis): This copies file attributes like mtime, while the - # single-file branch below doesn't. This should probably be changed to - # be consistent with the single-file branch. - if os.path.exists(dest): - shutil.rmtree(dest) - shutil.copytree(source, dest) - elif extension == '.xib': - return self._CopyXIBFile(source, dest) - elif extension == '.storyboard': - return self._CopyXIBFile(source, dest) - elif extension == '.strings': - self._CopyStringsFile(source, dest, convert_to_binary) - else: - shutil.copy(source, dest) - - def _CopyXIBFile(self, source, dest): - """Compiles a XIB file with ibtool into a binary plist in the bundle.""" - - # ibtool sometimes crashes with relative paths. See crbug.com/314728. - base = os.path.dirname(os.path.realpath(__file__)) - if os.path.relpath(source): - source = os.path.join(base, source) - if os.path.relpath(dest): - dest = os.path.join(base, dest) - - args = ['xcrun', 'ibtool', '--errors', '--warnings', '--notices', - '--output-format', 'human-readable-text', '--compile', dest, source] - ibtool_section_re = re.compile(r'/\*.*\*/') - ibtool_re = re.compile(r'.*note:.*is clipping its content') - ibtoolout = subprocess.Popen(args, stdout=subprocess.PIPE) - current_section_header = None - for line in ibtoolout.stdout: - if ibtool_section_re.match(line): - current_section_header = line - elif not ibtool_re.match(line): - if current_section_header: - sys.stdout.write(current_section_header) - current_section_header = None - sys.stdout.write(line) - return ibtoolout.returncode - - def _ConvertToBinary(self, dest): - subprocess.check_call([ - 'xcrun', 'plutil', '-convert', 'binary1', '-o', dest, dest]) - - def _CopyStringsFile(self, source, dest, convert_to_binary): - """Copies a .strings file using iconv to reconvert the input into UTF-16.""" - input_code = self._DetectInputEncoding(source) or "UTF-8" - - # Xcode's CpyCopyStringsFile / builtin-copyStrings seems to call - # CFPropertyListCreateFromXMLData() behind the scenes; at least it prints - # CFPropertyListCreateFromXMLData(): Old-style plist parser: missing - # semicolon in dictionary. - # on invalid files. Do the same kind of validation. - import CoreFoundation - s = open(source, 'rb').read() - d = CoreFoundation.CFDataCreate(None, s, len(s)) - _, error = CoreFoundation.CFPropertyListCreateFromXMLData(None, d, 0, None) - if error: - return - - fp = open(dest, 'wb') - fp.write(s.decode(input_code).encode('UTF-16')) - fp.close() - - if convert_to_binary == 'True': - self._ConvertToBinary(dest) - - def _DetectInputEncoding(self, file_name): - """Reads the first few bytes from file_name and tries to guess the text - encoding. 
Returns None as a guess if it can't detect it.""" - fp = open(file_name, 'rb') - try: - header = fp.read(3) - except e: - fp.close() - return None - fp.close() - if header.startswith("\xFE\xFF"): - return "UTF-16" - elif header.startswith("\xFF\xFE"): - return "UTF-16" - elif header.startswith("\xEF\xBB\xBF"): - return "UTF-8" - else: - return None - - def ExecCopyInfoPlist(self, source, dest, convert_to_binary, *keys): - """Copies the |source| Info.plist to the destination directory |dest|.""" - # Read the source Info.plist into memory. - fd = open(source, 'r') - lines = fd.read() - fd.close() - - # Insert synthesized key/value pairs (e.g. BuildMachineOSBuild). - plist = plistlib.readPlistFromString(lines) - if keys: - plist = dict(plist.items() + json.loads(keys[0]).items()) - lines = plistlib.writePlistToString(plist) - - # Go through all the environment variables and replace them as variables in - # the file. - IDENT_RE = re.compile(r'[/\s]') - for key in os.environ: - if key.startswith('_'): - continue - evar = '${%s}' % key - evalue = os.environ[key] - lines = string.replace(lines, evar, evalue) - - # Xcode supports various suffices on environment variables, which are - # all undocumented. :rfc1034identifier is used in the standard project - # template these days, and :identifier was used earlier. They are used to - # convert non-url characters into things that look like valid urls -- - # except that the replacement character for :identifier, '_' isn't valid - # in a URL either -- oops, hence :rfc1034identifier was born. - evar = '${%s:identifier}' % key - evalue = IDENT_RE.sub('_', os.environ[key]) - lines = string.replace(lines, evar, evalue) - - evar = '${%s:rfc1034identifier}' % key - evalue = IDENT_RE.sub('-', os.environ[key]) - lines = string.replace(lines, evar, evalue) - - # Remove any keys with values that haven't been replaced. - lines = lines.split('\n') - for i in range(len(lines)): - if lines[i].strip().startswith("${"): - lines[i] = None - lines[i - 1] = None - lines = '\n'.join(filter(lambda x: x is not None, lines)) - - # Write out the file with variables replaced. - fd = open(dest, 'w') - fd.write(lines) - fd.close() - - # Now write out PkgInfo file now that the Info.plist file has been - # "compiled". - self._WritePkgInfo(dest) - - if convert_to_binary == 'True': - self._ConvertToBinary(dest) - - def _WritePkgInfo(self, info_plist): - """This writes the PkgInfo file from the data stored in Info.plist.""" - plist = plistlib.readPlist(info_plist) - if not plist: - return - - # Only create PkgInfo for executable types. - package_type = plist['CFBundlePackageType'] - if package_type != 'APPL': - return - - # The format of PkgInfo is eight characters, representing the bundle type - # and bundle signature, each four characters. If that is missing, four - # '?' characters are used instead. - signature_code = plist.get('CFBundleSignature', '????') - if len(signature_code) != 4: # Wrong length resets everything, too. - signature_code = '?' * 4 - - dest = os.path.join(os.path.dirname(info_plist), 'PkgInfo') - fp = open(dest, 'w') - fp.write('%s%s' % (package_type, signature_code)) - fp.close() - - def ExecFlock(self, lockfile, *cmd_list): - """Emulates the most basic behavior of Linux's flock(1).""" - # Rely on exception handling to report errors. 
- fd = os.open(lockfile, os.O_RDONLY|os.O_NOCTTY|os.O_CREAT, 0o666) - fcntl.flock(fd, fcntl.LOCK_EX) - return subprocess.call(cmd_list) - - def ExecFilterLibtool(self, *cmd_list): - """Calls libtool and filters out '/path/to/libtool: file: foo.o has no - symbols'.""" - libtool_re = re.compile(r'^.*libtool: file: .* has no symbols$') - libtool_re5 = re.compile( - r'^.*libtool: warning for library: ' + - r'.* the table of contents is empty ' + - r'\(no object file members in the library define global symbols\)$') - env = os.environ.copy() - # Ref: - # http://www.opensource.apple.com/source/cctools/cctools-809/misc/libtool.c - # The problem with this flag is that it resets the file mtime on the file to - # epoch=0, e.g. 1970-1-1 or 1969-12-31 depending on timezone. - env['ZERO_AR_DATE'] = '1' - libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env) - _, err = libtoolout.communicate() - for line in err.splitlines(): - if not libtool_re.match(line) and not libtool_re5.match(line): - print >>sys.stderr, line - # Unconditionally touch the output .a file on the command line if present - # and the command succeeded. A bit hacky. - if not libtoolout.returncode: - for i in range(len(cmd_list) - 1): - if cmd_list[i] == "-o" and cmd_list[i+1].endswith('.a'): - os.utime(cmd_list[i+1], None) - break - return libtoolout.returncode - - def ExecPackageFramework(self, framework, version): - """Takes a path to Something.framework and the Current version of that and - sets up all the symlinks.""" - # Find the name of the binary based on the part before the ".framework". - binary = os.path.basename(framework).split('.')[0] - - CURRENT = 'Current' - RESOURCES = 'Resources' - VERSIONS = 'Versions' - - if not os.path.exists(os.path.join(framework, VERSIONS, version, binary)): - # Binary-less frameworks don't seem to contain symlinks (see e.g. - # chromium's out/Debug/org.chromium.Chromium.manifest/ bundle). - return - - # Move into the framework directory to set the symlinks correctly. - pwd = os.getcwd() - os.chdir(framework) - - # Set up the Current version. - self._Relink(version, os.path.join(VERSIONS, CURRENT)) - - # Set up the root symlinks. - self._Relink(os.path.join(VERSIONS, CURRENT, binary), binary) - self._Relink(os.path.join(VERSIONS, CURRENT, RESOURCES), RESOURCES) - - # Back to where we were before! - os.chdir(pwd) - - def _Relink(self, dest, link): - """Creates a symlink to |dest| named |link|. If |link| already exists, - it is overwritten.""" - if os.path.lexists(link): - os.remove(link) - os.symlink(dest, link) - - def ExecCompileXcassets(self, keys, *inputs): - """Compiles multiple .xcassets files into a single .car file. - - This invokes 'actool' to compile all the inputs .xcassets files. The - |keys| arguments is a json-encoded dictionary of extra arguments to - pass to 'actool' when the asset catalogs contains an application icon - or a launch image. - - Note that 'actool' does not create the Assets.car file if the asset - catalogs does not contains imageset. 
- """ - command_line = [ - 'xcrun', 'actool', '--output-format', 'human-readable-text', - '--compress-pngs', '--notices', '--warnings', '--errors', - ] - is_iphone_target = 'IPHONEOS_DEPLOYMENT_TARGET' in os.environ - if is_iphone_target: - platform = os.environ['CONFIGURATION'].split('-')[-1] - if platform not in ('iphoneos', 'iphonesimulator'): - platform = 'iphonesimulator' - command_line.extend([ - '--platform', platform, '--target-device', 'iphone', - '--target-device', 'ipad', '--minimum-deployment-target', - os.environ['IPHONEOS_DEPLOYMENT_TARGET'], '--compile', - os.path.abspath(os.environ['CONTENTS_FOLDER_PATH']), - ]) - else: - command_line.extend([ - '--platform', 'macosx', '--target-device', 'mac', - '--minimum-deployment-target', os.environ['MACOSX_DEPLOYMENT_TARGET'], - '--compile', - os.path.abspath(os.environ['UNLOCALIZED_RESOURCES_FOLDER_PATH']), - ]) - if keys: - keys = json.loads(keys) - for key, value in keys.iteritems(): - arg_name = '--' + key - if isinstance(value, bool): - if value: - command_line.append(arg_name) - elif isinstance(value, list): - for v in value: - command_line.append(arg_name) - command_line.append(str(v)) - else: - command_line.append(arg_name) - command_line.append(str(value)) - # Note: actool crashes if inputs path are relative, so use os.path.abspath - # to get absolute path name for inputs. - command_line.extend(map(os.path.abspath, inputs)) - subprocess.check_call(command_line) - - def ExecMergeInfoPlist(self, output, *inputs): - """Merge multiple .plist files into a single .plist file.""" - merged_plist = {} - for path in inputs: - plist = self._LoadPlistMaybeBinary(path) - self._MergePlist(merged_plist, plist) - plistlib.writePlist(merged_plist, output) - - def ExecCodeSignBundle(self, key, resource_rules, entitlements, provisioning): - """Code sign a bundle. - - This function tries to code sign an iOS bundle, following the same - algorithm as Xcode: - 1. copy ResourceRules.plist from the user or the SDK into the bundle, - 2. pick the provisioning profile that best match the bundle identifier, - and copy it into the bundle as embedded.mobileprovision, - 3. copy Entitlements.plist from user or SDK next to the bundle, - 4. code sign the bundle. - """ - resource_rules_path = self._InstallResourceRules(resource_rules) - substitutions, overrides = self._InstallProvisioningProfile( - provisioning, self._GetCFBundleIdentifier()) - entitlements_path = self._InstallEntitlements( - entitlements, substitutions, overrides) - subprocess.check_call([ - 'codesign', '--force', '--sign', key, '--resource-rules', - resource_rules_path, '--entitlements', entitlements_path, - os.path.join( - os.environ['TARGET_BUILD_DIR'], - os.environ['FULL_PRODUCT_NAME'])]) - - def _InstallResourceRules(self, resource_rules): - """Installs ResourceRules.plist from user or SDK into the bundle. - - Args: - resource_rules: string, optional, path to the ResourceRules.plist file - to use, default to "${SDKROOT}/ResourceRules.plist" - - Returns: - Path to the copy of ResourceRules.plist into the bundle. - """ - source_path = resource_rules - target_path = os.path.join( - os.environ['BUILT_PRODUCTS_DIR'], - os.environ['CONTENTS_FOLDER_PATH'], - 'ResourceRules.plist') - if not source_path: - source_path = os.path.join( - os.environ['SDKROOT'], 'ResourceRules.plist') - shutil.copy2(source_path, target_path) - return target_path - - def _InstallProvisioningProfile(self, profile, bundle_identifier): - """Installs embedded.mobileprovision into the bundle. 
- - Args: - profile: string, optional, short name of the .mobileprovision file - to use, if empty or the file is missing, the best file installed - will be used - bundle_identifier: string, value of CFBundleIdentifier from Info.plist - - Returns: - A tuple containing two dictionary: variables substitutions and values - to overrides when generating the entitlements file. - """ - source_path, provisioning_data, team_id = self._FindProvisioningProfile( - profile, bundle_identifier) - target_path = os.path.join( - os.environ['BUILT_PRODUCTS_DIR'], - os.environ['CONTENTS_FOLDER_PATH'], - 'embedded.mobileprovision') - shutil.copy2(source_path, target_path) - substitutions = self._GetSubstitutions(bundle_identifier, team_id + '.') - return substitutions, provisioning_data['Entitlements'] - - def _FindProvisioningProfile(self, profile, bundle_identifier): - """Finds the .mobileprovision file to use for signing the bundle. - - Checks all the installed provisioning profiles (or if the user specified - the PROVISIONING_PROFILE variable, only consult it) and select the most - specific that correspond to the bundle identifier. - - Args: - profile: string, optional, short name of the .mobileprovision file - to use, if empty or the file is missing, the best file installed - will be used - bundle_identifier: string, value of CFBundleIdentifier from Info.plist - - Returns: - A tuple of the path to the selected provisioning profile, the data of - the embedded plist in the provisioning profile and the team identifier - to use for code signing. - - Raises: - SystemExit: if no .mobileprovision can be used to sign the bundle. - """ - profiles_dir = os.path.join( - os.environ['HOME'], 'Library', 'MobileDevice', 'Provisioning Profiles') - if not os.path.isdir(profiles_dir): - print >>sys.stderr, ( - 'cannot find mobile provisioning for %s' % bundle_identifier) - sys.exit(1) - provisioning_profiles = None - if profile: - profile_path = os.path.join(profiles_dir, profile + '.mobileprovision') - if os.path.exists(profile_path): - provisioning_profiles = [profile_path] - if not provisioning_profiles: - provisioning_profiles = glob.glob( - os.path.join(profiles_dir, '*.mobileprovision')) - valid_provisioning_profiles = {} - for profile_path in provisioning_profiles: - profile_data = self._LoadProvisioningProfile(profile_path) - app_id_pattern = profile_data.get( - 'Entitlements', {}).get('application-identifier', '') - for team_identifier in profile_data.get('TeamIdentifier', []): - app_id = '%s.%s' % (team_identifier, bundle_identifier) - if fnmatch.fnmatch(app_id, app_id_pattern): - valid_provisioning_profiles[app_id_pattern] = ( - profile_path, profile_data, team_identifier) - if not valid_provisioning_profiles: - print >>sys.stderr, ( - 'cannot find mobile provisioning for %s' % bundle_identifier) - sys.exit(1) - # If the user has multiple provisioning profiles installed that can be - # used for ${bundle_identifier}, pick the most specific one (ie. the - # provisioning profile whose pattern is the longest). - selected_key = max(valid_provisioning_profiles, key=lambda v: len(v)) - return valid_provisioning_profiles[selected_key] - - def _LoadProvisioningProfile(self, profile_path): - """Extracts the plist embedded in a provisioning profile. - - Args: - profile_path: string, path to the .mobileprovision file - - Returns: - Content of the plist embedded in the provisioning profile as a dictionary. 
- """ - with tempfile.NamedTemporaryFile() as temp: - subprocess.check_call([ - 'security', 'cms', '-D', '-i', profile_path, '-o', temp.name]) - return self._LoadPlistMaybeBinary(temp.name) - - def _MergePlist(self, merged_plist, plist): - """Merge |plist| into |merged_plist|.""" - for key, value in plist.iteritems(): - if isinstance(value, dict): - merged_value = merged_plist.get(key, {}) - if isinstance(merged_value, dict): - self._MergePlist(merged_value, value) - merged_plist[key] = merged_value - else: - merged_plist[key] = value - else: - merged_plist[key] = value - - def _LoadPlistMaybeBinary(self, plist_path): - """Loads into a memory a plist possibly encoded in binary format. - - This is a wrapper around plistlib.readPlist that tries to convert the - plist to the XML format if it can't be parsed (assuming that it is in - the binary format). - - Args: - plist_path: string, path to a plist file, in XML or binary format - - Returns: - Content of the plist as a dictionary. - """ - try: - # First, try to read the file using plistlib that only supports XML, - # and if an exception is raised, convert a temporary copy to XML and - # load that copy. - return plistlib.readPlist(plist_path) - except: - pass - with tempfile.NamedTemporaryFile() as temp: - shutil.copy2(plist_path, temp.name) - subprocess.check_call(['plutil', '-convert', 'xml1', temp.name]) - return plistlib.readPlist(temp.name) - - def _GetSubstitutions(self, bundle_identifier, app_identifier_prefix): - """Constructs a dictionary of variable substitutions for Entitlements.plist. - - Args: - bundle_identifier: string, value of CFBundleIdentifier from Info.plist - app_identifier_prefix: string, value for AppIdentifierPrefix - - Returns: - Dictionary of substitutions to apply when generating Entitlements.plist. - """ - return { - 'CFBundleIdentifier': bundle_identifier, - 'AppIdentifierPrefix': app_identifier_prefix, - } - - def _GetCFBundleIdentifier(self): - """Extracts CFBundleIdentifier value from Info.plist in the bundle. - - Returns: - Value of CFBundleIdentifier in the Info.plist located in the bundle. - """ - info_plist_path = os.path.join( - os.environ['TARGET_BUILD_DIR'], - os.environ['INFOPLIST_PATH']) - info_plist_data = self._LoadPlistMaybeBinary(info_plist_path) - return info_plist_data['CFBundleIdentifier'] - - def _InstallEntitlements(self, entitlements, substitutions, overrides): - """Generates and install the ${BundleName}.xcent entitlements file. - - Expands variables "$(variable)" pattern in the source entitlements file, - add extra entitlements defined in the .mobileprovision file and the copy - the generated plist to "${BundlePath}.xcent". - - Args: - entitlements: string, optional, path to the Entitlements.plist template - to use, defaults to "${SDKROOT}/Entitlements.plist" - substitutions: dictionary, variable substitutions - overrides: dictionary, values to add to the entitlements - - Returns: - Path to the generated entitlements file. 
- """ - source_path = entitlements - target_path = os.path.join( - os.environ['BUILT_PRODUCTS_DIR'], - os.environ['PRODUCT_NAME'] + '.xcent') - if not source_path: - source_path = os.path.join( - os.environ['SDKROOT'], - 'Entitlements.plist') - shutil.copy2(source_path, target_path) - data = self._LoadPlistMaybeBinary(target_path) - data = self._ExpandVariables(data, substitutions) - if overrides: - for key in overrides: - if key not in data: - data[key] = overrides[key] - plistlib.writePlist(data, target_path) - return target_path - - def _ExpandVariables(self, data, substitutions): - """Expands variables "$(variable)" in data. - - Args: - data: object, can be either string, list or dictionary - substitutions: dictionary, variable substitutions to perform - - Returns: - Copy of data where each references to "$(variable)" has been replaced - by the corresponding value found in substitutions, or left intact if - the key was not found. - """ - if isinstance(data, str): - for key, value in substitutions.iteritems(): - data = data.replace('$(%s)' % key, value) - return data - if isinstance(data, list): - return [self._ExpandVariables(v, substitutions) for v in data] - if isinstance(data, dict): - return {k: self._ExpandVariables(data[k], substitutions) for k in data} - return data - -if __name__ == '__main__': - sys.exit(main(sys.argv[1:])) diff --git a/test/addons-napi/test_exception/build/test_exception.target.mk b/test/addons-napi/test_exception/build/test_exception.target.mk deleted file mode 100644 index f332afc2b79d26..00000000000000 --- a/test/addons-napi/test_exception/build/test_exception.target.mk +++ /dev/null @@ -1,178 +0,0 @@ -# This file is generated by gyp; do not edit. - -TOOLSET := target -TARGET := test_exception -DEFS_Debug := \ - '-DNODE_GYP_MODULE_NAME=test_exception' \ - '-DUSING_UV_SHARED=1' \ - '-DUSING_V8_SHARED=1' \ - '-DV8_DEPRECATION_WARNINGS=1' \ - '-D_DARWIN_USE_64_BIT_INODE=1' \ - '-D_LARGEFILE_SOURCE' \ - '-D_FILE_OFFSET_BITS=64' \ - '-DBUILDING_NODE_EXTENSION' \ - '-DDEBUG' \ - '-D_DEBUG' \ - '-DV8_ENABLE_CHECKS' - -# Flags passed to all source files. -CFLAGS_Debug := \ - -O0 \ - -gdwarf-2 \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -Wall \ - -Wendif-labels \ - -W \ - -Wno-unused-parameter - -# Flags passed to only C files. -CFLAGS_C_Debug := \ - -fno-strict-aliasing - -# Flags passed to only C++ files. -CFLAGS_CC_Debug := \ - -std=gnu++0x \ - -stdlib=libc++ \ - -fno-rtti \ - -fno-exceptions \ - -fno-threadsafe-statics \ - -fno-strict-aliasing - -# Flags passed to only ObjC files. -CFLAGS_OBJC_Debug := - -# Flags passed to only ObjC++ files. -CFLAGS_OBJCC_Debug := - -INCS_Debug := \ - -I/Users/trott/io.js/include/node \ - -I/Users/trott/io.js/src \ - -I/Users/trott/io.js/deps/uv/include \ - -I/Users/trott/io.js/deps/v8/include - -DEFS_Release := \ - '-DNODE_GYP_MODULE_NAME=test_exception' \ - '-DUSING_UV_SHARED=1' \ - '-DUSING_V8_SHARED=1' \ - '-DV8_DEPRECATION_WARNINGS=1' \ - '-D_DARWIN_USE_64_BIT_INODE=1' \ - '-D_LARGEFILE_SOURCE' \ - '-D_FILE_OFFSET_BITS=64' \ - '-DBUILDING_NODE_EXTENSION' - -# Flags passed to all source files. -CFLAGS_Release := \ - -Os \ - -gdwarf-2 \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -Wall \ - -Wendif-labels \ - -W \ - -Wno-unused-parameter - -# Flags passed to only C files. -CFLAGS_C_Release := \ - -fno-strict-aliasing - -# Flags passed to only C++ files. 
-CFLAGS_CC_Release := \ - -std=gnu++0x \ - -stdlib=libc++ \ - -fno-rtti \ - -fno-exceptions \ - -fno-threadsafe-statics \ - -fno-strict-aliasing - -# Flags passed to only ObjC files. -CFLAGS_OBJC_Release := - -# Flags passed to only ObjC++ files. -CFLAGS_OBJCC_Release := - -INCS_Release := \ - -I/Users/trott/io.js/include/node \ - -I/Users/trott/io.js/src \ - -I/Users/trott/io.js/deps/uv/include \ - -I/Users/trott/io.js/deps/v8/include - -OBJS := \ - $(obj).target/$(TARGET)/test_exception.o - -# Add to the list of files we specially track dependencies for. -all_deps += $(OBJS) - -# CFLAGS et al overrides must be target-local. -# See "Target-specific Variable Values" in the GNU Make manual. -$(OBJS): TOOLSET := $(TOOLSET) -$(OBJS): GYP_CFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) -$(OBJS): GYP_CXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) -$(OBJS): GYP_OBJCFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) $(CFLAGS_OBJC_$(BUILDTYPE)) -$(OBJS): GYP_OBJCXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) $(CFLAGS_OBJCC_$(BUILDTYPE)) - -# Suffix rules, putting all outputs into $(obj). - -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) - -# Try building from generated source, too. - -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) - -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) - -# End of this set of suffix rules -### Rules for final target. -LDFLAGS_Debug := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -L$(builddir) \ - -stdlib=libc++ - -LIBTOOLFLAGS_Debug := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first - -LDFLAGS_Release := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -L$(builddir) \ - -stdlib=libc++ - -LIBTOOLFLAGS_Release := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first - -LIBS := - -$(builddir)/test_exception.node: GYP_LDFLAGS := $(LDFLAGS_$(BUILDTYPE)) -$(builddir)/test_exception.node: LIBS := $(LIBS) -$(builddir)/test_exception.node: GYP_LIBTOOLFLAGS := $(LIBTOOLFLAGS_$(BUILDTYPE)) -$(builddir)/test_exception.node: TOOLSET := $(TOOLSET) -$(builddir)/test_exception.node: $(OBJS) FORCE_DO_CMD - $(call do_cmd,solink_module) - -all_deps += $(builddir)/test_exception.node -# Add target alias -.PHONY: test_exception -test_exception: $(builddir)/test_exception.node - -# Short alias for building this executable. -.PHONY: test_exception.node -test_exception.node: $(builddir)/test_exception.node - -# Add executable to "all" target. -.PHONY: all -all: $(builddir)/test_exception.node - diff --git a/test/addons-napi/test_fatal/build/Makefile b/test/addons-napi/test_fatal/build/Makefile deleted file mode 100644 index f59a23016fa8df..00000000000000 --- a/test/addons-napi/test_fatal/build/Makefile +++ /dev/null @@ -1,342 +0,0 @@ -# We borrow heavily from the kernel build setup, though we are simpler since -# we don't have Kconfig tweaking settings on us. - -# The implicit make rules have it looking for RCS files, among other things. -# We instead explicitly write all the rules we care about. -# It's even quicker (saves ~200ms) to pass -r on the command line. 
-MAKEFLAGS=-r - -# The source directory tree. -srcdir := .. -abs_srcdir := $(abspath $(srcdir)) - -# The name of the builddir. -builddir_name ?= . - -# The V=1 flag on command line makes us verbosely print command lines. -ifdef V - quiet= -else - quiet=quiet_ -endif - -# Specify BUILDTYPE=Release on the command line for a release build. -BUILDTYPE ?= Release - -# Directory all our build output goes into. -# Note that this must be two directories beneath src/ for unit tests to pass, -# as they reach into the src/ directory for data with relative paths. -builddir ?= $(builddir_name)/$(BUILDTYPE) -abs_builddir := $(abspath $(builddir)) -depsdir := $(builddir)/.deps - -# Object output directory. -obj := $(builddir)/obj -abs_obj := $(abspath $(obj)) - -# We build up a list of every single one of the targets so we can slurp in the -# generated dependency rule Makefiles in one pass. -all_deps := - - - -CC.target ?= $(CC) -CFLAGS.target ?= $(CPPFLAGS) $(CFLAGS) -CXX.target ?= $(CXX) -CXXFLAGS.target ?= $(CPPFLAGS) $(CXXFLAGS) -LINK.target ?= $(LINK) -LDFLAGS.target ?= $(LDFLAGS) -AR.target ?= $(AR) - -# C++ apps need to be linked with g++. -LINK ?= $(CXX.target) - -# TODO(evan): move all cross-compilation logic to gyp-time so we don't need -# to replicate this environment fallback in make as well. -CC.host ?= gcc -CFLAGS.host ?= $(CPPFLAGS_host) $(CFLAGS_host) -CXX.host ?= g++ -CXXFLAGS.host ?= $(CPPFLAGS_host) $(CXXFLAGS_host) -LINK.host ?= $(CXX.host) -LDFLAGS.host ?= -AR.host ?= ar - -# Define a dir function that can handle spaces. -# http://www.gnu.org/software/make/manual/make.html#Syntax-of-Functions -# "leading spaces cannot appear in the text of the first argument as written. -# These characters can be put into the argument value by variable substitution." -empty := -space := $(empty) $(empty) - -# http://stackoverflow.com/questions/1189781/using-make-dir-or-notdir-on-a-path-with-spaces -replace_spaces = $(subst $(space),?,$1) -unreplace_spaces = $(subst ?,$(space),$1) -dirx = $(call unreplace_spaces,$(dir $(call replace_spaces,$1))) - -# Flags to make gcc output dependency info. Note that you need to be -# careful here to use the flags that ccache and distcc can understand. -# We write to a dep file on the side first and then rename at the end -# so we can't end up with a broken dep file. -depfile = $(depsdir)/$(call replace_spaces,$@).d -DEPFLAGS = -MMD -MF $(depfile).raw - -# We have to fixup the deps output in a few ways. -# (1) the file output should mention the proper .o file. -# ccache or distcc lose the path to the target, so we convert a rule of -# the form: -# foobar.o: DEP1 DEP2 -# into -# path/to/foobar.o: DEP1 DEP2 -# (2) we want missing files not to cause us to fail to build. -# We want to rewrite -# foobar.o: DEP1 DEP2 \ -# DEP3 -# to -# DEP1: -# DEP2: -# DEP3: -# so if the files are missing, they're just considered phony rules. -# We have to do some pretty insane escaping to get those backslashes -# and dollar signs past make, the shell, and sed at the same time. -# Doesn't work with spaces, but that's fine: .d files have spaces in -# their names replaced with other characters. -define fixup_dep -# The depfile may not exist if the input file didn't have any #includes. -touch $(depfile).raw -# Fixup path as in (1). -sed -e "s|^$(notdir $@)|$@|" $(depfile).raw >> $(depfile) -# Add extra rules as in (2). -# We remove slashes and replace spaces with new lines; -# remove blank lines; -# delete the first line and append a colon to the remaining lines. 
-sed -e 's|\\||' -e 'y| |\n|' $(depfile).raw |\ - grep -v '^$$' |\ - sed -e 1d -e 's|$$|:|' \ - >> $(depfile) -rm $(depfile).raw -endef - -# Command definitions: -# - cmd_foo is the actual command to run; -# - quiet_cmd_foo is the brief-output summary of the command. - -quiet_cmd_cc = CC($(TOOLSET)) $@ -cmd_cc = $(CC.$(TOOLSET)) $(GYP_CFLAGS) $(DEPFLAGS) $(CFLAGS.$(TOOLSET)) -c -o $@ $< - -quiet_cmd_cxx = CXX($(TOOLSET)) $@ -cmd_cxx = $(CXX.$(TOOLSET)) $(GYP_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< - -quiet_cmd_objc = CXX($(TOOLSET)) $@ -cmd_objc = $(CC.$(TOOLSET)) $(GYP_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $< - -quiet_cmd_objcxx = CXX($(TOOLSET)) $@ -cmd_objcxx = $(CXX.$(TOOLSET)) $(GYP_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $< - -# Commands for precompiled header files. -quiet_cmd_pch_c = CXX($(TOOLSET)) $@ -cmd_pch_c = $(CC.$(TOOLSET)) $(GYP_PCH_CFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< -quiet_cmd_pch_cc = CXX($(TOOLSET)) $@ -cmd_pch_cc = $(CC.$(TOOLSET)) $(GYP_PCH_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< -quiet_cmd_pch_m = CXX($(TOOLSET)) $@ -cmd_pch_m = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $< -quiet_cmd_pch_mm = CXX($(TOOLSET)) $@ -cmd_pch_mm = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $< - -# gyp-mac-tool is written next to the root Makefile by gyp. -# Use $(4) for the command, since $(2) and $(3) are used as flag by do_cmd -# already. -quiet_cmd_mac_tool = MACTOOL $(4) $< -cmd_mac_tool = ./gyp-mac-tool $(4) $< "$@" - -quiet_cmd_mac_package_framework = PACKAGE FRAMEWORK $@ -cmd_mac_package_framework = ./gyp-mac-tool package-framework "$@" $(4) - -quiet_cmd_infoplist = INFOPLIST $@ -cmd_infoplist = $(CC.$(TOOLSET)) -E -P -Wno-trigraphs -x c $(INFOPLIST_DEFINES) "$<" -o "$@" - -quiet_cmd_touch = TOUCH $@ -cmd_touch = touch $@ - -quiet_cmd_copy = COPY $@ -# send stderr to /dev/null to ignore messages when linking directories. -cmd_copy = rm -rf "$@" && cp -af "$<" "$@" - -quiet_cmd_alink = LIBTOOL-STATIC $@ -cmd_alink = rm -f $@ && ./gyp-mac-tool filter-libtool libtool $(GYP_LIBTOOLFLAGS) -static -o $@ $(filter %.o,$^) - -quiet_cmd_link = LINK($(TOOLSET)) $@ -cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS) - -quiet_cmd_solink = SOLINK($(TOOLSET)) $@ -cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS) - -quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@ -cmd_solink_module = $(LINK.$(TOOLSET)) -bundle $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS) - - -# Define an escape_quotes function to escape single quotes. -# This allows us to handle quotes properly as long as we always use -# use single quotes and escape_quotes. -escape_quotes = $(subst ','\'',$(1)) -# This comment is here just to include a ' to unconfuse syntax highlighting. -# Define an escape_vars function to escape '$' variable syntax. -# This allows us to read/write command lines with shell variables (e.g. -# $LD_LIBRARY_PATH), without triggering make substitution. -escape_vars = $(subst $$,$$$$,$(1)) -# Helper that expands to a shell command to echo a string exactly as it is in -# make. This uses printf instead of echo because printf's behaviour with respect -# to escape sequences is more portable than echo's across different shells -# (e.g., dash, bash). 
-exact_echo = printf '%s\n' '$(call escape_quotes,$(1))' - -# Helper to compare the command we're about to run against the command -# we logged the last time we ran the command. Produces an empty -# string (false) when the commands match. -# Tricky point: Make has no string-equality test function. -# The kernel uses the following, but it seems like it would have false -# positives, where one string reordered its arguments. -# arg_check = $(strip $(filter-out $(cmd_$(1)), $(cmd_$@)) \ -# $(filter-out $(cmd_$@), $(cmd_$(1)))) -# We instead substitute each for the empty string into the other, and -# say they're equal if both substitutions produce the empty string. -# .d files contain ? instead of spaces, take that into account. -command_changed = $(or $(subst $(cmd_$(1)),,$(cmd_$(call replace_spaces,$@))),\ - $(subst $(cmd_$(call replace_spaces,$@)),,$(cmd_$(1)))) - -# Helper that is non-empty when a prerequisite changes. -# Normally make does this implicitly, but we force rules to always run -# so we can check their command lines. -# $? -- new prerequisites -# $| -- order-only dependencies -prereq_changed = $(filter-out FORCE_DO_CMD,$(filter-out $|,$?)) - -# Helper that executes all postbuilds until one fails. -define do_postbuilds - @E=0;\ - for p in $(POSTBUILDS); do\ - eval $$p;\ - E=$$?;\ - if [ $$E -ne 0 ]; then\ - break;\ - fi;\ - done;\ - if [ $$E -ne 0 ]; then\ - rm -rf "$@";\ - exit $$E;\ - fi -endef - -# do_cmd: run a command via the above cmd_foo names, if necessary. -# Should always run for a given target to handle command-line changes. -# Second argument, if non-zero, makes it do asm/C/C++ dependency munging. -# Third argument, if non-zero, makes it do POSTBUILDS processing. -# Note: We intentionally do NOT call dirx for depfile, since it contains ? for -# spaces already and dirx strips the ? characters. -define do_cmd -$(if $(or $(command_changed),$(prereq_changed)), - @$(call exact_echo, $($(quiet)cmd_$(1))) - @mkdir -p "$(call dirx,$@)" "$(dir $(depfile))" - $(if $(findstring flock,$(word 2,$(cmd_$1))), - @$(cmd_$(1)) - @echo " $(quiet_cmd_$(1)): Finished", - @$(cmd_$(1)) - ) - @$(call exact_echo,$(call escape_vars,cmd_$(call replace_spaces,$@) := $(cmd_$(1)))) > $(depfile) - @$(if $(2),$(fixup_dep)) - $(if $(and $(3), $(POSTBUILDS)), - $(call do_postbuilds) - ) -) -endef - -# Declare the "all" target first so it is the default, -# even though we don't have the deps yet. -.PHONY: all -all: - -# make looks for ways to re-generate included makefiles, but in our case, we -# don't have a direct way. Explicitly telling make that it has nothing to do -# for them makes it go faster. -%.d: ; - -# Use FORCE_DO_CMD to force a target to run. Should be coupled with -# do_cmd. -.PHONY: FORCE_DO_CMD -FORCE_DO_CMD: - -TOOLSET := target -# Suffix rules, putting all outputs into $(obj). -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.m FORCE_DO_CMD - @$(call do_cmd,objc,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.mm FORCE_DO_CMD - @$(call do_cmd,objcxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.S FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.s FORCE_DO_CMD - @$(call do_cmd,cc,1) - -# Try building from generated source, too. 
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.m FORCE_DO_CMD - @$(call do_cmd,objc,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.mm FORCE_DO_CMD - @$(call do_cmd,objcxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.S FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.s FORCE_DO_CMD - @$(call do_cmd,cc,1) - -$(obj).$(TOOLSET)/%.o: $(obj)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.m FORCE_DO_CMD - @$(call do_cmd,objc,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.mm FORCE_DO_CMD - @$(call do_cmd,objcxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.S FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.s FORCE_DO_CMD - @$(call do_cmd,cc,1) - - -ifeq ($(strip $(foreach prefix,$(NO_LOAD),\ - $(findstring $(join ^,$(prefix)),\ - $(join ^,test_fatal.target.mk)))),) - include test_fatal.target.mk -endif - -quiet_cmd_regen_makefile = ACTION Regenerating $@ -cmd_regen_makefile = cd $(srcdir); /Users/trott/io.js/deps/npm/node_modules/node-gyp/gyp/gyp_main.py -fmake --ignore-environment "--toplevel-dir=." -I/Users/trott/io.js/test/addons-napi/test_fatal/build/config.gypi -I/Users/trott/io.js/deps/npm/node_modules/node-gyp/addon.gypi -I/Users/trott/io.js/common.gypi "--depth=." "-Goutput_dir=." "--generator-output=build" "-Dlibrary=shared_library" "-Dvisibility=default" "-Dnode_root_dir=/Users/trott/io.js" "-Dnode_gyp_dir=/Users/trott/io.js/deps/npm/node_modules/node-gyp" "-Dnode_lib_file=/Users/trott/io.js/$(Configuration)/node.lib" "-Dmodule_root_dir=/Users/trott/io.js/test/addons-napi/test_fatal" "-Dnode_engine=v8" binding.gyp -Makefile: $(srcdir)/../../../deps/npm/node_modules/node-gyp/addon.gypi $(srcdir)/build/config.gypi $(srcdir)/binding.gyp $(srcdir)/../../../common.gypi - $(call do_cmd,regen_makefile) - -# "all" is a concatenation of the "all" targets from all the included -# sub-makefiles. This is just here to clarify. -all: - -# Add in dependency-tracking rules. $(all_deps) is the list of every single -# target in our tree. Only consider the ones with .d (dependency) info: -d_files := $(wildcard $(foreach f,$(all_deps),$(depsdir)/$(f).d)) -ifneq ($(d_files),) - include $(d_files) -endif diff --git a/test/addons-napi/test_fatal/build/binding.Makefile b/test/addons-napi/test_fatal/build/binding.Makefile deleted file mode 100644 index c692e2fed08f96..00000000000000 --- a/test/addons-napi/test_fatal/build/binding.Makefile +++ /dev/null @@ -1,6 +0,0 @@ -# This file is generated by gyp; do not edit. - -export builddir_name ?= ./build/. -.PHONY: all -all: - $(MAKE) test_fatal diff --git a/test/addons-napi/test_fatal/build/config.gypi b/test/addons-napi/test_fatal/build/config.gypi deleted file mode 100644 index 6a35cfccbb772b..00000000000000 --- a/test/addons-napi/test_fatal/build/config.gypi +++ /dev/null @@ -1,70 +0,0 @@ -# Do not edit. 
File was generated by node-gyp's "configure" step -{ - "target_defaults": { - "cflags": [], - "default_configuration": "Release", - "defines": [], - "include_dirs": [], - "libraries": [] - }, - "variables": { - "asan": 0, - "coverage": "false", - "debug_devtools": "node", - "debug_http2": "false", - "debug_nghttp2": "false", - "force_dynamic_crt": 0, - "host_arch": "x64", - "icu_data_file": "icudt59l.dat", - "icu_data_in": "../../deps/icu-small/source/data/in/icudt59l.dat", - "icu_endianness": "l", - "icu_gyp_path": "tools/icu/icu-generic.gyp", - "icu_locales": "en,root", - "icu_path": "deps/icu-small", - "icu_small": "true", - "icu_ver_major": "59", - "llvm_version": 0, - "node_byteorder": "little", - "node_enable_d8": "false", - "node_enable_v8_vtunejit": "false", - "node_install_npm": "true", - "node_module_version": 58, - "node_no_browser_globals": "false", - "node_prefix": "/usr/local", - "node_release_urlbase": "", - "node_shared": "false", - "node_shared_cares": "false", - "node_shared_http_parser": "false", - "node_shared_libuv": "false", - "node_shared_openssl": "false", - "node_shared_zlib": "false", - "node_tag": "", - "node_use_bundled_v8": "true", - "node_use_dtrace": "true", - "node_use_etw": "false", - "node_use_lttng": "false", - "node_use_openssl": "true", - "node_use_perfctr": "false", - "node_use_v8_platform": "true", - "node_without_node_options": "false", - "openssl_fips": "", - "openssl_no_asm": 0, - "shlib_suffix": "58.dylib", - "target_arch": "x64", - "uv_parent_path": "/deps/uv/", - "uv_use_dtrace": "true", - "v8_enable_gdbjit": 0, - "v8_enable_i18n_support": 1, - "v8_enable_inspector": 1, - "v8_no_strict_aliasing": 1, - "v8_optimized_debug": 0, - "v8_promise_internal_field_count": 1, - "v8_random_seed": 0, - "v8_trace_maps": 0, - "v8_use_snapshot": "true", - "want_separate_host_toolset": 0, - "xcode_version": "8.0", - "nodedir": "/Users/trott/io.js", - "standalone_static_library": 1 - } -} diff --git a/test/addons-napi/test_fatal/build/gyp-mac-tool b/test/addons-napi/test_fatal/build/gyp-mac-tool deleted file mode 100755 index 8ef02b0493a003..00000000000000 --- a/test/addons-napi/test_fatal/build/gyp-mac-tool +++ /dev/null @@ -1,611 +0,0 @@ -#!/usr/bin/env python -# Generated by gyp. Do not edit. -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Utility functions to perform Xcode-style build steps. - -These functions are executed via gyp-mac-tool when using the Makefile generator. -""" - -import fcntl -import fnmatch -import glob -import json -import os -import plistlib -import re -import shutil -import string -import subprocess -import sys -import tempfile - - -def main(args): - executor = MacTool() - exit_code = executor.Dispatch(args) - if exit_code is not None: - sys.exit(exit_code) - - -class MacTool(object): - """This class performs all the Mac tooling steps. 
The methods can either be - executed directly, or dispatched from an argument list.""" - - def Dispatch(self, args): - """Dispatches a string command to a method.""" - if len(args) < 1: - raise Exception("Not enough arguments") - - method = "Exec%s" % self._CommandifyName(args[0]) - return getattr(self, method)(*args[1:]) - - def _CommandifyName(self, name_string): - """Transforms a tool name like copy-info-plist to CopyInfoPlist""" - return name_string.title().replace('-', '') - - def ExecCopyBundleResource(self, source, dest, convert_to_binary): - """Copies a resource file to the bundle/Resources directory, performing any - necessary compilation on each resource.""" - extension = os.path.splitext(source)[1].lower() - if os.path.isdir(source): - # Copy tree. - # TODO(thakis): This copies file attributes like mtime, while the - # single-file branch below doesn't. This should probably be changed to - # be consistent with the single-file branch. - if os.path.exists(dest): - shutil.rmtree(dest) - shutil.copytree(source, dest) - elif extension == '.xib': - return self._CopyXIBFile(source, dest) - elif extension == '.storyboard': - return self._CopyXIBFile(source, dest) - elif extension == '.strings': - self._CopyStringsFile(source, dest, convert_to_binary) - else: - shutil.copy(source, dest) - - def _CopyXIBFile(self, source, dest): - """Compiles a XIB file with ibtool into a binary plist in the bundle.""" - - # ibtool sometimes crashes with relative paths. See crbug.com/314728. - base = os.path.dirname(os.path.realpath(__file__)) - if os.path.relpath(source): - source = os.path.join(base, source) - if os.path.relpath(dest): - dest = os.path.join(base, dest) - - args = ['xcrun', 'ibtool', '--errors', '--warnings', '--notices', - '--output-format', 'human-readable-text', '--compile', dest, source] - ibtool_section_re = re.compile(r'/\*.*\*/') - ibtool_re = re.compile(r'.*note:.*is clipping its content') - ibtoolout = subprocess.Popen(args, stdout=subprocess.PIPE) - current_section_header = None - for line in ibtoolout.stdout: - if ibtool_section_re.match(line): - current_section_header = line - elif not ibtool_re.match(line): - if current_section_header: - sys.stdout.write(current_section_header) - current_section_header = None - sys.stdout.write(line) - return ibtoolout.returncode - - def _ConvertToBinary(self, dest): - subprocess.check_call([ - 'xcrun', 'plutil', '-convert', 'binary1', '-o', dest, dest]) - - def _CopyStringsFile(self, source, dest, convert_to_binary): - """Copies a .strings file using iconv to reconvert the input into UTF-16.""" - input_code = self._DetectInputEncoding(source) or "UTF-8" - - # Xcode's CpyCopyStringsFile / builtin-copyStrings seems to call - # CFPropertyListCreateFromXMLData() behind the scenes; at least it prints - # CFPropertyListCreateFromXMLData(): Old-style plist parser: missing - # semicolon in dictionary. - # on invalid files. Do the same kind of validation. - import CoreFoundation - s = open(source, 'rb').read() - d = CoreFoundation.CFDataCreate(None, s, len(s)) - _, error = CoreFoundation.CFPropertyListCreateFromXMLData(None, d, 0, None) - if error: - return - - fp = open(dest, 'wb') - fp.write(s.decode(input_code).encode('UTF-16')) - fp.close() - - if convert_to_binary == 'True': - self._ConvertToBinary(dest) - - def _DetectInputEncoding(self, file_name): - """Reads the first few bytes from file_name and tries to guess the text - encoding. 
Returns None as a guess if it can't detect it.""" - fp = open(file_name, 'rb') - try: - header = fp.read(3) - except e: - fp.close() - return None - fp.close() - if header.startswith("\xFE\xFF"): - return "UTF-16" - elif header.startswith("\xFF\xFE"): - return "UTF-16" - elif header.startswith("\xEF\xBB\xBF"): - return "UTF-8" - else: - return None - - def ExecCopyInfoPlist(self, source, dest, convert_to_binary, *keys): - """Copies the |source| Info.plist to the destination directory |dest|.""" - # Read the source Info.plist into memory. - fd = open(source, 'r') - lines = fd.read() - fd.close() - - # Insert synthesized key/value pairs (e.g. BuildMachineOSBuild). - plist = plistlib.readPlistFromString(lines) - if keys: - plist = dict(plist.items() + json.loads(keys[0]).items()) - lines = plistlib.writePlistToString(plist) - - # Go through all the environment variables and replace them as variables in - # the file. - IDENT_RE = re.compile(r'[/\s]') - for key in os.environ: - if key.startswith('_'): - continue - evar = '${%s}' % key - evalue = os.environ[key] - lines = string.replace(lines, evar, evalue) - - # Xcode supports various suffices on environment variables, which are - # all undocumented. :rfc1034identifier is used in the standard project - # template these days, and :identifier was used earlier. They are used to - # convert non-url characters into things that look like valid urls -- - # except that the replacement character for :identifier, '_' isn't valid - # in a URL either -- oops, hence :rfc1034identifier was born. - evar = '${%s:identifier}' % key - evalue = IDENT_RE.sub('_', os.environ[key]) - lines = string.replace(lines, evar, evalue) - - evar = '${%s:rfc1034identifier}' % key - evalue = IDENT_RE.sub('-', os.environ[key]) - lines = string.replace(lines, evar, evalue) - - # Remove any keys with values that haven't been replaced. - lines = lines.split('\n') - for i in range(len(lines)): - if lines[i].strip().startswith("${"): - lines[i] = None - lines[i - 1] = None - lines = '\n'.join(filter(lambda x: x is not None, lines)) - - # Write out the file with variables replaced. - fd = open(dest, 'w') - fd.write(lines) - fd.close() - - # Now write out PkgInfo file now that the Info.plist file has been - # "compiled". - self._WritePkgInfo(dest) - - if convert_to_binary == 'True': - self._ConvertToBinary(dest) - - def _WritePkgInfo(self, info_plist): - """This writes the PkgInfo file from the data stored in Info.plist.""" - plist = plistlib.readPlist(info_plist) - if not plist: - return - - # Only create PkgInfo for executable types. - package_type = plist['CFBundlePackageType'] - if package_type != 'APPL': - return - - # The format of PkgInfo is eight characters, representing the bundle type - # and bundle signature, each four characters. If that is missing, four - # '?' characters are used instead. - signature_code = plist.get('CFBundleSignature', '????') - if len(signature_code) != 4: # Wrong length resets everything, too. - signature_code = '?' * 4 - - dest = os.path.join(os.path.dirname(info_plist), 'PkgInfo') - fp = open(dest, 'w') - fp.write('%s%s' % (package_type, signature_code)) - fp.close() - - def ExecFlock(self, lockfile, *cmd_list): - """Emulates the most basic behavior of Linux's flock(1).""" - # Rely on exception handling to report errors. 
- fd = os.open(lockfile, os.O_RDONLY|os.O_NOCTTY|os.O_CREAT, 0o666) - fcntl.flock(fd, fcntl.LOCK_EX) - return subprocess.call(cmd_list) - - def ExecFilterLibtool(self, *cmd_list): - """Calls libtool and filters out '/path/to/libtool: file: foo.o has no - symbols'.""" - libtool_re = re.compile(r'^.*libtool: file: .* has no symbols$') - libtool_re5 = re.compile( - r'^.*libtool: warning for library: ' + - r'.* the table of contents is empty ' + - r'\(no object file members in the library define global symbols\)$') - env = os.environ.copy() - # Ref: - # http://www.opensource.apple.com/source/cctools/cctools-809/misc/libtool.c - # The problem with this flag is that it resets the file mtime on the file to - # epoch=0, e.g. 1970-1-1 or 1969-12-31 depending on timezone. - env['ZERO_AR_DATE'] = '1' - libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env) - _, err = libtoolout.communicate() - for line in err.splitlines(): - if not libtool_re.match(line) and not libtool_re5.match(line): - print >>sys.stderr, line - # Unconditionally touch the output .a file on the command line if present - # and the command succeeded. A bit hacky. - if not libtoolout.returncode: - for i in range(len(cmd_list) - 1): - if cmd_list[i] == "-o" and cmd_list[i+1].endswith('.a'): - os.utime(cmd_list[i+1], None) - break - return libtoolout.returncode - - def ExecPackageFramework(self, framework, version): - """Takes a path to Something.framework and the Current version of that and - sets up all the symlinks.""" - # Find the name of the binary based on the part before the ".framework". - binary = os.path.basename(framework).split('.')[0] - - CURRENT = 'Current' - RESOURCES = 'Resources' - VERSIONS = 'Versions' - - if not os.path.exists(os.path.join(framework, VERSIONS, version, binary)): - # Binary-less frameworks don't seem to contain symlinks (see e.g. - # chromium's out/Debug/org.chromium.Chromium.manifest/ bundle). - return - - # Move into the framework directory to set the symlinks correctly. - pwd = os.getcwd() - os.chdir(framework) - - # Set up the Current version. - self._Relink(version, os.path.join(VERSIONS, CURRENT)) - - # Set up the root symlinks. - self._Relink(os.path.join(VERSIONS, CURRENT, binary), binary) - self._Relink(os.path.join(VERSIONS, CURRENT, RESOURCES), RESOURCES) - - # Back to where we were before! - os.chdir(pwd) - - def _Relink(self, dest, link): - """Creates a symlink to |dest| named |link|. If |link| already exists, - it is overwritten.""" - if os.path.lexists(link): - os.remove(link) - os.symlink(dest, link) - - def ExecCompileXcassets(self, keys, *inputs): - """Compiles multiple .xcassets files into a single .car file. - - This invokes 'actool' to compile all the inputs .xcassets files. The - |keys| arguments is a json-encoded dictionary of extra arguments to - pass to 'actool' when the asset catalogs contains an application icon - or a launch image. - - Note that 'actool' does not create the Assets.car file if the asset - catalogs does not contains imageset. 
- """ - command_line = [ - 'xcrun', 'actool', '--output-format', 'human-readable-text', - '--compress-pngs', '--notices', '--warnings', '--errors', - ] - is_iphone_target = 'IPHONEOS_DEPLOYMENT_TARGET' in os.environ - if is_iphone_target: - platform = os.environ['CONFIGURATION'].split('-')[-1] - if platform not in ('iphoneos', 'iphonesimulator'): - platform = 'iphonesimulator' - command_line.extend([ - '--platform', platform, '--target-device', 'iphone', - '--target-device', 'ipad', '--minimum-deployment-target', - os.environ['IPHONEOS_DEPLOYMENT_TARGET'], '--compile', - os.path.abspath(os.environ['CONTENTS_FOLDER_PATH']), - ]) - else: - command_line.extend([ - '--platform', 'macosx', '--target-device', 'mac', - '--minimum-deployment-target', os.environ['MACOSX_DEPLOYMENT_TARGET'], - '--compile', - os.path.abspath(os.environ['UNLOCALIZED_RESOURCES_FOLDER_PATH']), - ]) - if keys: - keys = json.loads(keys) - for key, value in keys.iteritems(): - arg_name = '--' + key - if isinstance(value, bool): - if value: - command_line.append(arg_name) - elif isinstance(value, list): - for v in value: - command_line.append(arg_name) - command_line.append(str(v)) - else: - command_line.append(arg_name) - command_line.append(str(value)) - # Note: actool crashes if inputs path are relative, so use os.path.abspath - # to get absolute path name for inputs. - command_line.extend(map(os.path.abspath, inputs)) - subprocess.check_call(command_line) - - def ExecMergeInfoPlist(self, output, *inputs): - """Merge multiple .plist files into a single .plist file.""" - merged_plist = {} - for path in inputs: - plist = self._LoadPlistMaybeBinary(path) - self._MergePlist(merged_plist, plist) - plistlib.writePlist(merged_plist, output) - - def ExecCodeSignBundle(self, key, resource_rules, entitlements, provisioning): - """Code sign a bundle. - - This function tries to code sign an iOS bundle, following the same - algorithm as Xcode: - 1. copy ResourceRules.plist from the user or the SDK into the bundle, - 2. pick the provisioning profile that best match the bundle identifier, - and copy it into the bundle as embedded.mobileprovision, - 3. copy Entitlements.plist from user or SDK next to the bundle, - 4. code sign the bundle. - """ - resource_rules_path = self._InstallResourceRules(resource_rules) - substitutions, overrides = self._InstallProvisioningProfile( - provisioning, self._GetCFBundleIdentifier()) - entitlements_path = self._InstallEntitlements( - entitlements, substitutions, overrides) - subprocess.check_call([ - 'codesign', '--force', '--sign', key, '--resource-rules', - resource_rules_path, '--entitlements', entitlements_path, - os.path.join( - os.environ['TARGET_BUILD_DIR'], - os.environ['FULL_PRODUCT_NAME'])]) - - def _InstallResourceRules(self, resource_rules): - """Installs ResourceRules.plist from user or SDK into the bundle. - - Args: - resource_rules: string, optional, path to the ResourceRules.plist file - to use, default to "${SDKROOT}/ResourceRules.plist" - - Returns: - Path to the copy of ResourceRules.plist into the bundle. - """ - source_path = resource_rules - target_path = os.path.join( - os.environ['BUILT_PRODUCTS_DIR'], - os.environ['CONTENTS_FOLDER_PATH'], - 'ResourceRules.plist') - if not source_path: - source_path = os.path.join( - os.environ['SDKROOT'], 'ResourceRules.plist') - shutil.copy2(source_path, target_path) - return target_path - - def _InstallProvisioningProfile(self, profile, bundle_identifier): - """Installs embedded.mobileprovision into the bundle. 
- - Args: - profile: string, optional, short name of the .mobileprovision file - to use, if empty or the file is missing, the best file installed - will be used - bundle_identifier: string, value of CFBundleIdentifier from Info.plist - - Returns: - A tuple containing two dictionary: variables substitutions and values - to overrides when generating the entitlements file. - """ - source_path, provisioning_data, team_id = self._FindProvisioningProfile( - profile, bundle_identifier) - target_path = os.path.join( - os.environ['BUILT_PRODUCTS_DIR'], - os.environ['CONTENTS_FOLDER_PATH'], - 'embedded.mobileprovision') - shutil.copy2(source_path, target_path) - substitutions = self._GetSubstitutions(bundle_identifier, team_id + '.') - return substitutions, provisioning_data['Entitlements'] - - def _FindProvisioningProfile(self, profile, bundle_identifier): - """Finds the .mobileprovision file to use for signing the bundle. - - Checks all the installed provisioning profiles (or if the user specified - the PROVISIONING_PROFILE variable, only consult it) and select the most - specific that correspond to the bundle identifier. - - Args: - profile: string, optional, short name of the .mobileprovision file - to use, if empty or the file is missing, the best file installed - will be used - bundle_identifier: string, value of CFBundleIdentifier from Info.plist - - Returns: - A tuple of the path to the selected provisioning profile, the data of - the embedded plist in the provisioning profile and the team identifier - to use for code signing. - - Raises: - SystemExit: if no .mobileprovision can be used to sign the bundle. - """ - profiles_dir = os.path.join( - os.environ['HOME'], 'Library', 'MobileDevice', 'Provisioning Profiles') - if not os.path.isdir(profiles_dir): - print >>sys.stderr, ( - 'cannot find mobile provisioning for %s' % bundle_identifier) - sys.exit(1) - provisioning_profiles = None - if profile: - profile_path = os.path.join(profiles_dir, profile + '.mobileprovision') - if os.path.exists(profile_path): - provisioning_profiles = [profile_path] - if not provisioning_profiles: - provisioning_profiles = glob.glob( - os.path.join(profiles_dir, '*.mobileprovision')) - valid_provisioning_profiles = {} - for profile_path in provisioning_profiles: - profile_data = self._LoadProvisioningProfile(profile_path) - app_id_pattern = profile_data.get( - 'Entitlements', {}).get('application-identifier', '') - for team_identifier in profile_data.get('TeamIdentifier', []): - app_id = '%s.%s' % (team_identifier, bundle_identifier) - if fnmatch.fnmatch(app_id, app_id_pattern): - valid_provisioning_profiles[app_id_pattern] = ( - profile_path, profile_data, team_identifier) - if not valid_provisioning_profiles: - print >>sys.stderr, ( - 'cannot find mobile provisioning for %s' % bundle_identifier) - sys.exit(1) - # If the user has multiple provisioning profiles installed that can be - # used for ${bundle_identifier}, pick the most specific one (ie. the - # provisioning profile whose pattern is the longest). - selected_key = max(valid_provisioning_profiles, key=lambda v: len(v)) - return valid_provisioning_profiles[selected_key] - - def _LoadProvisioningProfile(self, profile_path): - """Extracts the plist embedded in a provisioning profile. - - Args: - profile_path: string, path to the .mobileprovision file - - Returns: - Content of the plist embedded in the provisioning profile as a dictionary. 
- """ - with tempfile.NamedTemporaryFile() as temp: - subprocess.check_call([ - 'security', 'cms', '-D', '-i', profile_path, '-o', temp.name]) - return self._LoadPlistMaybeBinary(temp.name) - - def _MergePlist(self, merged_plist, plist): - """Merge |plist| into |merged_plist|.""" - for key, value in plist.iteritems(): - if isinstance(value, dict): - merged_value = merged_plist.get(key, {}) - if isinstance(merged_value, dict): - self._MergePlist(merged_value, value) - merged_plist[key] = merged_value - else: - merged_plist[key] = value - else: - merged_plist[key] = value - - def _LoadPlistMaybeBinary(self, plist_path): - """Loads into a memory a plist possibly encoded in binary format. - - This is a wrapper around plistlib.readPlist that tries to convert the - plist to the XML format if it can't be parsed (assuming that it is in - the binary format). - - Args: - plist_path: string, path to a plist file, in XML or binary format - - Returns: - Content of the plist as a dictionary. - """ - try: - # First, try to read the file using plistlib that only supports XML, - # and if an exception is raised, convert a temporary copy to XML and - # load that copy. - return plistlib.readPlist(plist_path) - except: - pass - with tempfile.NamedTemporaryFile() as temp: - shutil.copy2(plist_path, temp.name) - subprocess.check_call(['plutil', '-convert', 'xml1', temp.name]) - return plistlib.readPlist(temp.name) - - def _GetSubstitutions(self, bundle_identifier, app_identifier_prefix): - """Constructs a dictionary of variable substitutions for Entitlements.plist. - - Args: - bundle_identifier: string, value of CFBundleIdentifier from Info.plist - app_identifier_prefix: string, value for AppIdentifierPrefix - - Returns: - Dictionary of substitutions to apply when generating Entitlements.plist. - """ - return { - 'CFBundleIdentifier': bundle_identifier, - 'AppIdentifierPrefix': app_identifier_prefix, - } - - def _GetCFBundleIdentifier(self): - """Extracts CFBundleIdentifier value from Info.plist in the bundle. - - Returns: - Value of CFBundleIdentifier in the Info.plist located in the bundle. - """ - info_plist_path = os.path.join( - os.environ['TARGET_BUILD_DIR'], - os.environ['INFOPLIST_PATH']) - info_plist_data = self._LoadPlistMaybeBinary(info_plist_path) - return info_plist_data['CFBundleIdentifier'] - - def _InstallEntitlements(self, entitlements, substitutions, overrides): - """Generates and install the ${BundleName}.xcent entitlements file. - - Expands variables "$(variable)" pattern in the source entitlements file, - add extra entitlements defined in the .mobileprovision file and the copy - the generated plist to "${BundlePath}.xcent". - - Args: - entitlements: string, optional, path to the Entitlements.plist template - to use, defaults to "${SDKROOT}/Entitlements.plist" - substitutions: dictionary, variable substitutions - overrides: dictionary, values to add to the entitlements - - Returns: - Path to the generated entitlements file. 
- """ - source_path = entitlements - target_path = os.path.join( - os.environ['BUILT_PRODUCTS_DIR'], - os.environ['PRODUCT_NAME'] + '.xcent') - if not source_path: - source_path = os.path.join( - os.environ['SDKROOT'], - 'Entitlements.plist') - shutil.copy2(source_path, target_path) - data = self._LoadPlistMaybeBinary(target_path) - data = self._ExpandVariables(data, substitutions) - if overrides: - for key in overrides: - if key not in data: - data[key] = overrides[key] - plistlib.writePlist(data, target_path) - return target_path - - def _ExpandVariables(self, data, substitutions): - """Expands variables "$(variable)" in data. - - Args: - data: object, can be either string, list or dictionary - substitutions: dictionary, variable substitutions to perform - - Returns: - Copy of data where each references to "$(variable)" has been replaced - by the corresponding value found in substitutions, or left intact if - the key was not found. - """ - if isinstance(data, str): - for key, value in substitutions.iteritems(): - data = data.replace('$(%s)' % key, value) - return data - if isinstance(data, list): - return [self._ExpandVariables(v, substitutions) for v in data] - if isinstance(data, dict): - return {k: self._ExpandVariables(data[k], substitutions) for k in data} - return data - -if __name__ == '__main__': - sys.exit(main(sys.argv[1:])) diff --git a/test/addons-napi/test_fatal/build/test_fatal.target.mk b/test/addons-napi/test_fatal/build/test_fatal.target.mk deleted file mode 100644 index e050fd9d808389..00000000000000 --- a/test/addons-napi/test_fatal/build/test_fatal.target.mk +++ /dev/null @@ -1,178 +0,0 @@ -# This file is generated by gyp; do not edit. - -TOOLSET := target -TARGET := test_fatal -DEFS_Debug := \ - '-DNODE_GYP_MODULE_NAME=test_fatal' \ - '-DUSING_UV_SHARED=1' \ - '-DUSING_V8_SHARED=1' \ - '-DV8_DEPRECATION_WARNINGS=1' \ - '-D_DARWIN_USE_64_BIT_INODE=1' \ - '-D_LARGEFILE_SOURCE' \ - '-D_FILE_OFFSET_BITS=64' \ - '-DBUILDING_NODE_EXTENSION' \ - '-DDEBUG' \ - '-D_DEBUG' \ - '-DV8_ENABLE_CHECKS' - -# Flags passed to all source files. -CFLAGS_Debug := \ - -O0 \ - -gdwarf-2 \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -Wall \ - -Wendif-labels \ - -W \ - -Wno-unused-parameter - -# Flags passed to only C files. -CFLAGS_C_Debug := \ - -fno-strict-aliasing - -# Flags passed to only C++ files. -CFLAGS_CC_Debug := \ - -std=gnu++0x \ - -stdlib=libc++ \ - -fno-rtti \ - -fno-exceptions \ - -fno-threadsafe-statics \ - -fno-strict-aliasing - -# Flags passed to only ObjC files. -CFLAGS_OBJC_Debug := - -# Flags passed to only ObjC++ files. -CFLAGS_OBJCC_Debug := - -INCS_Debug := \ - -I/Users/trott/io.js/include/node \ - -I/Users/trott/io.js/src \ - -I/Users/trott/io.js/deps/uv/include \ - -I/Users/trott/io.js/deps/v8/include - -DEFS_Release := \ - '-DNODE_GYP_MODULE_NAME=test_fatal' \ - '-DUSING_UV_SHARED=1' \ - '-DUSING_V8_SHARED=1' \ - '-DV8_DEPRECATION_WARNINGS=1' \ - '-D_DARWIN_USE_64_BIT_INODE=1' \ - '-D_LARGEFILE_SOURCE' \ - '-D_FILE_OFFSET_BITS=64' \ - '-DBUILDING_NODE_EXTENSION' - -# Flags passed to all source files. -CFLAGS_Release := \ - -Os \ - -gdwarf-2 \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -Wall \ - -Wendif-labels \ - -W \ - -Wno-unused-parameter - -# Flags passed to only C files. -CFLAGS_C_Release := \ - -fno-strict-aliasing - -# Flags passed to only C++ files. -CFLAGS_CC_Release := \ - -std=gnu++0x \ - -stdlib=libc++ \ - -fno-rtti \ - -fno-exceptions \ - -fno-threadsafe-statics \ - -fno-strict-aliasing - -# Flags passed to only ObjC files. 
-CFLAGS_OBJC_Release := - -# Flags passed to only ObjC++ files. -CFLAGS_OBJCC_Release := - -INCS_Release := \ - -I/Users/trott/io.js/include/node \ - -I/Users/trott/io.js/src \ - -I/Users/trott/io.js/deps/uv/include \ - -I/Users/trott/io.js/deps/v8/include - -OBJS := \ - $(obj).target/$(TARGET)/test_fatal.o - -# Add to the list of files we specially track dependencies for. -all_deps += $(OBJS) - -# CFLAGS et al overrides must be target-local. -# See "Target-specific Variable Values" in the GNU Make manual. -$(OBJS): TOOLSET := $(TOOLSET) -$(OBJS): GYP_CFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) -$(OBJS): GYP_CXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) -$(OBJS): GYP_OBJCFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) $(CFLAGS_OBJC_$(BUILDTYPE)) -$(OBJS): GYP_OBJCXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) $(CFLAGS_OBJCC_$(BUILDTYPE)) - -# Suffix rules, putting all outputs into $(obj). - -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) - -# Try building from generated source, too. - -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) - -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) - -# End of this set of suffix rules -### Rules for final target. -LDFLAGS_Debug := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -L$(builddir) \ - -stdlib=libc++ - -LIBTOOLFLAGS_Debug := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first - -LDFLAGS_Release := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -L$(builddir) \ - -stdlib=libc++ - -LIBTOOLFLAGS_Release := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first - -LIBS := - -$(builddir)/test_fatal.node: GYP_LDFLAGS := $(LDFLAGS_$(BUILDTYPE)) -$(builddir)/test_fatal.node: LIBS := $(LIBS) -$(builddir)/test_fatal.node: GYP_LIBTOOLFLAGS := $(LIBTOOLFLAGS_$(BUILDTYPE)) -$(builddir)/test_fatal.node: TOOLSET := $(TOOLSET) -$(builddir)/test_fatal.node: $(OBJS) FORCE_DO_CMD - $(call do_cmd,solink_module) - -all_deps += $(builddir)/test_fatal.node -# Add target alias -.PHONY: test_fatal -test_fatal: $(builddir)/test_fatal.node - -# Short alias for building this executable. -.PHONY: test_fatal.node -test_fatal.node: $(builddir)/test_fatal.node - -# Add executable to "all" target. -.PHONY: all -all: $(builddir)/test_fatal.node - diff --git a/test/addons-napi/test_function/build/Makefile b/test/addons-napi/test_function/build/Makefile deleted file mode 100644 index d2a27e8fa59336..00000000000000 --- a/test/addons-napi/test_function/build/Makefile +++ /dev/null @@ -1,342 +0,0 @@ -# We borrow heavily from the kernel build setup, though we are simpler since -# we don't have Kconfig tweaking settings on us. - -# The implicit make rules have it looking for RCS files, among other things. -# We instead explicitly write all the rules we care about. -# It's even quicker (saves ~200ms) to pass -r on the command line. -MAKEFLAGS=-r - -# The source directory tree. -srcdir := .. -abs_srcdir := $(abspath $(srcdir)) - -# The name of the builddir. -builddir_name ?= . - -# The V=1 flag on command line makes us verbosely print command lines. 
-ifdef V - quiet= -else - quiet=quiet_ -endif - -# Specify BUILDTYPE=Release on the command line for a release build. -BUILDTYPE ?= Release - -# Directory all our build output goes into. -# Note that this must be two directories beneath src/ for unit tests to pass, -# as they reach into the src/ directory for data with relative paths. -builddir ?= $(builddir_name)/$(BUILDTYPE) -abs_builddir := $(abspath $(builddir)) -depsdir := $(builddir)/.deps - -# Object output directory. -obj := $(builddir)/obj -abs_obj := $(abspath $(obj)) - -# We build up a list of every single one of the targets so we can slurp in the -# generated dependency rule Makefiles in one pass. -all_deps := - - - -CC.target ?= $(CC) -CFLAGS.target ?= $(CPPFLAGS) $(CFLAGS) -CXX.target ?= $(CXX) -CXXFLAGS.target ?= $(CPPFLAGS) $(CXXFLAGS) -LINK.target ?= $(LINK) -LDFLAGS.target ?= $(LDFLAGS) -AR.target ?= $(AR) - -# C++ apps need to be linked with g++. -LINK ?= $(CXX.target) - -# TODO(evan): move all cross-compilation logic to gyp-time so we don't need -# to replicate this environment fallback in make as well. -CC.host ?= gcc -CFLAGS.host ?= $(CPPFLAGS_host) $(CFLAGS_host) -CXX.host ?= g++ -CXXFLAGS.host ?= $(CPPFLAGS_host) $(CXXFLAGS_host) -LINK.host ?= $(CXX.host) -LDFLAGS.host ?= -AR.host ?= ar - -# Define a dir function that can handle spaces. -# http://www.gnu.org/software/make/manual/make.html#Syntax-of-Functions -# "leading spaces cannot appear in the text of the first argument as written. -# These characters can be put into the argument value by variable substitution." -empty := -space := $(empty) $(empty) - -# http://stackoverflow.com/questions/1189781/using-make-dir-or-notdir-on-a-path-with-spaces -replace_spaces = $(subst $(space),?,$1) -unreplace_spaces = $(subst ?,$(space),$1) -dirx = $(call unreplace_spaces,$(dir $(call replace_spaces,$1))) - -# Flags to make gcc output dependency info. Note that you need to be -# careful here to use the flags that ccache and distcc can understand. -# We write to a dep file on the side first and then rename at the end -# so we can't end up with a broken dep file. -depfile = $(depsdir)/$(call replace_spaces,$@).d -DEPFLAGS = -MMD -MF $(depfile).raw - -# We have to fixup the deps output in a few ways. -# (1) the file output should mention the proper .o file. -# ccache or distcc lose the path to the target, so we convert a rule of -# the form: -# foobar.o: DEP1 DEP2 -# into -# path/to/foobar.o: DEP1 DEP2 -# (2) we want missing files not to cause us to fail to build. -# We want to rewrite -# foobar.o: DEP1 DEP2 \ -# DEP3 -# to -# DEP1: -# DEP2: -# DEP3: -# so if the files are missing, they're just considered phony rules. -# We have to do some pretty insane escaping to get those backslashes -# and dollar signs past make, the shell, and sed at the same time. -# Doesn't work with spaces, but that's fine: .d files have spaces in -# their names replaced with other characters. -define fixup_dep -# The depfile may not exist if the input file didn't have any #includes. -touch $(depfile).raw -# Fixup path as in (1). -sed -e "s|^$(notdir $@)|$@|" $(depfile).raw >> $(depfile) -# Add extra rules as in (2). -# We remove slashes and replace spaces with new lines; -# remove blank lines; -# delete the first line and append a colon to the remaining lines. 
-sed -e 's|\\||' -e 'y| |\n|' $(depfile).raw |\ - grep -v '^$$' |\ - sed -e 1d -e 's|$$|:|' \ - >> $(depfile) -rm $(depfile).raw -endef - -# Command definitions: -# - cmd_foo is the actual command to run; -# - quiet_cmd_foo is the brief-output summary of the command. - -quiet_cmd_cc = CC($(TOOLSET)) $@ -cmd_cc = $(CC.$(TOOLSET)) $(GYP_CFLAGS) $(DEPFLAGS) $(CFLAGS.$(TOOLSET)) -c -o $@ $< - -quiet_cmd_cxx = CXX($(TOOLSET)) $@ -cmd_cxx = $(CXX.$(TOOLSET)) $(GYP_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< - -quiet_cmd_objc = CXX($(TOOLSET)) $@ -cmd_objc = $(CC.$(TOOLSET)) $(GYP_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $< - -quiet_cmd_objcxx = CXX($(TOOLSET)) $@ -cmd_objcxx = $(CXX.$(TOOLSET)) $(GYP_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $< - -# Commands for precompiled header files. -quiet_cmd_pch_c = CXX($(TOOLSET)) $@ -cmd_pch_c = $(CC.$(TOOLSET)) $(GYP_PCH_CFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< -quiet_cmd_pch_cc = CXX($(TOOLSET)) $@ -cmd_pch_cc = $(CC.$(TOOLSET)) $(GYP_PCH_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< -quiet_cmd_pch_m = CXX($(TOOLSET)) $@ -cmd_pch_m = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $< -quiet_cmd_pch_mm = CXX($(TOOLSET)) $@ -cmd_pch_mm = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $< - -# gyp-mac-tool is written next to the root Makefile by gyp. -# Use $(4) for the command, since $(2) and $(3) are used as flag by do_cmd -# already. -quiet_cmd_mac_tool = MACTOOL $(4) $< -cmd_mac_tool = ./gyp-mac-tool $(4) $< "$@" - -quiet_cmd_mac_package_framework = PACKAGE FRAMEWORK $@ -cmd_mac_package_framework = ./gyp-mac-tool package-framework "$@" $(4) - -quiet_cmd_infoplist = INFOPLIST $@ -cmd_infoplist = $(CC.$(TOOLSET)) -E -P -Wno-trigraphs -x c $(INFOPLIST_DEFINES) "$<" -o "$@" - -quiet_cmd_touch = TOUCH $@ -cmd_touch = touch $@ - -quiet_cmd_copy = COPY $@ -# send stderr to /dev/null to ignore messages when linking directories. -cmd_copy = rm -rf "$@" && cp -af "$<" "$@" - -quiet_cmd_alink = LIBTOOL-STATIC $@ -cmd_alink = rm -f $@ && ./gyp-mac-tool filter-libtool libtool $(GYP_LIBTOOLFLAGS) -static -o $@ $(filter %.o,$^) - -quiet_cmd_link = LINK($(TOOLSET)) $@ -cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS) - -quiet_cmd_solink = SOLINK($(TOOLSET)) $@ -cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS) - -quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@ -cmd_solink_module = $(LINK.$(TOOLSET)) -bundle $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS) - - -# Define an escape_quotes function to escape single quotes. -# This allows us to handle quotes properly as long as we always use -# use single quotes and escape_quotes. -escape_quotes = $(subst ','\'',$(1)) -# This comment is here just to include a ' to unconfuse syntax highlighting. -# Define an escape_vars function to escape '$' variable syntax. -# This allows us to read/write command lines with shell variables (e.g. -# $LD_LIBRARY_PATH), without triggering make substitution. -escape_vars = $(subst $$,$$$$,$(1)) -# Helper that expands to a shell command to echo a string exactly as it is in -# make. This uses printf instead of echo because printf's behaviour with respect -# to escape sequences is more portable than echo's across different shells -# (e.g., dash, bash). 
-exact_echo = printf '%s\n' '$(call escape_quotes,$(1))' - -# Helper to compare the command we're about to run against the command -# we logged the last time we ran the command. Produces an empty -# string (false) when the commands match. -# Tricky point: Make has no string-equality test function. -# The kernel uses the following, but it seems like it would have false -# positives, where one string reordered its arguments. -# arg_check = $(strip $(filter-out $(cmd_$(1)), $(cmd_$@)) \ -# $(filter-out $(cmd_$@), $(cmd_$(1)))) -# We instead substitute each for the empty string into the other, and -# say they're equal if both substitutions produce the empty string. -# .d files contain ? instead of spaces, take that into account. -command_changed = $(or $(subst $(cmd_$(1)),,$(cmd_$(call replace_spaces,$@))),\ - $(subst $(cmd_$(call replace_spaces,$@)),,$(cmd_$(1)))) - -# Helper that is non-empty when a prerequisite changes. -# Normally make does this implicitly, but we force rules to always run -# so we can check their command lines. -# $? -- new prerequisites -# $| -- order-only dependencies -prereq_changed = $(filter-out FORCE_DO_CMD,$(filter-out $|,$?)) - -# Helper that executes all postbuilds until one fails. -define do_postbuilds - @E=0;\ - for p in $(POSTBUILDS); do\ - eval $$p;\ - E=$$?;\ - if [ $$E -ne 0 ]; then\ - break;\ - fi;\ - done;\ - if [ $$E -ne 0 ]; then\ - rm -rf "$@";\ - exit $$E;\ - fi -endef - -# do_cmd: run a command via the above cmd_foo names, if necessary. -# Should always run for a given target to handle command-line changes. -# Second argument, if non-zero, makes it do asm/C/C++ dependency munging. -# Third argument, if non-zero, makes it do POSTBUILDS processing. -# Note: We intentionally do NOT call dirx for depfile, since it contains ? for -# spaces already and dirx strips the ? characters. -define do_cmd -$(if $(or $(command_changed),$(prereq_changed)), - @$(call exact_echo, $($(quiet)cmd_$(1))) - @mkdir -p "$(call dirx,$@)" "$(dir $(depfile))" - $(if $(findstring flock,$(word 2,$(cmd_$1))), - @$(cmd_$(1)) - @echo " $(quiet_cmd_$(1)): Finished", - @$(cmd_$(1)) - ) - @$(call exact_echo,$(call escape_vars,cmd_$(call replace_spaces,$@) := $(cmd_$(1)))) > $(depfile) - @$(if $(2),$(fixup_dep)) - $(if $(and $(3), $(POSTBUILDS)), - $(call do_postbuilds) - ) -) -endef - -# Declare the "all" target first so it is the default, -# even though we don't have the deps yet. -.PHONY: all -all: - -# make looks for ways to re-generate included makefiles, but in our case, we -# don't have a direct way. Explicitly telling make that it has nothing to do -# for them makes it go faster. -%.d: ; - -# Use FORCE_DO_CMD to force a target to run. Should be coupled with -# do_cmd. -.PHONY: FORCE_DO_CMD -FORCE_DO_CMD: - -TOOLSET := target -# Suffix rules, putting all outputs into $(obj). -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.m FORCE_DO_CMD - @$(call do_cmd,objc,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.mm FORCE_DO_CMD - @$(call do_cmd,objcxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.S FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.s FORCE_DO_CMD - @$(call do_cmd,cc,1) - -# Try building from generated source, too. 
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.m FORCE_DO_CMD - @$(call do_cmd,objc,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.mm FORCE_DO_CMD - @$(call do_cmd,objcxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.S FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.s FORCE_DO_CMD - @$(call do_cmd,cc,1) - -$(obj).$(TOOLSET)/%.o: $(obj)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.m FORCE_DO_CMD - @$(call do_cmd,objc,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.mm FORCE_DO_CMD - @$(call do_cmd,objcxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.S FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.s FORCE_DO_CMD - @$(call do_cmd,cc,1) - - -ifeq ($(strip $(foreach prefix,$(NO_LOAD),\ - $(findstring $(join ^,$(prefix)),\ - $(join ^,test_function.target.mk)))),) - include test_function.target.mk -endif - -quiet_cmd_regen_makefile = ACTION Regenerating $@ -cmd_regen_makefile = cd $(srcdir); /Users/trott/io.js/deps/npm/node_modules/node-gyp/gyp/gyp_main.py -fmake --ignore-environment "--toplevel-dir=." -I/Users/trott/io.js/test/addons-napi/test_function/build/config.gypi -I/Users/trott/io.js/deps/npm/node_modules/node-gyp/addon.gypi -I/Users/trott/io.js/common.gypi "--depth=." "-Goutput_dir=." "--generator-output=build" "-Dlibrary=shared_library" "-Dvisibility=default" "-Dnode_root_dir=/Users/trott/io.js" "-Dnode_gyp_dir=/Users/trott/io.js/deps/npm/node_modules/node-gyp" "-Dnode_lib_file=/Users/trott/io.js/$(Configuration)/node.lib" "-Dmodule_root_dir=/Users/trott/io.js/test/addons-napi/test_function" "-Dnode_engine=v8" binding.gyp -Makefile: $(srcdir)/../../../deps/npm/node_modules/node-gyp/addon.gypi $(srcdir)/build/config.gypi $(srcdir)/binding.gyp $(srcdir)/../../../common.gypi - $(call do_cmd,regen_makefile) - -# "all" is a concatenation of the "all" targets from all the included -# sub-makefiles. This is just here to clarify. -all: - -# Add in dependency-tracking rules. $(all_deps) is the list of every single -# target in our tree. Only consider the ones with .d (dependency) info: -d_files := $(wildcard $(foreach f,$(all_deps),$(depsdir)/$(f).d)) -ifneq ($(d_files),) - include $(d_files) -endif diff --git a/test/addons-napi/test_function/build/binding.Makefile b/test/addons-napi/test_function/build/binding.Makefile deleted file mode 100644 index 27c9b820226cc4..00000000000000 --- a/test/addons-napi/test_function/build/binding.Makefile +++ /dev/null @@ -1,6 +0,0 @@ -# This file is generated by gyp; do not edit. - -export builddir_name ?= ./build/. -.PHONY: all -all: - $(MAKE) test_function diff --git a/test/addons-napi/test_function/build/config.gypi b/test/addons-napi/test_function/build/config.gypi deleted file mode 100644 index 6a35cfccbb772b..00000000000000 --- a/test/addons-napi/test_function/build/config.gypi +++ /dev/null @@ -1,70 +0,0 @@ -# Do not edit. 
File was generated by node-gyp's "configure" step -{ - "target_defaults": { - "cflags": [], - "default_configuration": "Release", - "defines": [], - "include_dirs": [], - "libraries": [] - }, - "variables": { - "asan": 0, - "coverage": "false", - "debug_devtools": "node", - "debug_http2": "false", - "debug_nghttp2": "false", - "force_dynamic_crt": 0, - "host_arch": "x64", - "icu_data_file": "icudt59l.dat", - "icu_data_in": "../../deps/icu-small/source/data/in/icudt59l.dat", - "icu_endianness": "l", - "icu_gyp_path": "tools/icu/icu-generic.gyp", - "icu_locales": "en,root", - "icu_path": "deps/icu-small", - "icu_small": "true", - "icu_ver_major": "59", - "llvm_version": 0, - "node_byteorder": "little", - "node_enable_d8": "false", - "node_enable_v8_vtunejit": "false", - "node_install_npm": "true", - "node_module_version": 58, - "node_no_browser_globals": "false", - "node_prefix": "/usr/local", - "node_release_urlbase": "", - "node_shared": "false", - "node_shared_cares": "false", - "node_shared_http_parser": "false", - "node_shared_libuv": "false", - "node_shared_openssl": "false", - "node_shared_zlib": "false", - "node_tag": "", - "node_use_bundled_v8": "true", - "node_use_dtrace": "true", - "node_use_etw": "false", - "node_use_lttng": "false", - "node_use_openssl": "true", - "node_use_perfctr": "false", - "node_use_v8_platform": "true", - "node_without_node_options": "false", - "openssl_fips": "", - "openssl_no_asm": 0, - "shlib_suffix": "58.dylib", - "target_arch": "x64", - "uv_parent_path": "/deps/uv/", - "uv_use_dtrace": "true", - "v8_enable_gdbjit": 0, - "v8_enable_i18n_support": 1, - "v8_enable_inspector": 1, - "v8_no_strict_aliasing": 1, - "v8_optimized_debug": 0, - "v8_promise_internal_field_count": 1, - "v8_random_seed": 0, - "v8_trace_maps": 0, - "v8_use_snapshot": "true", - "want_separate_host_toolset": 0, - "xcode_version": "8.0", - "nodedir": "/Users/trott/io.js", - "standalone_static_library": 1 - } -} diff --git a/test/addons-napi/test_function/build/gyp-mac-tool b/test/addons-napi/test_function/build/gyp-mac-tool deleted file mode 100755 index 8ef02b0493a003..00000000000000 --- a/test/addons-napi/test_function/build/gyp-mac-tool +++ /dev/null @@ -1,611 +0,0 @@ -#!/usr/bin/env python -# Generated by gyp. Do not edit. -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Utility functions to perform Xcode-style build steps. - -These functions are executed via gyp-mac-tool when using the Makefile generator. -""" - -import fcntl -import fnmatch -import glob -import json -import os -import plistlib -import re -import shutil -import string -import subprocess -import sys -import tempfile - - -def main(args): - executor = MacTool() - exit_code = executor.Dispatch(args) - if exit_code is not None: - sys.exit(exit_code) - - -class MacTool(object): - """This class performs all the Mac tooling steps. 
The methods can either be - executed directly, or dispatched from an argument list.""" - - def Dispatch(self, args): - """Dispatches a string command to a method.""" - if len(args) < 1: - raise Exception("Not enough arguments") - - method = "Exec%s" % self._CommandifyName(args[0]) - return getattr(self, method)(*args[1:]) - - def _CommandifyName(self, name_string): - """Transforms a tool name like copy-info-plist to CopyInfoPlist""" - return name_string.title().replace('-', '') - - def ExecCopyBundleResource(self, source, dest, convert_to_binary): - """Copies a resource file to the bundle/Resources directory, performing any - necessary compilation on each resource.""" - extension = os.path.splitext(source)[1].lower() - if os.path.isdir(source): - # Copy tree. - # TODO(thakis): This copies file attributes like mtime, while the - # single-file branch below doesn't. This should probably be changed to - # be consistent with the single-file branch. - if os.path.exists(dest): - shutil.rmtree(dest) - shutil.copytree(source, dest) - elif extension == '.xib': - return self._CopyXIBFile(source, dest) - elif extension == '.storyboard': - return self._CopyXIBFile(source, dest) - elif extension == '.strings': - self._CopyStringsFile(source, dest, convert_to_binary) - else: - shutil.copy(source, dest) - - def _CopyXIBFile(self, source, dest): - """Compiles a XIB file with ibtool into a binary plist in the bundle.""" - - # ibtool sometimes crashes with relative paths. See crbug.com/314728. - base = os.path.dirname(os.path.realpath(__file__)) - if os.path.relpath(source): - source = os.path.join(base, source) - if os.path.relpath(dest): - dest = os.path.join(base, dest) - - args = ['xcrun', 'ibtool', '--errors', '--warnings', '--notices', - '--output-format', 'human-readable-text', '--compile', dest, source] - ibtool_section_re = re.compile(r'/\*.*\*/') - ibtool_re = re.compile(r'.*note:.*is clipping its content') - ibtoolout = subprocess.Popen(args, stdout=subprocess.PIPE) - current_section_header = None - for line in ibtoolout.stdout: - if ibtool_section_re.match(line): - current_section_header = line - elif not ibtool_re.match(line): - if current_section_header: - sys.stdout.write(current_section_header) - current_section_header = None - sys.stdout.write(line) - return ibtoolout.returncode - - def _ConvertToBinary(self, dest): - subprocess.check_call([ - 'xcrun', 'plutil', '-convert', 'binary1', '-o', dest, dest]) - - def _CopyStringsFile(self, source, dest, convert_to_binary): - """Copies a .strings file using iconv to reconvert the input into UTF-16.""" - input_code = self._DetectInputEncoding(source) or "UTF-8" - - # Xcode's CpyCopyStringsFile / builtin-copyStrings seems to call - # CFPropertyListCreateFromXMLData() behind the scenes; at least it prints - # CFPropertyListCreateFromXMLData(): Old-style plist parser: missing - # semicolon in dictionary. - # on invalid files. Do the same kind of validation. - import CoreFoundation - s = open(source, 'rb').read() - d = CoreFoundation.CFDataCreate(None, s, len(s)) - _, error = CoreFoundation.CFPropertyListCreateFromXMLData(None, d, 0, None) - if error: - return - - fp = open(dest, 'wb') - fp.write(s.decode(input_code).encode('UTF-16')) - fp.close() - - if convert_to_binary == 'True': - self._ConvertToBinary(dest) - - def _DetectInputEncoding(self, file_name): - """Reads the first few bytes from file_name and tries to guess the text - encoding. 
Returns None as a guess if it can't detect it.""" - fp = open(file_name, 'rb') - try: - header = fp.read(3) - except e: - fp.close() - return None - fp.close() - if header.startswith("\xFE\xFF"): - return "UTF-16" - elif header.startswith("\xFF\xFE"): - return "UTF-16" - elif header.startswith("\xEF\xBB\xBF"): - return "UTF-8" - else: - return None - - def ExecCopyInfoPlist(self, source, dest, convert_to_binary, *keys): - """Copies the |source| Info.plist to the destination directory |dest|.""" - # Read the source Info.plist into memory. - fd = open(source, 'r') - lines = fd.read() - fd.close() - - # Insert synthesized key/value pairs (e.g. BuildMachineOSBuild). - plist = plistlib.readPlistFromString(lines) - if keys: - plist = dict(plist.items() + json.loads(keys[0]).items()) - lines = plistlib.writePlistToString(plist) - - # Go through all the environment variables and replace them as variables in - # the file. - IDENT_RE = re.compile(r'[/\s]') - for key in os.environ: - if key.startswith('_'): - continue - evar = '${%s}' % key - evalue = os.environ[key] - lines = string.replace(lines, evar, evalue) - - # Xcode supports various suffices on environment variables, which are - # all undocumented. :rfc1034identifier is used in the standard project - # template these days, and :identifier was used earlier. They are used to - # convert non-url characters into things that look like valid urls -- - # except that the replacement character for :identifier, '_' isn't valid - # in a URL either -- oops, hence :rfc1034identifier was born. - evar = '${%s:identifier}' % key - evalue = IDENT_RE.sub('_', os.environ[key]) - lines = string.replace(lines, evar, evalue) - - evar = '${%s:rfc1034identifier}' % key - evalue = IDENT_RE.sub('-', os.environ[key]) - lines = string.replace(lines, evar, evalue) - - # Remove any keys with values that haven't been replaced. - lines = lines.split('\n') - for i in range(len(lines)): - if lines[i].strip().startswith("${"): - lines[i] = None - lines[i - 1] = None - lines = '\n'.join(filter(lambda x: x is not None, lines)) - - # Write out the file with variables replaced. - fd = open(dest, 'w') - fd.write(lines) - fd.close() - - # Now write out PkgInfo file now that the Info.plist file has been - # "compiled". - self._WritePkgInfo(dest) - - if convert_to_binary == 'True': - self._ConvertToBinary(dest) - - def _WritePkgInfo(self, info_plist): - """This writes the PkgInfo file from the data stored in Info.plist.""" - plist = plistlib.readPlist(info_plist) - if not plist: - return - - # Only create PkgInfo for executable types. - package_type = plist['CFBundlePackageType'] - if package_type != 'APPL': - return - - # The format of PkgInfo is eight characters, representing the bundle type - # and bundle signature, each four characters. If that is missing, four - # '?' characters are used instead. - signature_code = plist.get('CFBundleSignature', '????') - if len(signature_code) != 4: # Wrong length resets everything, too. - signature_code = '?' * 4 - - dest = os.path.join(os.path.dirname(info_plist), 'PkgInfo') - fp = open(dest, 'w') - fp.write('%s%s' % (package_type, signature_code)) - fp.close() - - def ExecFlock(self, lockfile, *cmd_list): - """Emulates the most basic behavior of Linux's flock(1).""" - # Rely on exception handling to report errors. 
- fd = os.open(lockfile, os.O_RDONLY|os.O_NOCTTY|os.O_CREAT, 0o666) - fcntl.flock(fd, fcntl.LOCK_EX) - return subprocess.call(cmd_list) - - def ExecFilterLibtool(self, *cmd_list): - """Calls libtool and filters out '/path/to/libtool: file: foo.o has no - symbols'.""" - libtool_re = re.compile(r'^.*libtool: file: .* has no symbols$') - libtool_re5 = re.compile( - r'^.*libtool: warning for library: ' + - r'.* the table of contents is empty ' + - r'\(no object file members in the library define global symbols\)$') - env = os.environ.copy() - # Ref: - # http://www.opensource.apple.com/source/cctools/cctools-809/misc/libtool.c - # The problem with this flag is that it resets the file mtime on the file to - # epoch=0, e.g. 1970-1-1 or 1969-12-31 depending on timezone. - env['ZERO_AR_DATE'] = '1' - libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env) - _, err = libtoolout.communicate() - for line in err.splitlines(): - if not libtool_re.match(line) and not libtool_re5.match(line): - print >>sys.stderr, line - # Unconditionally touch the output .a file on the command line if present - # and the command succeeded. A bit hacky. - if not libtoolout.returncode: - for i in range(len(cmd_list) - 1): - if cmd_list[i] == "-o" and cmd_list[i+1].endswith('.a'): - os.utime(cmd_list[i+1], None) - break - return libtoolout.returncode - - def ExecPackageFramework(self, framework, version): - """Takes a path to Something.framework and the Current version of that and - sets up all the symlinks.""" - # Find the name of the binary based on the part before the ".framework". - binary = os.path.basename(framework).split('.')[0] - - CURRENT = 'Current' - RESOURCES = 'Resources' - VERSIONS = 'Versions' - - if not os.path.exists(os.path.join(framework, VERSIONS, version, binary)): - # Binary-less frameworks don't seem to contain symlinks (see e.g. - # chromium's out/Debug/org.chromium.Chromium.manifest/ bundle). - return - - # Move into the framework directory to set the symlinks correctly. - pwd = os.getcwd() - os.chdir(framework) - - # Set up the Current version. - self._Relink(version, os.path.join(VERSIONS, CURRENT)) - - # Set up the root symlinks. - self._Relink(os.path.join(VERSIONS, CURRENT, binary), binary) - self._Relink(os.path.join(VERSIONS, CURRENT, RESOURCES), RESOURCES) - - # Back to where we were before! - os.chdir(pwd) - - def _Relink(self, dest, link): - """Creates a symlink to |dest| named |link|. If |link| already exists, - it is overwritten.""" - if os.path.lexists(link): - os.remove(link) - os.symlink(dest, link) - - def ExecCompileXcassets(self, keys, *inputs): - """Compiles multiple .xcassets files into a single .car file. - - This invokes 'actool' to compile all the inputs .xcassets files. The - |keys| arguments is a json-encoded dictionary of extra arguments to - pass to 'actool' when the asset catalogs contains an application icon - or a launch image. - - Note that 'actool' does not create the Assets.car file if the asset - catalogs does not contains imageset. 
- """ - command_line = [ - 'xcrun', 'actool', '--output-format', 'human-readable-text', - '--compress-pngs', '--notices', '--warnings', '--errors', - ] - is_iphone_target = 'IPHONEOS_DEPLOYMENT_TARGET' in os.environ - if is_iphone_target: - platform = os.environ['CONFIGURATION'].split('-')[-1] - if platform not in ('iphoneos', 'iphonesimulator'): - platform = 'iphonesimulator' - command_line.extend([ - '--platform', platform, '--target-device', 'iphone', - '--target-device', 'ipad', '--minimum-deployment-target', - os.environ['IPHONEOS_DEPLOYMENT_TARGET'], '--compile', - os.path.abspath(os.environ['CONTENTS_FOLDER_PATH']), - ]) - else: - command_line.extend([ - '--platform', 'macosx', '--target-device', 'mac', - '--minimum-deployment-target', os.environ['MACOSX_DEPLOYMENT_TARGET'], - '--compile', - os.path.abspath(os.environ['UNLOCALIZED_RESOURCES_FOLDER_PATH']), - ]) - if keys: - keys = json.loads(keys) - for key, value in keys.iteritems(): - arg_name = '--' + key - if isinstance(value, bool): - if value: - command_line.append(arg_name) - elif isinstance(value, list): - for v in value: - command_line.append(arg_name) - command_line.append(str(v)) - else: - command_line.append(arg_name) - command_line.append(str(value)) - # Note: actool crashes if inputs path are relative, so use os.path.abspath - # to get absolute path name for inputs. - command_line.extend(map(os.path.abspath, inputs)) - subprocess.check_call(command_line) - - def ExecMergeInfoPlist(self, output, *inputs): - """Merge multiple .plist files into a single .plist file.""" - merged_plist = {} - for path in inputs: - plist = self._LoadPlistMaybeBinary(path) - self._MergePlist(merged_plist, plist) - plistlib.writePlist(merged_plist, output) - - def ExecCodeSignBundle(self, key, resource_rules, entitlements, provisioning): - """Code sign a bundle. - - This function tries to code sign an iOS bundle, following the same - algorithm as Xcode: - 1. copy ResourceRules.plist from the user or the SDK into the bundle, - 2. pick the provisioning profile that best match the bundle identifier, - and copy it into the bundle as embedded.mobileprovision, - 3. copy Entitlements.plist from user or SDK next to the bundle, - 4. code sign the bundle. - """ - resource_rules_path = self._InstallResourceRules(resource_rules) - substitutions, overrides = self._InstallProvisioningProfile( - provisioning, self._GetCFBundleIdentifier()) - entitlements_path = self._InstallEntitlements( - entitlements, substitutions, overrides) - subprocess.check_call([ - 'codesign', '--force', '--sign', key, '--resource-rules', - resource_rules_path, '--entitlements', entitlements_path, - os.path.join( - os.environ['TARGET_BUILD_DIR'], - os.environ['FULL_PRODUCT_NAME'])]) - - def _InstallResourceRules(self, resource_rules): - """Installs ResourceRules.plist from user or SDK into the bundle. - - Args: - resource_rules: string, optional, path to the ResourceRules.plist file - to use, default to "${SDKROOT}/ResourceRules.plist" - - Returns: - Path to the copy of ResourceRules.plist into the bundle. - """ - source_path = resource_rules - target_path = os.path.join( - os.environ['BUILT_PRODUCTS_DIR'], - os.environ['CONTENTS_FOLDER_PATH'], - 'ResourceRules.plist') - if not source_path: - source_path = os.path.join( - os.environ['SDKROOT'], 'ResourceRules.plist') - shutil.copy2(source_path, target_path) - return target_path - - def _InstallProvisioningProfile(self, profile, bundle_identifier): - """Installs embedded.mobileprovision into the bundle. 
- - Args: - profile: string, optional, short name of the .mobileprovision file - to use, if empty or the file is missing, the best file installed - will be used - bundle_identifier: string, value of CFBundleIdentifier from Info.plist - - Returns: - A tuple containing two dictionary: variables substitutions and values - to overrides when generating the entitlements file. - """ - source_path, provisioning_data, team_id = self._FindProvisioningProfile( - profile, bundle_identifier) - target_path = os.path.join( - os.environ['BUILT_PRODUCTS_DIR'], - os.environ['CONTENTS_FOLDER_PATH'], - 'embedded.mobileprovision') - shutil.copy2(source_path, target_path) - substitutions = self._GetSubstitutions(bundle_identifier, team_id + '.') - return substitutions, provisioning_data['Entitlements'] - - def _FindProvisioningProfile(self, profile, bundle_identifier): - """Finds the .mobileprovision file to use for signing the bundle. - - Checks all the installed provisioning profiles (or if the user specified - the PROVISIONING_PROFILE variable, only consult it) and select the most - specific that correspond to the bundle identifier. - - Args: - profile: string, optional, short name of the .mobileprovision file - to use, if empty or the file is missing, the best file installed - will be used - bundle_identifier: string, value of CFBundleIdentifier from Info.plist - - Returns: - A tuple of the path to the selected provisioning profile, the data of - the embedded plist in the provisioning profile and the team identifier - to use for code signing. - - Raises: - SystemExit: if no .mobileprovision can be used to sign the bundle. - """ - profiles_dir = os.path.join( - os.environ['HOME'], 'Library', 'MobileDevice', 'Provisioning Profiles') - if not os.path.isdir(profiles_dir): - print >>sys.stderr, ( - 'cannot find mobile provisioning for %s' % bundle_identifier) - sys.exit(1) - provisioning_profiles = None - if profile: - profile_path = os.path.join(profiles_dir, profile + '.mobileprovision') - if os.path.exists(profile_path): - provisioning_profiles = [profile_path] - if not provisioning_profiles: - provisioning_profiles = glob.glob( - os.path.join(profiles_dir, '*.mobileprovision')) - valid_provisioning_profiles = {} - for profile_path in provisioning_profiles: - profile_data = self._LoadProvisioningProfile(profile_path) - app_id_pattern = profile_data.get( - 'Entitlements', {}).get('application-identifier', '') - for team_identifier in profile_data.get('TeamIdentifier', []): - app_id = '%s.%s' % (team_identifier, bundle_identifier) - if fnmatch.fnmatch(app_id, app_id_pattern): - valid_provisioning_profiles[app_id_pattern] = ( - profile_path, profile_data, team_identifier) - if not valid_provisioning_profiles: - print >>sys.stderr, ( - 'cannot find mobile provisioning for %s' % bundle_identifier) - sys.exit(1) - # If the user has multiple provisioning profiles installed that can be - # used for ${bundle_identifier}, pick the most specific one (ie. the - # provisioning profile whose pattern is the longest). - selected_key = max(valid_provisioning_profiles, key=lambda v: len(v)) - return valid_provisioning_profiles[selected_key] - - def _LoadProvisioningProfile(self, profile_path): - """Extracts the plist embedded in a provisioning profile. - - Args: - profile_path: string, path to the .mobileprovision file - - Returns: - Content of the plist embedded in the provisioning profile as a dictionary. 
- """ - with tempfile.NamedTemporaryFile() as temp: - subprocess.check_call([ - 'security', 'cms', '-D', '-i', profile_path, '-o', temp.name]) - return self._LoadPlistMaybeBinary(temp.name) - - def _MergePlist(self, merged_plist, plist): - """Merge |plist| into |merged_plist|.""" - for key, value in plist.iteritems(): - if isinstance(value, dict): - merged_value = merged_plist.get(key, {}) - if isinstance(merged_value, dict): - self._MergePlist(merged_value, value) - merged_plist[key] = merged_value - else: - merged_plist[key] = value - else: - merged_plist[key] = value - - def _LoadPlistMaybeBinary(self, plist_path): - """Loads into a memory a plist possibly encoded in binary format. - - This is a wrapper around plistlib.readPlist that tries to convert the - plist to the XML format if it can't be parsed (assuming that it is in - the binary format). - - Args: - plist_path: string, path to a plist file, in XML or binary format - - Returns: - Content of the plist as a dictionary. - """ - try: - # First, try to read the file using plistlib that only supports XML, - # and if an exception is raised, convert a temporary copy to XML and - # load that copy. - return plistlib.readPlist(plist_path) - except: - pass - with tempfile.NamedTemporaryFile() as temp: - shutil.copy2(plist_path, temp.name) - subprocess.check_call(['plutil', '-convert', 'xml1', temp.name]) - return plistlib.readPlist(temp.name) - - def _GetSubstitutions(self, bundle_identifier, app_identifier_prefix): - """Constructs a dictionary of variable substitutions for Entitlements.plist. - - Args: - bundle_identifier: string, value of CFBundleIdentifier from Info.plist - app_identifier_prefix: string, value for AppIdentifierPrefix - - Returns: - Dictionary of substitutions to apply when generating Entitlements.plist. - """ - return { - 'CFBundleIdentifier': bundle_identifier, - 'AppIdentifierPrefix': app_identifier_prefix, - } - - def _GetCFBundleIdentifier(self): - """Extracts CFBundleIdentifier value from Info.plist in the bundle. - - Returns: - Value of CFBundleIdentifier in the Info.plist located in the bundle. - """ - info_plist_path = os.path.join( - os.environ['TARGET_BUILD_DIR'], - os.environ['INFOPLIST_PATH']) - info_plist_data = self._LoadPlistMaybeBinary(info_plist_path) - return info_plist_data['CFBundleIdentifier'] - - def _InstallEntitlements(self, entitlements, substitutions, overrides): - """Generates and install the ${BundleName}.xcent entitlements file. - - Expands variables "$(variable)" pattern in the source entitlements file, - add extra entitlements defined in the .mobileprovision file and the copy - the generated plist to "${BundlePath}.xcent". - - Args: - entitlements: string, optional, path to the Entitlements.plist template - to use, defaults to "${SDKROOT}/Entitlements.plist" - substitutions: dictionary, variable substitutions - overrides: dictionary, values to add to the entitlements - - Returns: - Path to the generated entitlements file. 
- """ - source_path = entitlements - target_path = os.path.join( - os.environ['BUILT_PRODUCTS_DIR'], - os.environ['PRODUCT_NAME'] + '.xcent') - if not source_path: - source_path = os.path.join( - os.environ['SDKROOT'], - 'Entitlements.plist') - shutil.copy2(source_path, target_path) - data = self._LoadPlistMaybeBinary(target_path) - data = self._ExpandVariables(data, substitutions) - if overrides: - for key in overrides: - if key not in data: - data[key] = overrides[key] - plistlib.writePlist(data, target_path) - return target_path - - def _ExpandVariables(self, data, substitutions): - """Expands variables "$(variable)" in data. - - Args: - data: object, can be either string, list or dictionary - substitutions: dictionary, variable substitutions to perform - - Returns: - Copy of data where each references to "$(variable)" has been replaced - by the corresponding value found in substitutions, or left intact if - the key was not found. - """ - if isinstance(data, str): - for key, value in substitutions.iteritems(): - data = data.replace('$(%s)' % key, value) - return data - if isinstance(data, list): - return [self._ExpandVariables(v, substitutions) for v in data] - if isinstance(data, dict): - return {k: self._ExpandVariables(data[k], substitutions) for k in data} - return data - -if __name__ == '__main__': - sys.exit(main(sys.argv[1:])) diff --git a/test/addons-napi/test_function/build/test_function.target.mk b/test/addons-napi/test_function/build/test_function.target.mk deleted file mode 100644 index 5b17ff2409e0de..00000000000000 --- a/test/addons-napi/test_function/build/test_function.target.mk +++ /dev/null @@ -1,178 +0,0 @@ -# This file is generated by gyp; do not edit. - -TOOLSET := target -TARGET := test_function -DEFS_Debug := \ - '-DNODE_GYP_MODULE_NAME=test_function' \ - '-DUSING_UV_SHARED=1' \ - '-DUSING_V8_SHARED=1' \ - '-DV8_DEPRECATION_WARNINGS=1' \ - '-D_DARWIN_USE_64_BIT_INODE=1' \ - '-D_LARGEFILE_SOURCE' \ - '-D_FILE_OFFSET_BITS=64' \ - '-DBUILDING_NODE_EXTENSION' \ - '-DDEBUG' \ - '-D_DEBUG' \ - '-DV8_ENABLE_CHECKS' - -# Flags passed to all source files. -CFLAGS_Debug := \ - -O0 \ - -gdwarf-2 \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -Wall \ - -Wendif-labels \ - -W \ - -Wno-unused-parameter - -# Flags passed to only C files. -CFLAGS_C_Debug := \ - -fno-strict-aliasing - -# Flags passed to only C++ files. -CFLAGS_CC_Debug := \ - -std=gnu++0x \ - -stdlib=libc++ \ - -fno-rtti \ - -fno-exceptions \ - -fno-threadsafe-statics \ - -fno-strict-aliasing - -# Flags passed to only ObjC files. -CFLAGS_OBJC_Debug := - -# Flags passed to only ObjC++ files. -CFLAGS_OBJCC_Debug := - -INCS_Debug := \ - -I/Users/trott/io.js/include/node \ - -I/Users/trott/io.js/src \ - -I/Users/trott/io.js/deps/uv/include \ - -I/Users/trott/io.js/deps/v8/include - -DEFS_Release := \ - '-DNODE_GYP_MODULE_NAME=test_function' \ - '-DUSING_UV_SHARED=1' \ - '-DUSING_V8_SHARED=1' \ - '-DV8_DEPRECATION_WARNINGS=1' \ - '-D_DARWIN_USE_64_BIT_INODE=1' \ - '-D_LARGEFILE_SOURCE' \ - '-D_FILE_OFFSET_BITS=64' \ - '-DBUILDING_NODE_EXTENSION' - -# Flags passed to all source files. -CFLAGS_Release := \ - -Os \ - -gdwarf-2 \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -Wall \ - -Wendif-labels \ - -W \ - -Wno-unused-parameter - -# Flags passed to only C files. -CFLAGS_C_Release := \ - -fno-strict-aliasing - -# Flags passed to only C++ files. 
-CFLAGS_CC_Release := \ - -std=gnu++0x \ - -stdlib=libc++ \ - -fno-rtti \ - -fno-exceptions \ - -fno-threadsafe-statics \ - -fno-strict-aliasing - -# Flags passed to only ObjC files. -CFLAGS_OBJC_Release := - -# Flags passed to only ObjC++ files. -CFLAGS_OBJCC_Release := - -INCS_Release := \ - -I/Users/trott/io.js/include/node \ - -I/Users/trott/io.js/src \ - -I/Users/trott/io.js/deps/uv/include \ - -I/Users/trott/io.js/deps/v8/include - -OBJS := \ - $(obj).target/$(TARGET)/test_function.o - -# Add to the list of files we specially track dependencies for. -all_deps += $(OBJS) - -# CFLAGS et al overrides must be target-local. -# See "Target-specific Variable Values" in the GNU Make manual. -$(OBJS): TOOLSET := $(TOOLSET) -$(OBJS): GYP_CFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) -$(OBJS): GYP_CXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) -$(OBJS): GYP_OBJCFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) $(CFLAGS_OBJC_$(BUILDTYPE)) -$(OBJS): GYP_OBJCXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) $(CFLAGS_OBJCC_$(BUILDTYPE)) - -# Suffix rules, putting all outputs into $(obj). - -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) - -# Try building from generated source, too. - -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) - -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) - -# End of this set of suffix rules -### Rules for final target. -LDFLAGS_Debug := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -L$(builddir) \ - -stdlib=libc++ - -LIBTOOLFLAGS_Debug := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first - -LDFLAGS_Release := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -L$(builddir) \ - -stdlib=libc++ - -LIBTOOLFLAGS_Release := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first - -LIBS := - -$(builddir)/test_function.node: GYP_LDFLAGS := $(LDFLAGS_$(BUILDTYPE)) -$(builddir)/test_function.node: LIBS := $(LIBS) -$(builddir)/test_function.node: GYP_LIBTOOLFLAGS := $(LIBTOOLFLAGS_$(BUILDTYPE)) -$(builddir)/test_function.node: TOOLSET := $(TOOLSET) -$(builddir)/test_function.node: $(OBJS) FORCE_DO_CMD - $(call do_cmd,solink_module) - -all_deps += $(builddir)/test_function.node -# Add target alias -.PHONY: test_function -test_function: $(builddir)/test_function.node - -# Short alias for building this executable. -.PHONY: test_function.node -test_function.node: $(builddir)/test_function.node - -# Add executable to "all" target. -.PHONY: all -all: $(builddir)/test_function.node - diff --git a/test/addons-napi/test_general/build/Makefile b/test/addons-napi/test_general/build/Makefile deleted file mode 100644 index d95e90cd924ca9..00000000000000 --- a/test/addons-napi/test_general/build/Makefile +++ /dev/null @@ -1,342 +0,0 @@ -# We borrow heavily from the kernel build setup, though we are simpler since -# we don't have Kconfig tweaking settings on us. - -# The implicit make rules have it looking for RCS files, among other things. -# We instead explicitly write all the rules we care about. -# It's even quicker (saves ~200ms) to pass -r on the command line. 
-MAKEFLAGS=-r - -# The source directory tree. -srcdir := .. -abs_srcdir := $(abspath $(srcdir)) - -# The name of the builddir. -builddir_name ?= . - -# The V=1 flag on command line makes us verbosely print command lines. -ifdef V - quiet= -else - quiet=quiet_ -endif - -# Specify BUILDTYPE=Release on the command line for a release build. -BUILDTYPE ?= Release - -# Directory all our build output goes into. -# Note that this must be two directories beneath src/ for unit tests to pass, -# as they reach into the src/ directory for data with relative paths. -builddir ?= $(builddir_name)/$(BUILDTYPE) -abs_builddir := $(abspath $(builddir)) -depsdir := $(builddir)/.deps - -# Object output directory. -obj := $(builddir)/obj -abs_obj := $(abspath $(obj)) - -# We build up a list of every single one of the targets so we can slurp in the -# generated dependency rule Makefiles in one pass. -all_deps := - - - -CC.target ?= $(CC) -CFLAGS.target ?= $(CPPFLAGS) $(CFLAGS) -CXX.target ?= $(CXX) -CXXFLAGS.target ?= $(CPPFLAGS) $(CXXFLAGS) -LINK.target ?= $(LINK) -LDFLAGS.target ?= $(LDFLAGS) -AR.target ?= $(AR) - -# C++ apps need to be linked with g++. -LINK ?= $(CXX.target) - -# TODO(evan): move all cross-compilation logic to gyp-time so we don't need -# to replicate this environment fallback in make as well. -CC.host ?= gcc -CFLAGS.host ?= $(CPPFLAGS_host) $(CFLAGS_host) -CXX.host ?= g++ -CXXFLAGS.host ?= $(CPPFLAGS_host) $(CXXFLAGS_host) -LINK.host ?= $(CXX.host) -LDFLAGS.host ?= -AR.host ?= ar - -# Define a dir function that can handle spaces. -# http://www.gnu.org/software/make/manual/make.html#Syntax-of-Functions -# "leading spaces cannot appear in the text of the first argument as written. -# These characters can be put into the argument value by variable substitution." -empty := -space := $(empty) $(empty) - -# http://stackoverflow.com/questions/1189781/using-make-dir-or-notdir-on-a-path-with-spaces -replace_spaces = $(subst $(space),?,$1) -unreplace_spaces = $(subst ?,$(space),$1) -dirx = $(call unreplace_spaces,$(dir $(call replace_spaces,$1))) - -# Flags to make gcc output dependency info. Note that you need to be -# careful here to use the flags that ccache and distcc can understand. -# We write to a dep file on the side first and then rename at the end -# so we can't end up with a broken dep file. -depfile = $(depsdir)/$(call replace_spaces,$@).d -DEPFLAGS = -MMD -MF $(depfile).raw - -# We have to fixup the deps output in a few ways. -# (1) the file output should mention the proper .o file. -# ccache or distcc lose the path to the target, so we convert a rule of -# the form: -# foobar.o: DEP1 DEP2 -# into -# path/to/foobar.o: DEP1 DEP2 -# (2) we want missing files not to cause us to fail to build. -# We want to rewrite -# foobar.o: DEP1 DEP2 \ -# DEP3 -# to -# DEP1: -# DEP2: -# DEP3: -# so if the files are missing, they're just considered phony rules. -# We have to do some pretty insane escaping to get those backslashes -# and dollar signs past make, the shell, and sed at the same time. -# Doesn't work with spaces, but that's fine: .d files have spaces in -# their names replaced with other characters. -define fixup_dep -# The depfile may not exist if the input file didn't have any #includes. -touch $(depfile).raw -# Fixup path as in (1). -sed -e "s|^$(notdir $@)|$@|" $(depfile).raw >> $(depfile) -# Add extra rules as in (2). -# We remove slashes and replace spaces with new lines; -# remove blank lines; -# delete the first line and append a colon to the remaining lines. 
-sed -e 's|\\||' -e 'y| |\n|' $(depfile).raw |\ - grep -v '^$$' |\ - sed -e 1d -e 's|$$|:|' \ - >> $(depfile) -rm $(depfile).raw -endef - -# Command definitions: -# - cmd_foo is the actual command to run; -# - quiet_cmd_foo is the brief-output summary of the command. - -quiet_cmd_cc = CC($(TOOLSET)) $@ -cmd_cc = $(CC.$(TOOLSET)) $(GYP_CFLAGS) $(DEPFLAGS) $(CFLAGS.$(TOOLSET)) -c -o $@ $< - -quiet_cmd_cxx = CXX($(TOOLSET)) $@ -cmd_cxx = $(CXX.$(TOOLSET)) $(GYP_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< - -quiet_cmd_objc = CXX($(TOOLSET)) $@ -cmd_objc = $(CC.$(TOOLSET)) $(GYP_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $< - -quiet_cmd_objcxx = CXX($(TOOLSET)) $@ -cmd_objcxx = $(CXX.$(TOOLSET)) $(GYP_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $< - -# Commands for precompiled header files. -quiet_cmd_pch_c = CXX($(TOOLSET)) $@ -cmd_pch_c = $(CC.$(TOOLSET)) $(GYP_PCH_CFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< -quiet_cmd_pch_cc = CXX($(TOOLSET)) $@ -cmd_pch_cc = $(CC.$(TOOLSET)) $(GYP_PCH_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< -quiet_cmd_pch_m = CXX($(TOOLSET)) $@ -cmd_pch_m = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $< -quiet_cmd_pch_mm = CXX($(TOOLSET)) $@ -cmd_pch_mm = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $< - -# gyp-mac-tool is written next to the root Makefile by gyp. -# Use $(4) for the command, since $(2) and $(3) are used as flag by do_cmd -# already. -quiet_cmd_mac_tool = MACTOOL $(4) $< -cmd_mac_tool = ./gyp-mac-tool $(4) $< "$@" - -quiet_cmd_mac_package_framework = PACKAGE FRAMEWORK $@ -cmd_mac_package_framework = ./gyp-mac-tool package-framework "$@" $(4) - -quiet_cmd_infoplist = INFOPLIST $@ -cmd_infoplist = $(CC.$(TOOLSET)) -E -P -Wno-trigraphs -x c $(INFOPLIST_DEFINES) "$<" -o "$@" - -quiet_cmd_touch = TOUCH $@ -cmd_touch = touch $@ - -quiet_cmd_copy = COPY $@ -# send stderr to /dev/null to ignore messages when linking directories. -cmd_copy = rm -rf "$@" && cp -af "$<" "$@" - -quiet_cmd_alink = LIBTOOL-STATIC $@ -cmd_alink = rm -f $@ && ./gyp-mac-tool filter-libtool libtool $(GYP_LIBTOOLFLAGS) -static -o $@ $(filter %.o,$^) - -quiet_cmd_link = LINK($(TOOLSET)) $@ -cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS) - -quiet_cmd_solink = SOLINK($(TOOLSET)) $@ -cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS) - -quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@ -cmd_solink_module = $(LINK.$(TOOLSET)) -bundle $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS) - - -# Define an escape_quotes function to escape single quotes. -# This allows us to handle quotes properly as long as we always use -# use single quotes and escape_quotes. -escape_quotes = $(subst ','\'',$(1)) -# This comment is here just to include a ' to unconfuse syntax highlighting. -# Define an escape_vars function to escape '$' variable syntax. -# This allows us to read/write command lines with shell variables (e.g. -# $LD_LIBRARY_PATH), without triggering make substitution. -escape_vars = $(subst $$,$$$$,$(1)) -# Helper that expands to a shell command to echo a string exactly as it is in -# make. This uses printf instead of echo because printf's behaviour with respect -# to escape sequences is more portable than echo's across different shells -# (e.g., dash, bash). 
-exact_echo = printf '%s\n' '$(call escape_quotes,$(1))' - -# Helper to compare the command we're about to run against the command -# we logged the last time we ran the command. Produces an empty -# string (false) when the commands match. -# Tricky point: Make has no string-equality test function. -# The kernel uses the following, but it seems like it would have false -# positives, where one string reordered its arguments. -# arg_check = $(strip $(filter-out $(cmd_$(1)), $(cmd_$@)) \ -# $(filter-out $(cmd_$@), $(cmd_$(1)))) -# We instead substitute each for the empty string into the other, and -# say they're equal if both substitutions produce the empty string. -# .d files contain ? instead of spaces, take that into account. -command_changed = $(or $(subst $(cmd_$(1)),,$(cmd_$(call replace_spaces,$@))),\ - $(subst $(cmd_$(call replace_spaces,$@)),,$(cmd_$(1)))) - -# Helper that is non-empty when a prerequisite changes. -# Normally make does this implicitly, but we force rules to always run -# so we can check their command lines. -# $? -- new prerequisites -# $| -- order-only dependencies -prereq_changed = $(filter-out FORCE_DO_CMD,$(filter-out $|,$?)) - -# Helper that executes all postbuilds until one fails. -define do_postbuilds - @E=0;\ - for p in $(POSTBUILDS); do\ - eval $$p;\ - E=$$?;\ - if [ $$E -ne 0 ]; then\ - break;\ - fi;\ - done;\ - if [ $$E -ne 0 ]; then\ - rm -rf "$@";\ - exit $$E;\ - fi -endef - -# do_cmd: run a command via the above cmd_foo names, if necessary. -# Should always run for a given target to handle command-line changes. -# Second argument, if non-zero, makes it do asm/C/C++ dependency munging. -# Third argument, if non-zero, makes it do POSTBUILDS processing. -# Note: We intentionally do NOT call dirx for depfile, since it contains ? for -# spaces already and dirx strips the ? characters. -define do_cmd -$(if $(or $(command_changed),$(prereq_changed)), - @$(call exact_echo, $($(quiet)cmd_$(1))) - @mkdir -p "$(call dirx,$@)" "$(dir $(depfile))" - $(if $(findstring flock,$(word 2,$(cmd_$1))), - @$(cmd_$(1)) - @echo " $(quiet_cmd_$(1)): Finished", - @$(cmd_$(1)) - ) - @$(call exact_echo,$(call escape_vars,cmd_$(call replace_spaces,$@) := $(cmd_$(1)))) > $(depfile) - @$(if $(2),$(fixup_dep)) - $(if $(and $(3), $(POSTBUILDS)), - $(call do_postbuilds) - ) -) -endef - -# Declare the "all" target first so it is the default, -# even though we don't have the deps yet. -.PHONY: all -all: - -# make looks for ways to re-generate included makefiles, but in our case, we -# don't have a direct way. Explicitly telling make that it has nothing to do -# for them makes it go faster. -%.d: ; - -# Use FORCE_DO_CMD to force a target to run. Should be coupled with -# do_cmd. -.PHONY: FORCE_DO_CMD -FORCE_DO_CMD: - -TOOLSET := target -# Suffix rules, putting all outputs into $(obj). -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.m FORCE_DO_CMD - @$(call do_cmd,objc,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.mm FORCE_DO_CMD - @$(call do_cmd,objcxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.S FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.s FORCE_DO_CMD - @$(call do_cmd,cc,1) - -# Try building from generated source, too. 
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.m FORCE_DO_CMD - @$(call do_cmd,objc,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.mm FORCE_DO_CMD - @$(call do_cmd,objcxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.S FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.s FORCE_DO_CMD - @$(call do_cmd,cc,1) - -$(obj).$(TOOLSET)/%.o: $(obj)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.m FORCE_DO_CMD - @$(call do_cmd,objc,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.mm FORCE_DO_CMD - @$(call do_cmd,objcxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.S FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.s FORCE_DO_CMD - @$(call do_cmd,cc,1) - - -ifeq ($(strip $(foreach prefix,$(NO_LOAD),\ - $(findstring $(join ^,$(prefix)),\ - $(join ^,test_general.target.mk)))),) - include test_general.target.mk -endif - -quiet_cmd_regen_makefile = ACTION Regenerating $@ -cmd_regen_makefile = cd $(srcdir); /Users/trott/io.js/deps/npm/node_modules/node-gyp/gyp/gyp_main.py -fmake --ignore-environment "--toplevel-dir=." -I/Users/trott/io.js/test/addons-napi/test_general/build/config.gypi -I/Users/trott/io.js/deps/npm/node_modules/node-gyp/addon.gypi -I/Users/trott/io.js/common.gypi "--depth=." "-Goutput_dir=." "--generator-output=build" "-Dlibrary=shared_library" "-Dvisibility=default" "-Dnode_root_dir=/Users/trott/io.js" "-Dnode_gyp_dir=/Users/trott/io.js/deps/npm/node_modules/node-gyp" "-Dnode_lib_file=/Users/trott/io.js/$(Configuration)/node.lib" "-Dmodule_root_dir=/Users/trott/io.js/test/addons-napi/test_general" "-Dnode_engine=v8" binding.gyp -Makefile: $(srcdir)/../../../deps/npm/node_modules/node-gyp/addon.gypi $(srcdir)/build/config.gypi $(srcdir)/binding.gyp $(srcdir)/../../../common.gypi - $(call do_cmd,regen_makefile) - -# "all" is a concatenation of the "all" targets from all the included -# sub-makefiles. This is just here to clarify. -all: - -# Add in dependency-tracking rules. $(all_deps) is the list of every single -# target in our tree. Only consider the ones with .d (dependency) info: -d_files := $(wildcard $(foreach f,$(all_deps),$(depsdir)/$(f).d)) -ifneq ($(d_files),) - include $(d_files) -endif diff --git a/test/addons-napi/test_general/build/binding.Makefile b/test/addons-napi/test_general/build/binding.Makefile deleted file mode 100644 index 789cd04d5d342c..00000000000000 --- a/test/addons-napi/test_general/build/binding.Makefile +++ /dev/null @@ -1,6 +0,0 @@ -# This file is generated by gyp; do not edit. - -export builddir_name ?= ./build/. -.PHONY: all -all: - $(MAKE) test_general diff --git a/test/addons-napi/test_general/build/config.gypi b/test/addons-napi/test_general/build/config.gypi deleted file mode 100644 index 6a35cfccbb772b..00000000000000 --- a/test/addons-napi/test_general/build/config.gypi +++ /dev/null @@ -1,70 +0,0 @@ -# Do not edit. 
File was generated by node-gyp's "configure" step -{ - "target_defaults": { - "cflags": [], - "default_configuration": "Release", - "defines": [], - "include_dirs": [], - "libraries": [] - }, - "variables": { - "asan": 0, - "coverage": "false", - "debug_devtools": "node", - "debug_http2": "false", - "debug_nghttp2": "false", - "force_dynamic_crt": 0, - "host_arch": "x64", - "icu_data_file": "icudt59l.dat", - "icu_data_in": "../../deps/icu-small/source/data/in/icudt59l.dat", - "icu_endianness": "l", - "icu_gyp_path": "tools/icu/icu-generic.gyp", - "icu_locales": "en,root", - "icu_path": "deps/icu-small", - "icu_small": "true", - "icu_ver_major": "59", - "llvm_version": 0, - "node_byteorder": "little", - "node_enable_d8": "false", - "node_enable_v8_vtunejit": "false", - "node_install_npm": "true", - "node_module_version": 58, - "node_no_browser_globals": "false", - "node_prefix": "/usr/local", - "node_release_urlbase": "", - "node_shared": "false", - "node_shared_cares": "false", - "node_shared_http_parser": "false", - "node_shared_libuv": "false", - "node_shared_openssl": "false", - "node_shared_zlib": "false", - "node_tag": "", - "node_use_bundled_v8": "true", - "node_use_dtrace": "true", - "node_use_etw": "false", - "node_use_lttng": "false", - "node_use_openssl": "true", - "node_use_perfctr": "false", - "node_use_v8_platform": "true", - "node_without_node_options": "false", - "openssl_fips": "", - "openssl_no_asm": 0, - "shlib_suffix": "58.dylib", - "target_arch": "x64", - "uv_parent_path": "/deps/uv/", - "uv_use_dtrace": "true", - "v8_enable_gdbjit": 0, - "v8_enable_i18n_support": 1, - "v8_enable_inspector": 1, - "v8_no_strict_aliasing": 1, - "v8_optimized_debug": 0, - "v8_promise_internal_field_count": 1, - "v8_random_seed": 0, - "v8_trace_maps": 0, - "v8_use_snapshot": "true", - "want_separate_host_toolset": 0, - "xcode_version": "8.0", - "nodedir": "/Users/trott/io.js", - "standalone_static_library": 1 - } -} diff --git a/test/addons-napi/test_general/build/gyp-mac-tool b/test/addons-napi/test_general/build/gyp-mac-tool deleted file mode 100755 index 8ef02b0493a003..00000000000000 --- a/test/addons-napi/test_general/build/gyp-mac-tool +++ /dev/null @@ -1,611 +0,0 @@ -#!/usr/bin/env python -# Generated by gyp. Do not edit. -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Utility functions to perform Xcode-style build steps. - -These functions are executed via gyp-mac-tool when using the Makefile generator. -""" - -import fcntl -import fnmatch -import glob -import json -import os -import plistlib -import re -import shutil -import string -import subprocess -import sys -import tempfile - - -def main(args): - executor = MacTool() - exit_code = executor.Dispatch(args) - if exit_code is not None: - sys.exit(exit_code) - - -class MacTool(object): - """This class performs all the Mac tooling steps. 
The methods can either be - executed directly, or dispatched from an argument list.""" - - def Dispatch(self, args): - """Dispatches a string command to a method.""" - if len(args) < 1: - raise Exception("Not enough arguments") - - method = "Exec%s" % self._CommandifyName(args[0]) - return getattr(self, method)(*args[1:]) - - def _CommandifyName(self, name_string): - """Transforms a tool name like copy-info-plist to CopyInfoPlist""" - return name_string.title().replace('-', '') - - def ExecCopyBundleResource(self, source, dest, convert_to_binary): - """Copies a resource file to the bundle/Resources directory, performing any - necessary compilation on each resource.""" - extension = os.path.splitext(source)[1].lower() - if os.path.isdir(source): - # Copy tree. - # TODO(thakis): This copies file attributes like mtime, while the - # single-file branch below doesn't. This should probably be changed to - # be consistent with the single-file branch. - if os.path.exists(dest): - shutil.rmtree(dest) - shutil.copytree(source, dest) - elif extension == '.xib': - return self._CopyXIBFile(source, dest) - elif extension == '.storyboard': - return self._CopyXIBFile(source, dest) - elif extension == '.strings': - self._CopyStringsFile(source, dest, convert_to_binary) - else: - shutil.copy(source, dest) - - def _CopyXIBFile(self, source, dest): - """Compiles a XIB file with ibtool into a binary plist in the bundle.""" - - # ibtool sometimes crashes with relative paths. See crbug.com/314728. - base = os.path.dirname(os.path.realpath(__file__)) - if os.path.relpath(source): - source = os.path.join(base, source) - if os.path.relpath(dest): - dest = os.path.join(base, dest) - - args = ['xcrun', 'ibtool', '--errors', '--warnings', '--notices', - '--output-format', 'human-readable-text', '--compile', dest, source] - ibtool_section_re = re.compile(r'/\*.*\*/') - ibtool_re = re.compile(r'.*note:.*is clipping its content') - ibtoolout = subprocess.Popen(args, stdout=subprocess.PIPE) - current_section_header = None - for line in ibtoolout.stdout: - if ibtool_section_re.match(line): - current_section_header = line - elif not ibtool_re.match(line): - if current_section_header: - sys.stdout.write(current_section_header) - current_section_header = None - sys.stdout.write(line) - return ibtoolout.returncode - - def _ConvertToBinary(self, dest): - subprocess.check_call([ - 'xcrun', 'plutil', '-convert', 'binary1', '-o', dest, dest]) - - def _CopyStringsFile(self, source, dest, convert_to_binary): - """Copies a .strings file using iconv to reconvert the input into UTF-16.""" - input_code = self._DetectInputEncoding(source) or "UTF-8" - - # Xcode's CpyCopyStringsFile / builtin-copyStrings seems to call - # CFPropertyListCreateFromXMLData() behind the scenes; at least it prints - # CFPropertyListCreateFromXMLData(): Old-style plist parser: missing - # semicolon in dictionary. - # on invalid files. Do the same kind of validation. - import CoreFoundation - s = open(source, 'rb').read() - d = CoreFoundation.CFDataCreate(None, s, len(s)) - _, error = CoreFoundation.CFPropertyListCreateFromXMLData(None, d, 0, None) - if error: - return - - fp = open(dest, 'wb') - fp.write(s.decode(input_code).encode('UTF-16')) - fp.close() - - if convert_to_binary == 'True': - self._ConvertToBinary(dest) - - def _DetectInputEncoding(self, file_name): - """Reads the first few bytes from file_name and tries to guess the text - encoding. 
Returns None as a guess if it can't detect it.""" - fp = open(file_name, 'rb') - try: - header = fp.read(3) - except e: - fp.close() - return None - fp.close() - if header.startswith("\xFE\xFF"): - return "UTF-16" - elif header.startswith("\xFF\xFE"): - return "UTF-16" - elif header.startswith("\xEF\xBB\xBF"): - return "UTF-8" - else: - return None - - def ExecCopyInfoPlist(self, source, dest, convert_to_binary, *keys): - """Copies the |source| Info.plist to the destination directory |dest|.""" - # Read the source Info.plist into memory. - fd = open(source, 'r') - lines = fd.read() - fd.close() - - # Insert synthesized key/value pairs (e.g. BuildMachineOSBuild). - plist = plistlib.readPlistFromString(lines) - if keys: - plist = dict(plist.items() + json.loads(keys[0]).items()) - lines = plistlib.writePlistToString(plist) - - # Go through all the environment variables and replace them as variables in - # the file. - IDENT_RE = re.compile(r'[/\s]') - for key in os.environ: - if key.startswith('_'): - continue - evar = '${%s}' % key - evalue = os.environ[key] - lines = string.replace(lines, evar, evalue) - - # Xcode supports various suffices on environment variables, which are - # all undocumented. :rfc1034identifier is used in the standard project - # template these days, and :identifier was used earlier. They are used to - # convert non-url characters into things that look like valid urls -- - # except that the replacement character for :identifier, '_' isn't valid - # in a URL either -- oops, hence :rfc1034identifier was born. - evar = '${%s:identifier}' % key - evalue = IDENT_RE.sub('_', os.environ[key]) - lines = string.replace(lines, evar, evalue) - - evar = '${%s:rfc1034identifier}' % key - evalue = IDENT_RE.sub('-', os.environ[key]) - lines = string.replace(lines, evar, evalue) - - # Remove any keys with values that haven't been replaced. - lines = lines.split('\n') - for i in range(len(lines)): - if lines[i].strip().startswith("${"): - lines[i] = None - lines[i - 1] = None - lines = '\n'.join(filter(lambda x: x is not None, lines)) - - # Write out the file with variables replaced. - fd = open(dest, 'w') - fd.write(lines) - fd.close() - - # Now write out PkgInfo file now that the Info.plist file has been - # "compiled". - self._WritePkgInfo(dest) - - if convert_to_binary == 'True': - self._ConvertToBinary(dest) - - def _WritePkgInfo(self, info_plist): - """This writes the PkgInfo file from the data stored in Info.plist.""" - plist = plistlib.readPlist(info_plist) - if not plist: - return - - # Only create PkgInfo for executable types. - package_type = plist['CFBundlePackageType'] - if package_type != 'APPL': - return - - # The format of PkgInfo is eight characters, representing the bundle type - # and bundle signature, each four characters. If that is missing, four - # '?' characters are used instead. - signature_code = plist.get('CFBundleSignature', '????') - if len(signature_code) != 4: # Wrong length resets everything, too. - signature_code = '?' * 4 - - dest = os.path.join(os.path.dirname(info_plist), 'PkgInfo') - fp = open(dest, 'w') - fp.write('%s%s' % (package_type, signature_code)) - fp.close() - - def ExecFlock(self, lockfile, *cmd_list): - """Emulates the most basic behavior of Linux's flock(1).""" - # Rely on exception handling to report errors. 
- fd = os.open(lockfile, os.O_RDONLY|os.O_NOCTTY|os.O_CREAT, 0o666) - fcntl.flock(fd, fcntl.LOCK_EX) - return subprocess.call(cmd_list) - - def ExecFilterLibtool(self, *cmd_list): - """Calls libtool and filters out '/path/to/libtool: file: foo.o has no - symbols'.""" - libtool_re = re.compile(r'^.*libtool: file: .* has no symbols$') - libtool_re5 = re.compile( - r'^.*libtool: warning for library: ' + - r'.* the table of contents is empty ' + - r'\(no object file members in the library define global symbols\)$') - env = os.environ.copy() - # Ref: - # http://www.opensource.apple.com/source/cctools/cctools-809/misc/libtool.c - # The problem with this flag is that it resets the file mtime on the file to - # epoch=0, e.g. 1970-1-1 or 1969-12-31 depending on timezone. - env['ZERO_AR_DATE'] = '1' - libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env) - _, err = libtoolout.communicate() - for line in err.splitlines(): - if not libtool_re.match(line) and not libtool_re5.match(line): - print >>sys.stderr, line - # Unconditionally touch the output .a file on the command line if present - # and the command succeeded. A bit hacky. - if not libtoolout.returncode: - for i in range(len(cmd_list) - 1): - if cmd_list[i] == "-o" and cmd_list[i+1].endswith('.a'): - os.utime(cmd_list[i+1], None) - break - return libtoolout.returncode - - def ExecPackageFramework(self, framework, version): - """Takes a path to Something.framework and the Current version of that and - sets up all the symlinks.""" - # Find the name of the binary based on the part before the ".framework". - binary = os.path.basename(framework).split('.')[0] - - CURRENT = 'Current' - RESOURCES = 'Resources' - VERSIONS = 'Versions' - - if not os.path.exists(os.path.join(framework, VERSIONS, version, binary)): - # Binary-less frameworks don't seem to contain symlinks (see e.g. - # chromium's out/Debug/org.chromium.Chromium.manifest/ bundle). - return - - # Move into the framework directory to set the symlinks correctly. - pwd = os.getcwd() - os.chdir(framework) - - # Set up the Current version. - self._Relink(version, os.path.join(VERSIONS, CURRENT)) - - # Set up the root symlinks. - self._Relink(os.path.join(VERSIONS, CURRENT, binary), binary) - self._Relink(os.path.join(VERSIONS, CURRENT, RESOURCES), RESOURCES) - - # Back to where we were before! - os.chdir(pwd) - - def _Relink(self, dest, link): - """Creates a symlink to |dest| named |link|. If |link| already exists, - it is overwritten.""" - if os.path.lexists(link): - os.remove(link) - os.symlink(dest, link) - - def ExecCompileXcassets(self, keys, *inputs): - """Compiles multiple .xcassets files into a single .car file. - - This invokes 'actool' to compile all the inputs .xcassets files. The - |keys| arguments is a json-encoded dictionary of extra arguments to - pass to 'actool' when the asset catalogs contains an application icon - or a launch image. - - Note that 'actool' does not create the Assets.car file if the asset - catalogs does not contains imageset. 
- """ - command_line = [ - 'xcrun', 'actool', '--output-format', 'human-readable-text', - '--compress-pngs', '--notices', '--warnings', '--errors', - ] - is_iphone_target = 'IPHONEOS_DEPLOYMENT_TARGET' in os.environ - if is_iphone_target: - platform = os.environ['CONFIGURATION'].split('-')[-1] - if platform not in ('iphoneos', 'iphonesimulator'): - platform = 'iphonesimulator' - command_line.extend([ - '--platform', platform, '--target-device', 'iphone', - '--target-device', 'ipad', '--minimum-deployment-target', - os.environ['IPHONEOS_DEPLOYMENT_TARGET'], '--compile', - os.path.abspath(os.environ['CONTENTS_FOLDER_PATH']), - ]) - else: - command_line.extend([ - '--platform', 'macosx', '--target-device', 'mac', - '--minimum-deployment-target', os.environ['MACOSX_DEPLOYMENT_TARGET'], - '--compile', - os.path.abspath(os.environ['UNLOCALIZED_RESOURCES_FOLDER_PATH']), - ]) - if keys: - keys = json.loads(keys) - for key, value in keys.iteritems(): - arg_name = '--' + key - if isinstance(value, bool): - if value: - command_line.append(arg_name) - elif isinstance(value, list): - for v in value: - command_line.append(arg_name) - command_line.append(str(v)) - else: - command_line.append(arg_name) - command_line.append(str(value)) - # Note: actool crashes if inputs path are relative, so use os.path.abspath - # to get absolute path name for inputs. - command_line.extend(map(os.path.abspath, inputs)) - subprocess.check_call(command_line) - - def ExecMergeInfoPlist(self, output, *inputs): - """Merge multiple .plist files into a single .plist file.""" - merged_plist = {} - for path in inputs: - plist = self._LoadPlistMaybeBinary(path) - self._MergePlist(merged_plist, plist) - plistlib.writePlist(merged_plist, output) - - def ExecCodeSignBundle(self, key, resource_rules, entitlements, provisioning): - """Code sign a bundle. - - This function tries to code sign an iOS bundle, following the same - algorithm as Xcode: - 1. copy ResourceRules.plist from the user or the SDK into the bundle, - 2. pick the provisioning profile that best match the bundle identifier, - and copy it into the bundle as embedded.mobileprovision, - 3. copy Entitlements.plist from user or SDK next to the bundle, - 4. code sign the bundle. - """ - resource_rules_path = self._InstallResourceRules(resource_rules) - substitutions, overrides = self._InstallProvisioningProfile( - provisioning, self._GetCFBundleIdentifier()) - entitlements_path = self._InstallEntitlements( - entitlements, substitutions, overrides) - subprocess.check_call([ - 'codesign', '--force', '--sign', key, '--resource-rules', - resource_rules_path, '--entitlements', entitlements_path, - os.path.join( - os.environ['TARGET_BUILD_DIR'], - os.environ['FULL_PRODUCT_NAME'])]) - - def _InstallResourceRules(self, resource_rules): - """Installs ResourceRules.plist from user or SDK into the bundle. - - Args: - resource_rules: string, optional, path to the ResourceRules.plist file - to use, default to "${SDKROOT}/ResourceRules.plist" - - Returns: - Path to the copy of ResourceRules.plist into the bundle. - """ - source_path = resource_rules - target_path = os.path.join( - os.environ['BUILT_PRODUCTS_DIR'], - os.environ['CONTENTS_FOLDER_PATH'], - 'ResourceRules.plist') - if not source_path: - source_path = os.path.join( - os.environ['SDKROOT'], 'ResourceRules.plist') - shutil.copy2(source_path, target_path) - return target_path - - def _InstallProvisioningProfile(self, profile, bundle_identifier): - """Installs embedded.mobileprovision into the bundle. 
- - Args: - profile: string, optional, short name of the .mobileprovision file - to use, if empty or the file is missing, the best file installed - will be used - bundle_identifier: string, value of CFBundleIdentifier from Info.plist - - Returns: - A tuple containing two dictionary: variables substitutions and values - to overrides when generating the entitlements file. - """ - source_path, provisioning_data, team_id = self._FindProvisioningProfile( - profile, bundle_identifier) - target_path = os.path.join( - os.environ['BUILT_PRODUCTS_DIR'], - os.environ['CONTENTS_FOLDER_PATH'], - 'embedded.mobileprovision') - shutil.copy2(source_path, target_path) - substitutions = self._GetSubstitutions(bundle_identifier, team_id + '.') - return substitutions, provisioning_data['Entitlements'] - - def _FindProvisioningProfile(self, profile, bundle_identifier): - """Finds the .mobileprovision file to use for signing the bundle. - - Checks all the installed provisioning profiles (or if the user specified - the PROVISIONING_PROFILE variable, only consult it) and select the most - specific that correspond to the bundle identifier. - - Args: - profile: string, optional, short name of the .mobileprovision file - to use, if empty or the file is missing, the best file installed - will be used - bundle_identifier: string, value of CFBundleIdentifier from Info.plist - - Returns: - A tuple of the path to the selected provisioning profile, the data of - the embedded plist in the provisioning profile and the team identifier - to use for code signing. - - Raises: - SystemExit: if no .mobileprovision can be used to sign the bundle. - """ - profiles_dir = os.path.join( - os.environ['HOME'], 'Library', 'MobileDevice', 'Provisioning Profiles') - if not os.path.isdir(profiles_dir): - print >>sys.stderr, ( - 'cannot find mobile provisioning for %s' % bundle_identifier) - sys.exit(1) - provisioning_profiles = None - if profile: - profile_path = os.path.join(profiles_dir, profile + '.mobileprovision') - if os.path.exists(profile_path): - provisioning_profiles = [profile_path] - if not provisioning_profiles: - provisioning_profiles = glob.glob( - os.path.join(profiles_dir, '*.mobileprovision')) - valid_provisioning_profiles = {} - for profile_path in provisioning_profiles: - profile_data = self._LoadProvisioningProfile(profile_path) - app_id_pattern = profile_data.get( - 'Entitlements', {}).get('application-identifier', '') - for team_identifier in profile_data.get('TeamIdentifier', []): - app_id = '%s.%s' % (team_identifier, bundle_identifier) - if fnmatch.fnmatch(app_id, app_id_pattern): - valid_provisioning_profiles[app_id_pattern] = ( - profile_path, profile_data, team_identifier) - if not valid_provisioning_profiles: - print >>sys.stderr, ( - 'cannot find mobile provisioning for %s' % bundle_identifier) - sys.exit(1) - # If the user has multiple provisioning profiles installed that can be - # used for ${bundle_identifier}, pick the most specific one (ie. the - # provisioning profile whose pattern is the longest). - selected_key = max(valid_provisioning_profiles, key=lambda v: len(v)) - return valid_provisioning_profiles[selected_key] - - def _LoadProvisioningProfile(self, profile_path): - """Extracts the plist embedded in a provisioning profile. - - Args: - profile_path: string, path to the .mobileprovision file - - Returns: - Content of the plist embedded in the provisioning profile as a dictionary. 
- """ - with tempfile.NamedTemporaryFile() as temp: - subprocess.check_call([ - 'security', 'cms', '-D', '-i', profile_path, '-o', temp.name]) - return self._LoadPlistMaybeBinary(temp.name) - - def _MergePlist(self, merged_plist, plist): - """Merge |plist| into |merged_plist|.""" - for key, value in plist.iteritems(): - if isinstance(value, dict): - merged_value = merged_plist.get(key, {}) - if isinstance(merged_value, dict): - self._MergePlist(merged_value, value) - merged_plist[key] = merged_value - else: - merged_plist[key] = value - else: - merged_plist[key] = value - - def _LoadPlistMaybeBinary(self, plist_path): - """Loads into a memory a plist possibly encoded in binary format. - - This is a wrapper around plistlib.readPlist that tries to convert the - plist to the XML format if it can't be parsed (assuming that it is in - the binary format). - - Args: - plist_path: string, path to a plist file, in XML or binary format - - Returns: - Content of the plist as a dictionary. - """ - try: - # First, try to read the file using plistlib that only supports XML, - # and if an exception is raised, convert a temporary copy to XML and - # load that copy. - return plistlib.readPlist(plist_path) - except: - pass - with tempfile.NamedTemporaryFile() as temp: - shutil.copy2(plist_path, temp.name) - subprocess.check_call(['plutil', '-convert', 'xml1', temp.name]) - return plistlib.readPlist(temp.name) - - def _GetSubstitutions(self, bundle_identifier, app_identifier_prefix): - """Constructs a dictionary of variable substitutions for Entitlements.plist. - - Args: - bundle_identifier: string, value of CFBundleIdentifier from Info.plist - app_identifier_prefix: string, value for AppIdentifierPrefix - - Returns: - Dictionary of substitutions to apply when generating Entitlements.plist. - """ - return { - 'CFBundleIdentifier': bundle_identifier, - 'AppIdentifierPrefix': app_identifier_prefix, - } - - def _GetCFBundleIdentifier(self): - """Extracts CFBundleIdentifier value from Info.plist in the bundle. - - Returns: - Value of CFBundleIdentifier in the Info.plist located in the bundle. - """ - info_plist_path = os.path.join( - os.environ['TARGET_BUILD_DIR'], - os.environ['INFOPLIST_PATH']) - info_plist_data = self._LoadPlistMaybeBinary(info_plist_path) - return info_plist_data['CFBundleIdentifier'] - - def _InstallEntitlements(self, entitlements, substitutions, overrides): - """Generates and install the ${BundleName}.xcent entitlements file. - - Expands variables "$(variable)" pattern in the source entitlements file, - add extra entitlements defined in the .mobileprovision file and the copy - the generated plist to "${BundlePath}.xcent". - - Args: - entitlements: string, optional, path to the Entitlements.plist template - to use, defaults to "${SDKROOT}/Entitlements.plist" - substitutions: dictionary, variable substitutions - overrides: dictionary, values to add to the entitlements - - Returns: - Path to the generated entitlements file. 
- """ - source_path = entitlements - target_path = os.path.join( - os.environ['BUILT_PRODUCTS_DIR'], - os.environ['PRODUCT_NAME'] + '.xcent') - if not source_path: - source_path = os.path.join( - os.environ['SDKROOT'], - 'Entitlements.plist') - shutil.copy2(source_path, target_path) - data = self._LoadPlistMaybeBinary(target_path) - data = self._ExpandVariables(data, substitutions) - if overrides: - for key in overrides: - if key not in data: - data[key] = overrides[key] - plistlib.writePlist(data, target_path) - return target_path - - def _ExpandVariables(self, data, substitutions): - """Expands variables "$(variable)" in data. - - Args: - data: object, can be either string, list or dictionary - substitutions: dictionary, variable substitutions to perform - - Returns: - Copy of data where each references to "$(variable)" has been replaced - by the corresponding value found in substitutions, or left intact if - the key was not found. - """ - if isinstance(data, str): - for key, value in substitutions.iteritems(): - data = data.replace('$(%s)' % key, value) - return data - if isinstance(data, list): - return [self._ExpandVariables(v, substitutions) for v in data] - if isinstance(data, dict): - return {k: self._ExpandVariables(data[k], substitutions) for k in data} - return data - -if __name__ == '__main__': - sys.exit(main(sys.argv[1:])) diff --git a/test/addons-napi/test_general/build/test_general.target.mk b/test/addons-napi/test_general/build/test_general.target.mk deleted file mode 100644 index 446938d67589e4..00000000000000 --- a/test/addons-napi/test_general/build/test_general.target.mk +++ /dev/null @@ -1,178 +0,0 @@ -# This file is generated by gyp; do not edit. - -TOOLSET := target -TARGET := test_general -DEFS_Debug := \ - '-DNODE_GYP_MODULE_NAME=test_general' \ - '-DUSING_UV_SHARED=1' \ - '-DUSING_V8_SHARED=1' \ - '-DV8_DEPRECATION_WARNINGS=1' \ - '-D_DARWIN_USE_64_BIT_INODE=1' \ - '-D_LARGEFILE_SOURCE' \ - '-D_FILE_OFFSET_BITS=64' \ - '-DBUILDING_NODE_EXTENSION' \ - '-DDEBUG' \ - '-D_DEBUG' \ - '-DV8_ENABLE_CHECKS' - -# Flags passed to all source files. -CFLAGS_Debug := \ - -O0 \ - -gdwarf-2 \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -Wall \ - -Wendif-labels \ - -W \ - -Wno-unused-parameter - -# Flags passed to only C files. -CFLAGS_C_Debug := \ - -fno-strict-aliasing - -# Flags passed to only C++ files. -CFLAGS_CC_Debug := \ - -std=gnu++0x \ - -stdlib=libc++ \ - -fno-rtti \ - -fno-exceptions \ - -fno-threadsafe-statics \ - -fno-strict-aliasing - -# Flags passed to only ObjC files. -CFLAGS_OBJC_Debug := - -# Flags passed to only ObjC++ files. -CFLAGS_OBJCC_Debug := - -INCS_Debug := \ - -I/Users/trott/io.js/include/node \ - -I/Users/trott/io.js/src \ - -I/Users/trott/io.js/deps/uv/include \ - -I/Users/trott/io.js/deps/v8/include - -DEFS_Release := \ - '-DNODE_GYP_MODULE_NAME=test_general' \ - '-DUSING_UV_SHARED=1' \ - '-DUSING_V8_SHARED=1' \ - '-DV8_DEPRECATION_WARNINGS=1' \ - '-D_DARWIN_USE_64_BIT_INODE=1' \ - '-D_LARGEFILE_SOURCE' \ - '-D_FILE_OFFSET_BITS=64' \ - '-DBUILDING_NODE_EXTENSION' - -# Flags passed to all source files. -CFLAGS_Release := \ - -Os \ - -gdwarf-2 \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -Wall \ - -Wendif-labels \ - -W \ - -Wno-unused-parameter - -# Flags passed to only C files. -CFLAGS_C_Release := \ - -fno-strict-aliasing - -# Flags passed to only C++ files. 
-CFLAGS_CC_Release := \ - -std=gnu++0x \ - -stdlib=libc++ \ - -fno-rtti \ - -fno-exceptions \ - -fno-threadsafe-statics \ - -fno-strict-aliasing - -# Flags passed to only ObjC files. -CFLAGS_OBJC_Release := - -# Flags passed to only ObjC++ files. -CFLAGS_OBJCC_Release := - -INCS_Release := \ - -I/Users/trott/io.js/include/node \ - -I/Users/trott/io.js/src \ - -I/Users/trott/io.js/deps/uv/include \ - -I/Users/trott/io.js/deps/v8/include - -OBJS := \ - $(obj).target/$(TARGET)/test_general.o - -# Add to the list of files we specially track dependencies for. -all_deps += $(OBJS) - -# CFLAGS et al overrides must be target-local. -# See "Target-specific Variable Values" in the GNU Make manual. -$(OBJS): TOOLSET := $(TOOLSET) -$(OBJS): GYP_CFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) -$(OBJS): GYP_CXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) -$(OBJS): GYP_OBJCFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) $(CFLAGS_OBJC_$(BUILDTYPE)) -$(OBJS): GYP_OBJCXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) $(CFLAGS_OBJCC_$(BUILDTYPE)) - -# Suffix rules, putting all outputs into $(obj). - -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) - -# Try building from generated source, too. - -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) - -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) - -# End of this set of suffix rules -### Rules for final target. -LDFLAGS_Debug := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -L$(builddir) \ - -stdlib=libc++ - -LIBTOOLFLAGS_Debug := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first - -LDFLAGS_Release := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -L$(builddir) \ - -stdlib=libc++ - -LIBTOOLFLAGS_Release := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first - -LIBS := - -$(builddir)/test_general.node: GYP_LDFLAGS := $(LDFLAGS_$(BUILDTYPE)) -$(builddir)/test_general.node: LIBS := $(LIBS) -$(builddir)/test_general.node: GYP_LIBTOOLFLAGS := $(LIBTOOLFLAGS_$(BUILDTYPE)) -$(builddir)/test_general.node: TOOLSET := $(TOOLSET) -$(builddir)/test_general.node: $(OBJS) FORCE_DO_CMD - $(call do_cmd,solink_module) - -all_deps += $(builddir)/test_general.node -# Add target alias -.PHONY: test_general -test_general: $(builddir)/test_general.node - -# Short alias for building this executable. -.PHONY: test_general.node -test_general.node: $(builddir)/test_general.node - -# Add executable to "all" target. -.PHONY: all -all: $(builddir)/test_general.node - diff --git a/test/addons-napi/test_handle_scope/build/Makefile b/test/addons-napi/test_handle_scope/build/Makefile deleted file mode 100644 index c8a1d43e08f8e8..00000000000000 --- a/test/addons-napi/test_handle_scope/build/Makefile +++ /dev/null @@ -1,342 +0,0 @@ -# We borrow heavily from the kernel build setup, though we are simpler since -# we don't have Kconfig tweaking settings on us. - -# The implicit make rules have it looking for RCS files, among other things. -# We instead explicitly write all the rules we care about. -# It's even quicker (saves ~200ms) to pass -r on the command line. 
-MAKEFLAGS=-r - -# The source directory tree. -srcdir := .. -abs_srcdir := $(abspath $(srcdir)) - -# The name of the builddir. -builddir_name ?= . - -# The V=1 flag on command line makes us verbosely print command lines. -ifdef V - quiet= -else - quiet=quiet_ -endif - -# Specify BUILDTYPE=Release on the command line for a release build. -BUILDTYPE ?= Release - -# Directory all our build output goes into. -# Note that this must be two directories beneath src/ for unit tests to pass, -# as they reach into the src/ directory for data with relative paths. -builddir ?= $(builddir_name)/$(BUILDTYPE) -abs_builddir := $(abspath $(builddir)) -depsdir := $(builddir)/.deps - -# Object output directory. -obj := $(builddir)/obj -abs_obj := $(abspath $(obj)) - -# We build up a list of every single one of the targets so we can slurp in the -# generated dependency rule Makefiles in one pass. -all_deps := - - - -CC.target ?= $(CC) -CFLAGS.target ?= $(CPPFLAGS) $(CFLAGS) -CXX.target ?= $(CXX) -CXXFLAGS.target ?= $(CPPFLAGS) $(CXXFLAGS) -LINK.target ?= $(LINK) -LDFLAGS.target ?= $(LDFLAGS) -AR.target ?= $(AR) - -# C++ apps need to be linked with g++. -LINK ?= $(CXX.target) - -# TODO(evan): move all cross-compilation logic to gyp-time so we don't need -# to replicate this environment fallback in make as well. -CC.host ?= gcc -CFLAGS.host ?= $(CPPFLAGS_host) $(CFLAGS_host) -CXX.host ?= g++ -CXXFLAGS.host ?= $(CPPFLAGS_host) $(CXXFLAGS_host) -LINK.host ?= $(CXX.host) -LDFLAGS.host ?= -AR.host ?= ar - -# Define a dir function that can handle spaces. -# http://www.gnu.org/software/make/manual/make.html#Syntax-of-Functions -# "leading spaces cannot appear in the text of the first argument as written. -# These characters can be put into the argument value by variable substitution." -empty := -space := $(empty) $(empty) - -# http://stackoverflow.com/questions/1189781/using-make-dir-or-notdir-on-a-path-with-spaces -replace_spaces = $(subst $(space),?,$1) -unreplace_spaces = $(subst ?,$(space),$1) -dirx = $(call unreplace_spaces,$(dir $(call replace_spaces,$1))) - -# Flags to make gcc output dependency info. Note that you need to be -# careful here to use the flags that ccache and distcc can understand. -# We write to a dep file on the side first and then rename at the end -# so we can't end up with a broken dep file. -depfile = $(depsdir)/$(call replace_spaces,$@).d -DEPFLAGS = -MMD -MF $(depfile).raw - -# We have to fixup the deps output in a few ways. -# (1) the file output should mention the proper .o file. -# ccache or distcc lose the path to the target, so we convert a rule of -# the form: -# foobar.o: DEP1 DEP2 -# into -# path/to/foobar.o: DEP1 DEP2 -# (2) we want missing files not to cause us to fail to build. -# We want to rewrite -# foobar.o: DEP1 DEP2 \ -# DEP3 -# to -# DEP1: -# DEP2: -# DEP3: -# so if the files are missing, they're just considered phony rules. -# We have to do some pretty insane escaping to get those backslashes -# and dollar signs past make, the shell, and sed at the same time. -# Doesn't work with spaces, but that's fine: .d files have spaces in -# their names replaced with other characters. -define fixup_dep -# The depfile may not exist if the input file didn't have any #includes. -touch $(depfile).raw -# Fixup path as in (1). -sed -e "s|^$(notdir $@)|$@|" $(depfile).raw >> $(depfile) -# Add extra rules as in (2). -# We remove slashes and replace spaces with new lines; -# remove blank lines; -# delete the first line and append a colon to the remaining lines. 
-sed -e 's|\\||' -e 'y| |\n|' $(depfile).raw |\ - grep -v '^$$' |\ - sed -e 1d -e 's|$$|:|' \ - >> $(depfile) -rm $(depfile).raw -endef - -# Command definitions: -# - cmd_foo is the actual command to run; -# - quiet_cmd_foo is the brief-output summary of the command. - -quiet_cmd_cc = CC($(TOOLSET)) $@ -cmd_cc = $(CC.$(TOOLSET)) $(GYP_CFLAGS) $(DEPFLAGS) $(CFLAGS.$(TOOLSET)) -c -o $@ $< - -quiet_cmd_cxx = CXX($(TOOLSET)) $@ -cmd_cxx = $(CXX.$(TOOLSET)) $(GYP_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< - -quiet_cmd_objc = CXX($(TOOLSET)) $@ -cmd_objc = $(CC.$(TOOLSET)) $(GYP_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $< - -quiet_cmd_objcxx = CXX($(TOOLSET)) $@ -cmd_objcxx = $(CXX.$(TOOLSET)) $(GYP_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $< - -# Commands for precompiled header files. -quiet_cmd_pch_c = CXX($(TOOLSET)) $@ -cmd_pch_c = $(CC.$(TOOLSET)) $(GYP_PCH_CFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< -quiet_cmd_pch_cc = CXX($(TOOLSET)) $@ -cmd_pch_cc = $(CC.$(TOOLSET)) $(GYP_PCH_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< -quiet_cmd_pch_m = CXX($(TOOLSET)) $@ -cmd_pch_m = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $< -quiet_cmd_pch_mm = CXX($(TOOLSET)) $@ -cmd_pch_mm = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $< - -# gyp-mac-tool is written next to the root Makefile by gyp. -# Use $(4) for the command, since $(2) and $(3) are used as flag by do_cmd -# already. -quiet_cmd_mac_tool = MACTOOL $(4) $< -cmd_mac_tool = ./gyp-mac-tool $(4) $< "$@" - -quiet_cmd_mac_package_framework = PACKAGE FRAMEWORK $@ -cmd_mac_package_framework = ./gyp-mac-tool package-framework "$@" $(4) - -quiet_cmd_infoplist = INFOPLIST $@ -cmd_infoplist = $(CC.$(TOOLSET)) -E -P -Wno-trigraphs -x c $(INFOPLIST_DEFINES) "$<" -o "$@" - -quiet_cmd_touch = TOUCH $@ -cmd_touch = touch $@ - -quiet_cmd_copy = COPY $@ -# send stderr to /dev/null to ignore messages when linking directories. -cmd_copy = rm -rf "$@" && cp -af "$<" "$@" - -quiet_cmd_alink = LIBTOOL-STATIC $@ -cmd_alink = rm -f $@ && ./gyp-mac-tool filter-libtool libtool $(GYP_LIBTOOLFLAGS) -static -o $@ $(filter %.o,$^) - -quiet_cmd_link = LINK($(TOOLSET)) $@ -cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS) - -quiet_cmd_solink = SOLINK($(TOOLSET)) $@ -cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS) - -quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@ -cmd_solink_module = $(LINK.$(TOOLSET)) -bundle $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS) - - -# Define an escape_quotes function to escape single quotes. -# This allows us to handle quotes properly as long as we always use -# use single quotes and escape_quotes. -escape_quotes = $(subst ','\'',$(1)) -# This comment is here just to include a ' to unconfuse syntax highlighting. -# Define an escape_vars function to escape '$' variable syntax. -# This allows us to read/write command lines with shell variables (e.g. -# $LD_LIBRARY_PATH), without triggering make substitution. -escape_vars = $(subst $$,$$$$,$(1)) -# Helper that expands to a shell command to echo a string exactly as it is in -# make. This uses printf instead of echo because printf's behaviour with respect -# to escape sequences is more portable than echo's across different shells -# (e.g., dash, bash). 
-exact_echo = printf '%s\n' '$(call escape_quotes,$(1))' - -# Helper to compare the command we're about to run against the command -# we logged the last time we ran the command. Produces an empty -# string (false) when the commands match. -# Tricky point: Make has no string-equality test function. -# The kernel uses the following, but it seems like it would have false -# positives, where one string reordered its arguments. -# arg_check = $(strip $(filter-out $(cmd_$(1)), $(cmd_$@)) \ -# $(filter-out $(cmd_$@), $(cmd_$(1)))) -# We instead substitute each for the empty string into the other, and -# say they're equal if both substitutions produce the empty string. -# .d files contain ? instead of spaces, take that into account. -command_changed = $(or $(subst $(cmd_$(1)),,$(cmd_$(call replace_spaces,$@))),\ - $(subst $(cmd_$(call replace_spaces,$@)),,$(cmd_$(1)))) - -# Helper that is non-empty when a prerequisite changes. -# Normally make does this implicitly, but we force rules to always run -# so we can check their command lines. -# $? -- new prerequisites -# $| -- order-only dependencies -prereq_changed = $(filter-out FORCE_DO_CMD,$(filter-out $|,$?)) - -# Helper that executes all postbuilds until one fails. -define do_postbuilds - @E=0;\ - for p in $(POSTBUILDS); do\ - eval $$p;\ - E=$$?;\ - if [ $$E -ne 0 ]; then\ - break;\ - fi;\ - done;\ - if [ $$E -ne 0 ]; then\ - rm -rf "$@";\ - exit $$E;\ - fi -endef - -# do_cmd: run a command via the above cmd_foo names, if necessary. -# Should always run for a given target to handle command-line changes. -# Second argument, if non-zero, makes it do asm/C/C++ dependency munging. -# Third argument, if non-zero, makes it do POSTBUILDS processing. -# Note: We intentionally do NOT call dirx for depfile, since it contains ? for -# spaces already and dirx strips the ? characters. -define do_cmd -$(if $(or $(command_changed),$(prereq_changed)), - @$(call exact_echo, $($(quiet)cmd_$(1))) - @mkdir -p "$(call dirx,$@)" "$(dir $(depfile))" - $(if $(findstring flock,$(word 2,$(cmd_$1))), - @$(cmd_$(1)) - @echo " $(quiet_cmd_$(1)): Finished", - @$(cmd_$(1)) - ) - @$(call exact_echo,$(call escape_vars,cmd_$(call replace_spaces,$@) := $(cmd_$(1)))) > $(depfile) - @$(if $(2),$(fixup_dep)) - $(if $(and $(3), $(POSTBUILDS)), - $(call do_postbuilds) - ) -) -endef - -# Declare the "all" target first so it is the default, -# even though we don't have the deps yet. -.PHONY: all -all: - -# make looks for ways to re-generate included makefiles, but in our case, we -# don't have a direct way. Explicitly telling make that it has nothing to do -# for them makes it go faster. -%.d: ; - -# Use FORCE_DO_CMD to force a target to run. Should be coupled with -# do_cmd. -.PHONY: FORCE_DO_CMD -FORCE_DO_CMD: - -TOOLSET := target -# Suffix rules, putting all outputs into $(obj). -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.m FORCE_DO_CMD - @$(call do_cmd,objc,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.mm FORCE_DO_CMD - @$(call do_cmd,objcxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.S FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.s FORCE_DO_CMD - @$(call do_cmd,cc,1) - -# Try building from generated source, too. 
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.m FORCE_DO_CMD - @$(call do_cmd,objc,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.mm FORCE_DO_CMD - @$(call do_cmd,objcxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.S FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.s FORCE_DO_CMD - @$(call do_cmd,cc,1) - -$(obj).$(TOOLSET)/%.o: $(obj)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.m FORCE_DO_CMD - @$(call do_cmd,objc,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.mm FORCE_DO_CMD - @$(call do_cmd,objcxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.S FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.s FORCE_DO_CMD - @$(call do_cmd,cc,1) - - -ifeq ($(strip $(foreach prefix,$(NO_LOAD),\ - $(findstring $(join ^,$(prefix)),\ - $(join ^,test_handle_scope.target.mk)))),) - include test_handle_scope.target.mk -endif - -quiet_cmd_regen_makefile = ACTION Regenerating $@ -cmd_regen_makefile = cd $(srcdir); /Users/trott/io.js/deps/npm/node_modules/node-gyp/gyp/gyp_main.py -fmake --ignore-environment "--toplevel-dir=." -I/Users/trott/io.js/test/addons-napi/test_handle_scope/build/config.gypi -I/Users/trott/io.js/deps/npm/node_modules/node-gyp/addon.gypi -I/Users/trott/io.js/common.gypi "--depth=." "-Goutput_dir=." "--generator-output=build" "-Dlibrary=shared_library" "-Dvisibility=default" "-Dnode_root_dir=/Users/trott/io.js" "-Dnode_gyp_dir=/Users/trott/io.js/deps/npm/node_modules/node-gyp" "-Dnode_lib_file=/Users/trott/io.js/$(Configuration)/node.lib" "-Dmodule_root_dir=/Users/trott/io.js/test/addons-napi/test_handle_scope" "-Dnode_engine=v8" binding.gyp -Makefile: $(srcdir)/../../../deps/npm/node_modules/node-gyp/addon.gypi $(srcdir)/build/config.gypi $(srcdir)/binding.gyp $(srcdir)/../../../common.gypi - $(call do_cmd,regen_makefile) - -# "all" is a concatenation of the "all" targets from all the included -# sub-makefiles. This is just here to clarify. -all: - -# Add in dependency-tracking rules. $(all_deps) is the list of every single -# target in our tree. Only consider the ones with .d (dependency) info: -d_files := $(wildcard $(foreach f,$(all_deps),$(depsdir)/$(f).d)) -ifneq ($(d_files),) - include $(d_files) -endif diff --git a/test/addons-napi/test_handle_scope/build/binding.Makefile b/test/addons-napi/test_handle_scope/build/binding.Makefile deleted file mode 100644 index 3999f7aa446a9b..00000000000000 --- a/test/addons-napi/test_handle_scope/build/binding.Makefile +++ /dev/null @@ -1,6 +0,0 @@ -# This file is generated by gyp; do not edit. - -export builddir_name ?= ./build/. -.PHONY: all -all: - $(MAKE) test_handle_scope diff --git a/test/addons-napi/test_handle_scope/build/config.gypi b/test/addons-napi/test_handle_scope/build/config.gypi deleted file mode 100644 index 6a35cfccbb772b..00000000000000 --- a/test/addons-napi/test_handle_scope/build/config.gypi +++ /dev/null @@ -1,70 +0,0 @@ -# Do not edit. 
File was generated by node-gyp's "configure" step -{ - "target_defaults": { - "cflags": [], - "default_configuration": "Release", - "defines": [], - "include_dirs": [], - "libraries": [] - }, - "variables": { - "asan": 0, - "coverage": "false", - "debug_devtools": "node", - "debug_http2": "false", - "debug_nghttp2": "false", - "force_dynamic_crt": 0, - "host_arch": "x64", - "icu_data_file": "icudt59l.dat", - "icu_data_in": "../../deps/icu-small/source/data/in/icudt59l.dat", - "icu_endianness": "l", - "icu_gyp_path": "tools/icu/icu-generic.gyp", - "icu_locales": "en,root", - "icu_path": "deps/icu-small", - "icu_small": "true", - "icu_ver_major": "59", - "llvm_version": 0, - "node_byteorder": "little", - "node_enable_d8": "false", - "node_enable_v8_vtunejit": "false", - "node_install_npm": "true", - "node_module_version": 58, - "node_no_browser_globals": "false", - "node_prefix": "/usr/local", - "node_release_urlbase": "", - "node_shared": "false", - "node_shared_cares": "false", - "node_shared_http_parser": "false", - "node_shared_libuv": "false", - "node_shared_openssl": "false", - "node_shared_zlib": "false", - "node_tag": "", - "node_use_bundled_v8": "true", - "node_use_dtrace": "true", - "node_use_etw": "false", - "node_use_lttng": "false", - "node_use_openssl": "true", - "node_use_perfctr": "false", - "node_use_v8_platform": "true", - "node_without_node_options": "false", - "openssl_fips": "", - "openssl_no_asm": 0, - "shlib_suffix": "58.dylib", - "target_arch": "x64", - "uv_parent_path": "/deps/uv/", - "uv_use_dtrace": "true", - "v8_enable_gdbjit": 0, - "v8_enable_i18n_support": 1, - "v8_enable_inspector": 1, - "v8_no_strict_aliasing": 1, - "v8_optimized_debug": 0, - "v8_promise_internal_field_count": 1, - "v8_random_seed": 0, - "v8_trace_maps": 0, - "v8_use_snapshot": "true", - "want_separate_host_toolset": 0, - "xcode_version": "8.0", - "nodedir": "/Users/trott/io.js", - "standalone_static_library": 1 - } -} diff --git a/test/addons-napi/test_handle_scope/build/gyp-mac-tool b/test/addons-napi/test_handle_scope/build/gyp-mac-tool deleted file mode 100755 index 8ef02b0493a003..00000000000000 --- a/test/addons-napi/test_handle_scope/build/gyp-mac-tool +++ /dev/null @@ -1,611 +0,0 @@ -#!/usr/bin/env python -# Generated by gyp. Do not edit. -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Utility functions to perform Xcode-style build steps. - -These functions are executed via gyp-mac-tool when using the Makefile generator. -""" - -import fcntl -import fnmatch -import glob -import json -import os -import plistlib -import re -import shutil -import string -import subprocess -import sys -import tempfile - - -def main(args): - executor = MacTool() - exit_code = executor.Dispatch(args) - if exit_code is not None: - sys.exit(exit_code) - - -class MacTool(object): - """This class performs all the Mac tooling steps. 
The methods can either be - executed directly, or dispatched from an argument list.""" - - def Dispatch(self, args): - """Dispatches a string command to a method.""" - if len(args) < 1: - raise Exception("Not enough arguments") - - method = "Exec%s" % self._CommandifyName(args[0]) - return getattr(self, method)(*args[1:]) - - def _CommandifyName(self, name_string): - """Transforms a tool name like copy-info-plist to CopyInfoPlist""" - return name_string.title().replace('-', '') - - def ExecCopyBundleResource(self, source, dest, convert_to_binary): - """Copies a resource file to the bundle/Resources directory, performing any - necessary compilation on each resource.""" - extension = os.path.splitext(source)[1].lower() - if os.path.isdir(source): - # Copy tree. - # TODO(thakis): This copies file attributes like mtime, while the - # single-file branch below doesn't. This should probably be changed to - # be consistent with the single-file branch. - if os.path.exists(dest): - shutil.rmtree(dest) - shutil.copytree(source, dest) - elif extension == '.xib': - return self._CopyXIBFile(source, dest) - elif extension == '.storyboard': - return self._CopyXIBFile(source, dest) - elif extension == '.strings': - self._CopyStringsFile(source, dest, convert_to_binary) - else: - shutil.copy(source, dest) - - def _CopyXIBFile(self, source, dest): - """Compiles a XIB file with ibtool into a binary plist in the bundle.""" - - # ibtool sometimes crashes with relative paths. See crbug.com/314728. - base = os.path.dirname(os.path.realpath(__file__)) - if os.path.relpath(source): - source = os.path.join(base, source) - if os.path.relpath(dest): - dest = os.path.join(base, dest) - - args = ['xcrun', 'ibtool', '--errors', '--warnings', '--notices', - '--output-format', 'human-readable-text', '--compile', dest, source] - ibtool_section_re = re.compile(r'/\*.*\*/') - ibtool_re = re.compile(r'.*note:.*is clipping its content') - ibtoolout = subprocess.Popen(args, stdout=subprocess.PIPE) - current_section_header = None - for line in ibtoolout.stdout: - if ibtool_section_re.match(line): - current_section_header = line - elif not ibtool_re.match(line): - if current_section_header: - sys.stdout.write(current_section_header) - current_section_header = None - sys.stdout.write(line) - return ibtoolout.returncode - - def _ConvertToBinary(self, dest): - subprocess.check_call([ - 'xcrun', 'plutil', '-convert', 'binary1', '-o', dest, dest]) - - def _CopyStringsFile(self, source, dest, convert_to_binary): - """Copies a .strings file using iconv to reconvert the input into UTF-16.""" - input_code = self._DetectInputEncoding(source) or "UTF-8" - - # Xcode's CpyCopyStringsFile / builtin-copyStrings seems to call - # CFPropertyListCreateFromXMLData() behind the scenes; at least it prints - # CFPropertyListCreateFromXMLData(): Old-style plist parser: missing - # semicolon in dictionary. - # on invalid files. Do the same kind of validation. - import CoreFoundation - s = open(source, 'rb').read() - d = CoreFoundation.CFDataCreate(None, s, len(s)) - _, error = CoreFoundation.CFPropertyListCreateFromXMLData(None, d, 0, None) - if error: - return - - fp = open(dest, 'wb') - fp.write(s.decode(input_code).encode('UTF-16')) - fp.close() - - if convert_to_binary == 'True': - self._ConvertToBinary(dest) - - def _DetectInputEncoding(self, file_name): - """Reads the first few bytes from file_name and tries to guess the text - encoding. 
Returns None as a guess if it can't detect it.""" - fp = open(file_name, 'rb') - try: - header = fp.read(3) - except e: - fp.close() - return None - fp.close() - if header.startswith("\xFE\xFF"): - return "UTF-16" - elif header.startswith("\xFF\xFE"): - return "UTF-16" - elif header.startswith("\xEF\xBB\xBF"): - return "UTF-8" - else: - return None - - def ExecCopyInfoPlist(self, source, dest, convert_to_binary, *keys): - """Copies the |source| Info.plist to the destination directory |dest|.""" - # Read the source Info.plist into memory. - fd = open(source, 'r') - lines = fd.read() - fd.close() - - # Insert synthesized key/value pairs (e.g. BuildMachineOSBuild). - plist = plistlib.readPlistFromString(lines) - if keys: - plist = dict(plist.items() + json.loads(keys[0]).items()) - lines = plistlib.writePlistToString(plist) - - # Go through all the environment variables and replace them as variables in - # the file. - IDENT_RE = re.compile(r'[/\s]') - for key in os.environ: - if key.startswith('_'): - continue - evar = '${%s}' % key - evalue = os.environ[key] - lines = string.replace(lines, evar, evalue) - - # Xcode supports various suffices on environment variables, which are - # all undocumented. :rfc1034identifier is used in the standard project - # template these days, and :identifier was used earlier. They are used to - # convert non-url characters into things that look like valid urls -- - # except that the replacement character for :identifier, '_' isn't valid - # in a URL either -- oops, hence :rfc1034identifier was born. - evar = '${%s:identifier}' % key - evalue = IDENT_RE.sub('_', os.environ[key]) - lines = string.replace(lines, evar, evalue) - - evar = '${%s:rfc1034identifier}' % key - evalue = IDENT_RE.sub('-', os.environ[key]) - lines = string.replace(lines, evar, evalue) - - # Remove any keys with values that haven't been replaced. - lines = lines.split('\n') - for i in range(len(lines)): - if lines[i].strip().startswith("${"): - lines[i] = None - lines[i - 1] = None - lines = '\n'.join(filter(lambda x: x is not None, lines)) - - # Write out the file with variables replaced. - fd = open(dest, 'w') - fd.write(lines) - fd.close() - - # Now write out PkgInfo file now that the Info.plist file has been - # "compiled". - self._WritePkgInfo(dest) - - if convert_to_binary == 'True': - self._ConvertToBinary(dest) - - def _WritePkgInfo(self, info_plist): - """This writes the PkgInfo file from the data stored in Info.plist.""" - plist = plistlib.readPlist(info_plist) - if not plist: - return - - # Only create PkgInfo for executable types. - package_type = plist['CFBundlePackageType'] - if package_type != 'APPL': - return - - # The format of PkgInfo is eight characters, representing the bundle type - # and bundle signature, each four characters. If that is missing, four - # '?' characters are used instead. - signature_code = plist.get('CFBundleSignature', '????') - if len(signature_code) != 4: # Wrong length resets everything, too. - signature_code = '?' * 4 - - dest = os.path.join(os.path.dirname(info_plist), 'PkgInfo') - fp = open(dest, 'w') - fp.write('%s%s' % (package_type, signature_code)) - fp.close() - - def ExecFlock(self, lockfile, *cmd_list): - """Emulates the most basic behavior of Linux's flock(1).""" - # Rely on exception handling to report errors. 
- fd = os.open(lockfile, os.O_RDONLY|os.O_NOCTTY|os.O_CREAT, 0o666) - fcntl.flock(fd, fcntl.LOCK_EX) - return subprocess.call(cmd_list) - - def ExecFilterLibtool(self, *cmd_list): - """Calls libtool and filters out '/path/to/libtool: file: foo.o has no - symbols'.""" - libtool_re = re.compile(r'^.*libtool: file: .* has no symbols$') - libtool_re5 = re.compile( - r'^.*libtool: warning for library: ' + - r'.* the table of contents is empty ' + - r'\(no object file members in the library define global symbols\)$') - env = os.environ.copy() - # Ref: - # http://www.opensource.apple.com/source/cctools/cctools-809/misc/libtool.c - # The problem with this flag is that it resets the file mtime on the file to - # epoch=0, e.g. 1970-1-1 or 1969-12-31 depending on timezone. - env['ZERO_AR_DATE'] = '1' - libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env) - _, err = libtoolout.communicate() - for line in err.splitlines(): - if not libtool_re.match(line) and not libtool_re5.match(line): - print >>sys.stderr, line - # Unconditionally touch the output .a file on the command line if present - # and the command succeeded. A bit hacky. - if not libtoolout.returncode: - for i in range(len(cmd_list) - 1): - if cmd_list[i] == "-o" and cmd_list[i+1].endswith('.a'): - os.utime(cmd_list[i+1], None) - break - return libtoolout.returncode - - def ExecPackageFramework(self, framework, version): - """Takes a path to Something.framework and the Current version of that and - sets up all the symlinks.""" - # Find the name of the binary based on the part before the ".framework". - binary = os.path.basename(framework).split('.')[0] - - CURRENT = 'Current' - RESOURCES = 'Resources' - VERSIONS = 'Versions' - - if not os.path.exists(os.path.join(framework, VERSIONS, version, binary)): - # Binary-less frameworks don't seem to contain symlinks (see e.g. - # chromium's out/Debug/org.chromium.Chromium.manifest/ bundle). - return - - # Move into the framework directory to set the symlinks correctly. - pwd = os.getcwd() - os.chdir(framework) - - # Set up the Current version. - self._Relink(version, os.path.join(VERSIONS, CURRENT)) - - # Set up the root symlinks. - self._Relink(os.path.join(VERSIONS, CURRENT, binary), binary) - self._Relink(os.path.join(VERSIONS, CURRENT, RESOURCES), RESOURCES) - - # Back to where we were before! - os.chdir(pwd) - - def _Relink(self, dest, link): - """Creates a symlink to |dest| named |link|. If |link| already exists, - it is overwritten.""" - if os.path.lexists(link): - os.remove(link) - os.symlink(dest, link) - - def ExecCompileXcassets(self, keys, *inputs): - """Compiles multiple .xcassets files into a single .car file. - - This invokes 'actool' to compile all the inputs .xcassets files. The - |keys| arguments is a json-encoded dictionary of extra arguments to - pass to 'actool' when the asset catalogs contains an application icon - or a launch image. - - Note that 'actool' does not create the Assets.car file if the asset - catalogs does not contains imageset. 
- """ - command_line = [ - 'xcrun', 'actool', '--output-format', 'human-readable-text', - '--compress-pngs', '--notices', '--warnings', '--errors', - ] - is_iphone_target = 'IPHONEOS_DEPLOYMENT_TARGET' in os.environ - if is_iphone_target: - platform = os.environ['CONFIGURATION'].split('-')[-1] - if platform not in ('iphoneos', 'iphonesimulator'): - platform = 'iphonesimulator' - command_line.extend([ - '--platform', platform, '--target-device', 'iphone', - '--target-device', 'ipad', '--minimum-deployment-target', - os.environ['IPHONEOS_DEPLOYMENT_TARGET'], '--compile', - os.path.abspath(os.environ['CONTENTS_FOLDER_PATH']), - ]) - else: - command_line.extend([ - '--platform', 'macosx', '--target-device', 'mac', - '--minimum-deployment-target', os.environ['MACOSX_DEPLOYMENT_TARGET'], - '--compile', - os.path.abspath(os.environ['UNLOCALIZED_RESOURCES_FOLDER_PATH']), - ]) - if keys: - keys = json.loads(keys) - for key, value in keys.iteritems(): - arg_name = '--' + key - if isinstance(value, bool): - if value: - command_line.append(arg_name) - elif isinstance(value, list): - for v in value: - command_line.append(arg_name) - command_line.append(str(v)) - else: - command_line.append(arg_name) - command_line.append(str(value)) - # Note: actool crashes if inputs path are relative, so use os.path.abspath - # to get absolute path name for inputs. - command_line.extend(map(os.path.abspath, inputs)) - subprocess.check_call(command_line) - - def ExecMergeInfoPlist(self, output, *inputs): - """Merge multiple .plist files into a single .plist file.""" - merged_plist = {} - for path in inputs: - plist = self._LoadPlistMaybeBinary(path) - self._MergePlist(merged_plist, plist) - plistlib.writePlist(merged_plist, output) - - def ExecCodeSignBundle(self, key, resource_rules, entitlements, provisioning): - """Code sign a bundle. - - This function tries to code sign an iOS bundle, following the same - algorithm as Xcode: - 1. copy ResourceRules.plist from the user or the SDK into the bundle, - 2. pick the provisioning profile that best match the bundle identifier, - and copy it into the bundle as embedded.mobileprovision, - 3. copy Entitlements.plist from user or SDK next to the bundle, - 4. code sign the bundle. - """ - resource_rules_path = self._InstallResourceRules(resource_rules) - substitutions, overrides = self._InstallProvisioningProfile( - provisioning, self._GetCFBundleIdentifier()) - entitlements_path = self._InstallEntitlements( - entitlements, substitutions, overrides) - subprocess.check_call([ - 'codesign', '--force', '--sign', key, '--resource-rules', - resource_rules_path, '--entitlements', entitlements_path, - os.path.join( - os.environ['TARGET_BUILD_DIR'], - os.environ['FULL_PRODUCT_NAME'])]) - - def _InstallResourceRules(self, resource_rules): - """Installs ResourceRules.plist from user or SDK into the bundle. - - Args: - resource_rules: string, optional, path to the ResourceRules.plist file - to use, default to "${SDKROOT}/ResourceRules.plist" - - Returns: - Path to the copy of ResourceRules.plist into the bundle. - """ - source_path = resource_rules - target_path = os.path.join( - os.environ['BUILT_PRODUCTS_DIR'], - os.environ['CONTENTS_FOLDER_PATH'], - 'ResourceRules.plist') - if not source_path: - source_path = os.path.join( - os.environ['SDKROOT'], 'ResourceRules.plist') - shutil.copy2(source_path, target_path) - return target_path - - def _InstallProvisioningProfile(self, profile, bundle_identifier): - """Installs embedded.mobileprovision into the bundle. 
- - Args: - profile: string, optional, short name of the .mobileprovision file - to use, if empty or the file is missing, the best file installed - will be used - bundle_identifier: string, value of CFBundleIdentifier from Info.plist - - Returns: - A tuple containing two dictionary: variables substitutions and values - to overrides when generating the entitlements file. - """ - source_path, provisioning_data, team_id = self._FindProvisioningProfile( - profile, bundle_identifier) - target_path = os.path.join( - os.environ['BUILT_PRODUCTS_DIR'], - os.environ['CONTENTS_FOLDER_PATH'], - 'embedded.mobileprovision') - shutil.copy2(source_path, target_path) - substitutions = self._GetSubstitutions(bundle_identifier, team_id + '.') - return substitutions, provisioning_data['Entitlements'] - - def _FindProvisioningProfile(self, profile, bundle_identifier): - """Finds the .mobileprovision file to use for signing the bundle. - - Checks all the installed provisioning profiles (or if the user specified - the PROVISIONING_PROFILE variable, only consult it) and select the most - specific that correspond to the bundle identifier. - - Args: - profile: string, optional, short name of the .mobileprovision file - to use, if empty or the file is missing, the best file installed - will be used - bundle_identifier: string, value of CFBundleIdentifier from Info.plist - - Returns: - A tuple of the path to the selected provisioning profile, the data of - the embedded plist in the provisioning profile and the team identifier - to use for code signing. - - Raises: - SystemExit: if no .mobileprovision can be used to sign the bundle. - """ - profiles_dir = os.path.join( - os.environ['HOME'], 'Library', 'MobileDevice', 'Provisioning Profiles') - if not os.path.isdir(profiles_dir): - print >>sys.stderr, ( - 'cannot find mobile provisioning for %s' % bundle_identifier) - sys.exit(1) - provisioning_profiles = None - if profile: - profile_path = os.path.join(profiles_dir, profile + '.mobileprovision') - if os.path.exists(profile_path): - provisioning_profiles = [profile_path] - if not provisioning_profiles: - provisioning_profiles = glob.glob( - os.path.join(profiles_dir, '*.mobileprovision')) - valid_provisioning_profiles = {} - for profile_path in provisioning_profiles: - profile_data = self._LoadProvisioningProfile(profile_path) - app_id_pattern = profile_data.get( - 'Entitlements', {}).get('application-identifier', '') - for team_identifier in profile_data.get('TeamIdentifier', []): - app_id = '%s.%s' % (team_identifier, bundle_identifier) - if fnmatch.fnmatch(app_id, app_id_pattern): - valid_provisioning_profiles[app_id_pattern] = ( - profile_path, profile_data, team_identifier) - if not valid_provisioning_profiles: - print >>sys.stderr, ( - 'cannot find mobile provisioning for %s' % bundle_identifier) - sys.exit(1) - # If the user has multiple provisioning profiles installed that can be - # used for ${bundle_identifier}, pick the most specific one (ie. the - # provisioning profile whose pattern is the longest). - selected_key = max(valid_provisioning_profiles, key=lambda v: len(v)) - return valid_provisioning_profiles[selected_key] - - def _LoadProvisioningProfile(self, profile_path): - """Extracts the plist embedded in a provisioning profile. - - Args: - profile_path: string, path to the .mobileprovision file - - Returns: - Content of the plist embedded in the provisioning profile as a dictionary. 
- """ - with tempfile.NamedTemporaryFile() as temp: - subprocess.check_call([ - 'security', 'cms', '-D', '-i', profile_path, '-o', temp.name]) - return self._LoadPlistMaybeBinary(temp.name) - - def _MergePlist(self, merged_plist, plist): - """Merge |plist| into |merged_plist|.""" - for key, value in plist.iteritems(): - if isinstance(value, dict): - merged_value = merged_plist.get(key, {}) - if isinstance(merged_value, dict): - self._MergePlist(merged_value, value) - merged_plist[key] = merged_value - else: - merged_plist[key] = value - else: - merged_plist[key] = value - - def _LoadPlistMaybeBinary(self, plist_path): - """Loads into a memory a plist possibly encoded in binary format. - - This is a wrapper around plistlib.readPlist that tries to convert the - plist to the XML format if it can't be parsed (assuming that it is in - the binary format). - - Args: - plist_path: string, path to a plist file, in XML or binary format - - Returns: - Content of the plist as a dictionary. - """ - try: - # First, try to read the file using plistlib that only supports XML, - # and if an exception is raised, convert a temporary copy to XML and - # load that copy. - return plistlib.readPlist(plist_path) - except: - pass - with tempfile.NamedTemporaryFile() as temp: - shutil.copy2(plist_path, temp.name) - subprocess.check_call(['plutil', '-convert', 'xml1', temp.name]) - return plistlib.readPlist(temp.name) - - def _GetSubstitutions(self, bundle_identifier, app_identifier_prefix): - """Constructs a dictionary of variable substitutions for Entitlements.plist. - - Args: - bundle_identifier: string, value of CFBundleIdentifier from Info.plist - app_identifier_prefix: string, value for AppIdentifierPrefix - - Returns: - Dictionary of substitutions to apply when generating Entitlements.plist. - """ - return { - 'CFBundleIdentifier': bundle_identifier, - 'AppIdentifierPrefix': app_identifier_prefix, - } - - def _GetCFBundleIdentifier(self): - """Extracts CFBundleIdentifier value from Info.plist in the bundle. - - Returns: - Value of CFBundleIdentifier in the Info.plist located in the bundle. - """ - info_plist_path = os.path.join( - os.environ['TARGET_BUILD_DIR'], - os.environ['INFOPLIST_PATH']) - info_plist_data = self._LoadPlistMaybeBinary(info_plist_path) - return info_plist_data['CFBundleIdentifier'] - - def _InstallEntitlements(self, entitlements, substitutions, overrides): - """Generates and install the ${BundleName}.xcent entitlements file. - - Expands variables "$(variable)" pattern in the source entitlements file, - add extra entitlements defined in the .mobileprovision file and the copy - the generated plist to "${BundlePath}.xcent". - - Args: - entitlements: string, optional, path to the Entitlements.plist template - to use, defaults to "${SDKROOT}/Entitlements.plist" - substitutions: dictionary, variable substitutions - overrides: dictionary, values to add to the entitlements - - Returns: - Path to the generated entitlements file. 
- """ - source_path = entitlements - target_path = os.path.join( - os.environ['BUILT_PRODUCTS_DIR'], - os.environ['PRODUCT_NAME'] + '.xcent') - if not source_path: - source_path = os.path.join( - os.environ['SDKROOT'], - 'Entitlements.plist') - shutil.copy2(source_path, target_path) - data = self._LoadPlistMaybeBinary(target_path) - data = self._ExpandVariables(data, substitutions) - if overrides: - for key in overrides: - if key not in data: - data[key] = overrides[key] - plistlib.writePlist(data, target_path) - return target_path - - def _ExpandVariables(self, data, substitutions): - """Expands variables "$(variable)" in data. - - Args: - data: object, can be either string, list or dictionary - substitutions: dictionary, variable substitutions to perform - - Returns: - Copy of data where each references to "$(variable)" has been replaced - by the corresponding value found in substitutions, or left intact if - the key was not found. - """ - if isinstance(data, str): - for key, value in substitutions.iteritems(): - data = data.replace('$(%s)' % key, value) - return data - if isinstance(data, list): - return [self._ExpandVariables(v, substitutions) for v in data] - if isinstance(data, dict): - return {k: self._ExpandVariables(data[k], substitutions) for k in data} - return data - -if __name__ == '__main__': - sys.exit(main(sys.argv[1:])) diff --git a/test/addons-napi/test_handle_scope/build/test_handle_scope.target.mk b/test/addons-napi/test_handle_scope/build/test_handle_scope.target.mk deleted file mode 100644 index decab157b0082c..00000000000000 --- a/test/addons-napi/test_handle_scope/build/test_handle_scope.target.mk +++ /dev/null @@ -1,178 +0,0 @@ -# This file is generated by gyp; do not edit. - -TOOLSET := target -TARGET := test_handle_scope -DEFS_Debug := \ - '-DNODE_GYP_MODULE_NAME=test_handle_scope' \ - '-DUSING_UV_SHARED=1' \ - '-DUSING_V8_SHARED=1' \ - '-DV8_DEPRECATION_WARNINGS=1' \ - '-D_DARWIN_USE_64_BIT_INODE=1' \ - '-D_LARGEFILE_SOURCE' \ - '-D_FILE_OFFSET_BITS=64' \ - '-DBUILDING_NODE_EXTENSION' \ - '-DDEBUG' \ - '-D_DEBUG' \ - '-DV8_ENABLE_CHECKS' - -# Flags passed to all source files. -CFLAGS_Debug := \ - -O0 \ - -gdwarf-2 \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -Wall \ - -Wendif-labels \ - -W \ - -Wno-unused-parameter - -# Flags passed to only C files. -CFLAGS_C_Debug := \ - -fno-strict-aliasing - -# Flags passed to only C++ files. -CFLAGS_CC_Debug := \ - -std=gnu++0x \ - -stdlib=libc++ \ - -fno-rtti \ - -fno-exceptions \ - -fno-threadsafe-statics \ - -fno-strict-aliasing - -# Flags passed to only ObjC files. -CFLAGS_OBJC_Debug := - -# Flags passed to only ObjC++ files. -CFLAGS_OBJCC_Debug := - -INCS_Debug := \ - -I/Users/trott/io.js/include/node \ - -I/Users/trott/io.js/src \ - -I/Users/trott/io.js/deps/uv/include \ - -I/Users/trott/io.js/deps/v8/include - -DEFS_Release := \ - '-DNODE_GYP_MODULE_NAME=test_handle_scope' \ - '-DUSING_UV_SHARED=1' \ - '-DUSING_V8_SHARED=1' \ - '-DV8_DEPRECATION_WARNINGS=1' \ - '-D_DARWIN_USE_64_BIT_INODE=1' \ - '-D_LARGEFILE_SOURCE' \ - '-D_FILE_OFFSET_BITS=64' \ - '-DBUILDING_NODE_EXTENSION' - -# Flags passed to all source files. -CFLAGS_Release := \ - -Os \ - -gdwarf-2 \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -Wall \ - -Wendif-labels \ - -W \ - -Wno-unused-parameter - -# Flags passed to only C files. -CFLAGS_C_Release := \ - -fno-strict-aliasing - -# Flags passed to only C++ files. 
-CFLAGS_CC_Release := \ - -std=gnu++0x \ - -stdlib=libc++ \ - -fno-rtti \ - -fno-exceptions \ - -fno-threadsafe-statics \ - -fno-strict-aliasing - -# Flags passed to only ObjC files. -CFLAGS_OBJC_Release := - -# Flags passed to only ObjC++ files. -CFLAGS_OBJCC_Release := - -INCS_Release := \ - -I/Users/trott/io.js/include/node \ - -I/Users/trott/io.js/src \ - -I/Users/trott/io.js/deps/uv/include \ - -I/Users/trott/io.js/deps/v8/include - -OBJS := \ - $(obj).target/$(TARGET)/test_handle_scope.o - -# Add to the list of files we specially track dependencies for. -all_deps += $(OBJS) - -# CFLAGS et al overrides must be target-local. -# See "Target-specific Variable Values" in the GNU Make manual. -$(OBJS): TOOLSET := $(TOOLSET) -$(OBJS): GYP_CFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) -$(OBJS): GYP_CXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) -$(OBJS): GYP_OBJCFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) $(CFLAGS_OBJC_$(BUILDTYPE)) -$(OBJS): GYP_OBJCXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) $(CFLAGS_OBJCC_$(BUILDTYPE)) - -# Suffix rules, putting all outputs into $(obj). - -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) - -# Try building from generated source, too. - -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) - -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) - -# End of this set of suffix rules -### Rules for final target. -LDFLAGS_Debug := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -L$(builddir) \ - -stdlib=libc++ - -LIBTOOLFLAGS_Debug := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first - -LDFLAGS_Release := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -L$(builddir) \ - -stdlib=libc++ - -LIBTOOLFLAGS_Release := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first - -LIBS := - -$(builddir)/test_handle_scope.node: GYP_LDFLAGS := $(LDFLAGS_$(BUILDTYPE)) -$(builddir)/test_handle_scope.node: LIBS := $(LIBS) -$(builddir)/test_handle_scope.node: GYP_LIBTOOLFLAGS := $(LIBTOOLFLAGS_$(BUILDTYPE)) -$(builddir)/test_handle_scope.node: TOOLSET := $(TOOLSET) -$(builddir)/test_handle_scope.node: $(OBJS) FORCE_DO_CMD - $(call do_cmd,solink_module) - -all_deps += $(builddir)/test_handle_scope.node -# Add target alias -.PHONY: test_handle_scope -test_handle_scope: $(builddir)/test_handle_scope.node - -# Short alias for building this executable. -.PHONY: test_handle_scope.node -test_handle_scope.node: $(builddir)/test_handle_scope.node - -# Add executable to "all" target. -.PHONY: all -all: $(builddir)/test_handle_scope.node - diff --git a/test/addons-napi/test_make_callback/build/Makefile b/test/addons-napi/test_make_callback/build/Makefile deleted file mode 100644 index f3fd26bef84d07..00000000000000 --- a/test/addons-napi/test_make_callback/build/Makefile +++ /dev/null @@ -1,342 +0,0 @@ -# We borrow heavily from the kernel build setup, though we are simpler since -# we don't have Kconfig tweaking settings on us. - -# The implicit make rules have it looking for RCS files, among other things. -# We instead explicitly write all the rules we care about. 
-# It's even quicker (saves ~200ms) to pass -r on the command line. -MAKEFLAGS=-r - -# The source directory tree. -srcdir := .. -abs_srcdir := $(abspath $(srcdir)) - -# The name of the builddir. -builddir_name ?= . - -# The V=1 flag on command line makes us verbosely print command lines. -ifdef V - quiet= -else - quiet=quiet_ -endif - -# Specify BUILDTYPE=Release on the command line for a release build. -BUILDTYPE ?= Release - -# Directory all our build output goes into. -# Note that this must be two directories beneath src/ for unit tests to pass, -# as they reach into the src/ directory for data with relative paths. -builddir ?= $(builddir_name)/$(BUILDTYPE) -abs_builddir := $(abspath $(builddir)) -depsdir := $(builddir)/.deps - -# Object output directory. -obj := $(builddir)/obj -abs_obj := $(abspath $(obj)) - -# We build up a list of every single one of the targets so we can slurp in the -# generated dependency rule Makefiles in one pass. -all_deps := - - - -CC.target ?= $(CC) -CFLAGS.target ?= $(CPPFLAGS) $(CFLAGS) -CXX.target ?= $(CXX) -CXXFLAGS.target ?= $(CPPFLAGS) $(CXXFLAGS) -LINK.target ?= $(LINK) -LDFLAGS.target ?= $(LDFLAGS) -AR.target ?= $(AR) - -# C++ apps need to be linked with g++. -LINK ?= $(CXX.target) - -# TODO(evan): move all cross-compilation logic to gyp-time so we don't need -# to replicate this environment fallback in make as well. -CC.host ?= gcc -CFLAGS.host ?= $(CPPFLAGS_host) $(CFLAGS_host) -CXX.host ?= g++ -CXXFLAGS.host ?= $(CPPFLAGS_host) $(CXXFLAGS_host) -LINK.host ?= $(CXX.host) -LDFLAGS.host ?= -AR.host ?= ar - -# Define a dir function that can handle spaces. -# http://www.gnu.org/software/make/manual/make.html#Syntax-of-Functions -# "leading spaces cannot appear in the text of the first argument as written. -# These characters can be put into the argument value by variable substitution." -empty := -space := $(empty) $(empty) - -# http://stackoverflow.com/questions/1189781/using-make-dir-or-notdir-on-a-path-with-spaces -replace_spaces = $(subst $(space),?,$1) -unreplace_spaces = $(subst ?,$(space),$1) -dirx = $(call unreplace_spaces,$(dir $(call replace_spaces,$1))) - -# Flags to make gcc output dependency info. Note that you need to be -# careful here to use the flags that ccache and distcc can understand. -# We write to a dep file on the side first and then rename at the end -# so we can't end up with a broken dep file. -depfile = $(depsdir)/$(call replace_spaces,$@).d -DEPFLAGS = -MMD -MF $(depfile).raw - -# We have to fixup the deps output in a few ways. -# (1) the file output should mention the proper .o file. -# ccache or distcc lose the path to the target, so we convert a rule of -# the form: -# foobar.o: DEP1 DEP2 -# into -# path/to/foobar.o: DEP1 DEP2 -# (2) we want missing files not to cause us to fail to build. -# We want to rewrite -# foobar.o: DEP1 DEP2 \ -# DEP3 -# to -# DEP1: -# DEP2: -# DEP3: -# so if the files are missing, they're just considered phony rules. -# We have to do some pretty insane escaping to get those backslashes -# and dollar signs past make, the shell, and sed at the same time. -# Doesn't work with spaces, but that's fine: .d files have spaces in -# their names replaced with other characters. -define fixup_dep -# The depfile may not exist if the input file didn't have any #includes. -touch $(depfile).raw -# Fixup path as in (1). -sed -e "s|^$(notdir $@)|$@|" $(depfile).raw >> $(depfile) -# Add extra rules as in (2). 
-# We remove slashes and replace spaces with new lines; -# remove blank lines; -# delete the first line and append a colon to the remaining lines. -sed -e 's|\\||' -e 'y| |\n|' $(depfile).raw |\ - grep -v '^$$' |\ - sed -e 1d -e 's|$$|:|' \ - >> $(depfile) -rm $(depfile).raw -endef - -# Command definitions: -# - cmd_foo is the actual command to run; -# - quiet_cmd_foo is the brief-output summary of the command. - -quiet_cmd_cc = CC($(TOOLSET)) $@ -cmd_cc = $(CC.$(TOOLSET)) $(GYP_CFLAGS) $(DEPFLAGS) $(CFLAGS.$(TOOLSET)) -c -o $@ $< - -quiet_cmd_cxx = CXX($(TOOLSET)) $@ -cmd_cxx = $(CXX.$(TOOLSET)) $(GYP_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< - -quiet_cmd_objc = CXX($(TOOLSET)) $@ -cmd_objc = $(CC.$(TOOLSET)) $(GYP_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $< - -quiet_cmd_objcxx = CXX($(TOOLSET)) $@ -cmd_objcxx = $(CXX.$(TOOLSET)) $(GYP_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $< - -# Commands for precompiled header files. -quiet_cmd_pch_c = CXX($(TOOLSET)) $@ -cmd_pch_c = $(CC.$(TOOLSET)) $(GYP_PCH_CFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< -quiet_cmd_pch_cc = CXX($(TOOLSET)) $@ -cmd_pch_cc = $(CC.$(TOOLSET)) $(GYP_PCH_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< -quiet_cmd_pch_m = CXX($(TOOLSET)) $@ -cmd_pch_m = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $< -quiet_cmd_pch_mm = CXX($(TOOLSET)) $@ -cmd_pch_mm = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $< - -# gyp-mac-tool is written next to the root Makefile by gyp. -# Use $(4) for the command, since $(2) and $(3) are used as flag by do_cmd -# already. -quiet_cmd_mac_tool = MACTOOL $(4) $< -cmd_mac_tool = ./gyp-mac-tool $(4) $< "$@" - -quiet_cmd_mac_package_framework = PACKAGE FRAMEWORK $@ -cmd_mac_package_framework = ./gyp-mac-tool package-framework "$@" $(4) - -quiet_cmd_infoplist = INFOPLIST $@ -cmd_infoplist = $(CC.$(TOOLSET)) -E -P -Wno-trigraphs -x c $(INFOPLIST_DEFINES) "$<" -o "$@" - -quiet_cmd_touch = TOUCH $@ -cmd_touch = touch $@ - -quiet_cmd_copy = COPY $@ -# send stderr to /dev/null to ignore messages when linking directories. -cmd_copy = rm -rf "$@" && cp -af "$<" "$@" - -quiet_cmd_alink = LIBTOOL-STATIC $@ -cmd_alink = rm -f $@ && ./gyp-mac-tool filter-libtool libtool $(GYP_LIBTOOLFLAGS) -static -o $@ $(filter %.o,$^) - -quiet_cmd_link = LINK($(TOOLSET)) $@ -cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS) - -quiet_cmd_solink = SOLINK($(TOOLSET)) $@ -cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS) - -quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@ -cmd_solink_module = $(LINK.$(TOOLSET)) -bundle $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS) - - -# Define an escape_quotes function to escape single quotes. -# This allows us to handle quotes properly as long as we always use -# use single quotes and escape_quotes. -escape_quotes = $(subst ','\'',$(1)) -# This comment is here just to include a ' to unconfuse syntax highlighting. -# Define an escape_vars function to escape '$' variable syntax. -# This allows us to read/write command lines with shell variables (e.g. -# $LD_LIBRARY_PATH), without triggering make substitution. -escape_vars = $(subst $$,$$$$,$(1)) -# Helper that expands to a shell command to echo a string exactly as it is in -# make. This uses printf instead of echo because printf's behaviour with respect -# to escape sequences is more portable than echo's across different shells -# (e.g., dash, bash). 
-exact_echo = printf '%s\n' '$(call escape_quotes,$(1))' - -# Helper to compare the command we're about to run against the command -# we logged the last time we ran the command. Produces an empty -# string (false) when the commands match. -# Tricky point: Make has no string-equality test function. -# The kernel uses the following, but it seems like it would have false -# positives, where one string reordered its arguments. -# arg_check = $(strip $(filter-out $(cmd_$(1)), $(cmd_$@)) \ -# $(filter-out $(cmd_$@), $(cmd_$(1)))) -# We instead substitute each for the empty string into the other, and -# say they're equal if both substitutions produce the empty string. -# .d files contain ? instead of spaces, take that into account. -command_changed = $(or $(subst $(cmd_$(1)),,$(cmd_$(call replace_spaces,$@))),\ - $(subst $(cmd_$(call replace_spaces,$@)),,$(cmd_$(1)))) - -# Helper that is non-empty when a prerequisite changes. -# Normally make does this implicitly, but we force rules to always run -# so we can check their command lines. -# $? -- new prerequisites -# $| -- order-only dependencies -prereq_changed = $(filter-out FORCE_DO_CMD,$(filter-out $|,$?)) - -# Helper that executes all postbuilds until one fails. -define do_postbuilds - @E=0;\ - for p in $(POSTBUILDS); do\ - eval $$p;\ - E=$$?;\ - if [ $$E -ne 0 ]; then\ - break;\ - fi;\ - done;\ - if [ $$E -ne 0 ]; then\ - rm -rf "$@";\ - exit $$E;\ - fi -endef - -# do_cmd: run a command via the above cmd_foo names, if necessary. -# Should always run for a given target to handle command-line changes. -# Second argument, if non-zero, makes it do asm/C/C++ dependency munging. -# Third argument, if non-zero, makes it do POSTBUILDS processing. -# Note: We intentionally do NOT call dirx for depfile, since it contains ? for -# spaces already and dirx strips the ? characters. -define do_cmd -$(if $(or $(command_changed),$(prereq_changed)), - @$(call exact_echo, $($(quiet)cmd_$(1))) - @mkdir -p "$(call dirx,$@)" "$(dir $(depfile))" - $(if $(findstring flock,$(word 2,$(cmd_$1))), - @$(cmd_$(1)) - @echo " $(quiet_cmd_$(1)): Finished", - @$(cmd_$(1)) - ) - @$(call exact_echo,$(call escape_vars,cmd_$(call replace_spaces,$@) := $(cmd_$(1)))) > $(depfile) - @$(if $(2),$(fixup_dep)) - $(if $(and $(3), $(POSTBUILDS)), - $(call do_postbuilds) - ) -) -endef - -# Declare the "all" target first so it is the default, -# even though we don't have the deps yet. -.PHONY: all -all: - -# make looks for ways to re-generate included makefiles, but in our case, we -# don't have a direct way. Explicitly telling make that it has nothing to do -# for them makes it go faster. -%.d: ; - -# Use FORCE_DO_CMD to force a target to run. Should be coupled with -# do_cmd. -.PHONY: FORCE_DO_CMD -FORCE_DO_CMD: - -TOOLSET := target -# Suffix rules, putting all outputs into $(obj). -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.m FORCE_DO_CMD - @$(call do_cmd,objc,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.mm FORCE_DO_CMD - @$(call do_cmd,objcxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.S FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.s FORCE_DO_CMD - @$(call do_cmd,cc,1) - -# Try building from generated source, too. 
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.m FORCE_DO_CMD - @$(call do_cmd,objc,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.mm FORCE_DO_CMD - @$(call do_cmd,objcxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.S FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.s FORCE_DO_CMD - @$(call do_cmd,cc,1) - -$(obj).$(TOOLSET)/%.o: $(obj)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.m FORCE_DO_CMD - @$(call do_cmd,objc,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.mm FORCE_DO_CMD - @$(call do_cmd,objcxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.S FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.s FORCE_DO_CMD - @$(call do_cmd,cc,1) - - -ifeq ($(strip $(foreach prefix,$(NO_LOAD),\ - $(findstring $(join ^,$(prefix)),\ - $(join ^,binding.target.mk)))),) - include binding.target.mk -endif - -quiet_cmd_regen_makefile = ACTION Regenerating $@ -cmd_regen_makefile = cd $(srcdir); /Users/trott/io.js/deps/npm/node_modules/node-gyp/gyp/gyp_main.py -fmake --ignore-environment "--toplevel-dir=." -I/Users/trott/io.js/test/addons-napi/test_make_callback/build/config.gypi -I/Users/trott/io.js/deps/npm/node_modules/node-gyp/addon.gypi -I/Users/trott/io.js/common.gypi "--depth=." "-Goutput_dir=." "--generator-output=build" "-Dlibrary=shared_library" "-Dvisibility=default" "-Dnode_root_dir=/Users/trott/io.js" "-Dnode_gyp_dir=/Users/trott/io.js/deps/npm/node_modules/node-gyp" "-Dnode_lib_file=/Users/trott/io.js/$(Configuration)/node.lib" "-Dmodule_root_dir=/Users/trott/io.js/test/addons-napi/test_make_callback" "-Dnode_engine=v8" binding.gyp -Makefile: $(srcdir)/../../../deps/npm/node_modules/node-gyp/addon.gypi $(srcdir)/build/config.gypi $(srcdir)/binding.gyp $(srcdir)/../../../common.gypi - $(call do_cmd,regen_makefile) - -# "all" is a concatenation of the "all" targets from all the included -# sub-makefiles. This is just here to clarify. -all: - -# Add in dependency-tracking rules. $(all_deps) is the list of every single -# target in our tree. Only consider the ones with .d (dependency) info: -d_files := $(wildcard $(foreach f,$(all_deps),$(depsdir)/$(f).d)) -ifneq ($(d_files),) - include $(d_files) -endif diff --git a/test/addons-napi/test_make_callback/build/binding.Makefile b/test/addons-napi/test_make_callback/build/binding.Makefile deleted file mode 100644 index 0556c70f61905f..00000000000000 --- a/test/addons-napi/test_make_callback/build/binding.Makefile +++ /dev/null @@ -1,6 +0,0 @@ -# This file is generated by gyp; do not edit. - -export builddir_name ?= ./build/. -.PHONY: all -all: - $(MAKE) binding diff --git a/test/addons-napi/test_make_callback/build/binding.target.mk b/test/addons-napi/test_make_callback/build/binding.target.mk deleted file mode 100644 index fb7675d45de43e..00000000000000 --- a/test/addons-napi/test_make_callback/build/binding.target.mk +++ /dev/null @@ -1,178 +0,0 @@ -# This file is generated by gyp; do not edit. 
- -TOOLSET := target -TARGET := binding -DEFS_Debug := \ - '-DNODE_GYP_MODULE_NAME=binding' \ - '-DUSING_UV_SHARED=1' \ - '-DUSING_V8_SHARED=1' \ - '-DV8_DEPRECATION_WARNINGS=1' \ - '-D_DARWIN_USE_64_BIT_INODE=1' \ - '-D_LARGEFILE_SOURCE' \ - '-D_FILE_OFFSET_BITS=64' \ - '-DBUILDING_NODE_EXTENSION' \ - '-DDEBUG' \ - '-D_DEBUG' \ - '-DV8_ENABLE_CHECKS' - -# Flags passed to all source files. -CFLAGS_Debug := \ - -O0 \ - -gdwarf-2 \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -Wall \ - -Wendif-labels \ - -W \ - -Wno-unused-parameter - -# Flags passed to only C files. -CFLAGS_C_Debug := \ - -fno-strict-aliasing - -# Flags passed to only C++ files. -CFLAGS_CC_Debug := \ - -std=gnu++0x \ - -stdlib=libc++ \ - -fno-rtti \ - -fno-exceptions \ - -fno-threadsafe-statics \ - -fno-strict-aliasing - -# Flags passed to only ObjC files. -CFLAGS_OBJC_Debug := - -# Flags passed to only ObjC++ files. -CFLAGS_OBJCC_Debug := - -INCS_Debug := \ - -I/Users/trott/io.js/include/node \ - -I/Users/trott/io.js/src \ - -I/Users/trott/io.js/deps/uv/include \ - -I/Users/trott/io.js/deps/v8/include - -DEFS_Release := \ - '-DNODE_GYP_MODULE_NAME=binding' \ - '-DUSING_UV_SHARED=1' \ - '-DUSING_V8_SHARED=1' \ - '-DV8_DEPRECATION_WARNINGS=1' \ - '-D_DARWIN_USE_64_BIT_INODE=1' \ - '-D_LARGEFILE_SOURCE' \ - '-D_FILE_OFFSET_BITS=64' \ - '-DBUILDING_NODE_EXTENSION' - -# Flags passed to all source files. -CFLAGS_Release := \ - -Os \ - -gdwarf-2 \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -Wall \ - -Wendif-labels \ - -W \ - -Wno-unused-parameter - -# Flags passed to only C files. -CFLAGS_C_Release := \ - -fno-strict-aliasing - -# Flags passed to only C++ files. -CFLAGS_CC_Release := \ - -std=gnu++0x \ - -stdlib=libc++ \ - -fno-rtti \ - -fno-exceptions \ - -fno-threadsafe-statics \ - -fno-strict-aliasing - -# Flags passed to only ObjC files. -CFLAGS_OBJC_Release := - -# Flags passed to only ObjC++ files. -CFLAGS_OBJCC_Release := - -INCS_Release := \ - -I/Users/trott/io.js/include/node \ - -I/Users/trott/io.js/src \ - -I/Users/trott/io.js/deps/uv/include \ - -I/Users/trott/io.js/deps/v8/include - -OBJS := \ - $(obj).target/$(TARGET)/binding.o - -# Add to the list of files we specially track dependencies for. -all_deps += $(OBJS) - -# CFLAGS et al overrides must be target-local. -# See "Target-specific Variable Values" in the GNU Make manual. -$(OBJS): TOOLSET := $(TOOLSET) -$(OBJS): GYP_CFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) -$(OBJS): GYP_CXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) -$(OBJS): GYP_OBJCFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) $(CFLAGS_OBJC_$(BUILDTYPE)) -$(OBJS): GYP_OBJCXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) $(CFLAGS_OBJCC_$(BUILDTYPE)) - -# Suffix rules, putting all outputs into $(obj). - -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) - -# Try building from generated source, too. - -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) - -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) - -# End of this set of suffix rules -### Rules for final target. 
-LDFLAGS_Debug := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -L$(builddir) \ - -stdlib=libc++ - -LIBTOOLFLAGS_Debug := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first - -LDFLAGS_Release := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -L$(builddir) \ - -stdlib=libc++ - -LIBTOOLFLAGS_Release := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first - -LIBS := - -$(builddir)/binding.node: GYP_LDFLAGS := $(LDFLAGS_$(BUILDTYPE)) -$(builddir)/binding.node: LIBS := $(LIBS) -$(builddir)/binding.node: GYP_LIBTOOLFLAGS := $(LIBTOOLFLAGS_$(BUILDTYPE)) -$(builddir)/binding.node: TOOLSET := $(TOOLSET) -$(builddir)/binding.node: $(OBJS) FORCE_DO_CMD - $(call do_cmd,solink_module) - -all_deps += $(builddir)/binding.node -# Add target alias -.PHONY: binding -binding: $(builddir)/binding.node - -# Short alias for building this executable. -.PHONY: binding.node -binding.node: $(builddir)/binding.node - -# Add executable to "all" target. -.PHONY: all -all: $(builddir)/binding.node - diff --git a/test/addons-napi/test_make_callback/build/config.gypi b/test/addons-napi/test_make_callback/build/config.gypi deleted file mode 100644 index 6a35cfccbb772b..00000000000000 --- a/test/addons-napi/test_make_callback/build/config.gypi +++ /dev/null @@ -1,70 +0,0 @@ -# Do not edit. File was generated by node-gyp's "configure" step -{ - "target_defaults": { - "cflags": [], - "default_configuration": "Release", - "defines": [], - "include_dirs": [], - "libraries": [] - }, - "variables": { - "asan": 0, - "coverage": "false", - "debug_devtools": "node", - "debug_http2": "false", - "debug_nghttp2": "false", - "force_dynamic_crt": 0, - "host_arch": "x64", - "icu_data_file": "icudt59l.dat", - "icu_data_in": "../../deps/icu-small/source/data/in/icudt59l.dat", - "icu_endianness": "l", - "icu_gyp_path": "tools/icu/icu-generic.gyp", - "icu_locales": "en,root", - "icu_path": "deps/icu-small", - "icu_small": "true", - "icu_ver_major": "59", - "llvm_version": 0, - "node_byteorder": "little", - "node_enable_d8": "false", - "node_enable_v8_vtunejit": "false", - "node_install_npm": "true", - "node_module_version": 58, - "node_no_browser_globals": "false", - "node_prefix": "/usr/local", - "node_release_urlbase": "", - "node_shared": "false", - "node_shared_cares": "false", - "node_shared_http_parser": "false", - "node_shared_libuv": "false", - "node_shared_openssl": "false", - "node_shared_zlib": "false", - "node_tag": "", - "node_use_bundled_v8": "true", - "node_use_dtrace": "true", - "node_use_etw": "false", - "node_use_lttng": "false", - "node_use_openssl": "true", - "node_use_perfctr": "false", - "node_use_v8_platform": "true", - "node_without_node_options": "false", - "openssl_fips": "", - "openssl_no_asm": 0, - "shlib_suffix": "58.dylib", - "target_arch": "x64", - "uv_parent_path": "/deps/uv/", - "uv_use_dtrace": "true", - "v8_enable_gdbjit": 0, - "v8_enable_i18n_support": 1, - "v8_enable_inspector": 1, - "v8_no_strict_aliasing": 1, - "v8_optimized_debug": 0, - "v8_promise_internal_field_count": 1, - "v8_random_seed": 0, - "v8_trace_maps": 0, - "v8_use_snapshot": "true", - "want_separate_host_toolset": 0, - "xcode_version": "8.0", - "nodedir": "/Users/trott/io.js", - "standalone_static_library": 1 - } -} diff --git a/test/addons-napi/test_make_callback/build/gyp-mac-tool b/test/addons-napi/test_make_callback/build/gyp-mac-tool 
deleted file mode 100755 index 8ef02b0493a003..00000000000000 --- a/test/addons-napi/test_make_callback/build/gyp-mac-tool +++ /dev/null @@ -1,611 +0,0 @@ -#!/usr/bin/env python -# Generated by gyp. Do not edit. -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Utility functions to perform Xcode-style build steps. - -These functions are executed via gyp-mac-tool when using the Makefile generator. -""" - -import fcntl -import fnmatch -import glob -import json -import os -import plistlib -import re -import shutil -import string -import subprocess -import sys -import tempfile - - -def main(args): - executor = MacTool() - exit_code = executor.Dispatch(args) - if exit_code is not None: - sys.exit(exit_code) - - -class MacTool(object): - """This class performs all the Mac tooling steps. The methods can either be - executed directly, or dispatched from an argument list.""" - - def Dispatch(self, args): - """Dispatches a string command to a method.""" - if len(args) < 1: - raise Exception("Not enough arguments") - - method = "Exec%s" % self._CommandifyName(args[0]) - return getattr(self, method)(*args[1:]) - - def _CommandifyName(self, name_string): - """Transforms a tool name like copy-info-plist to CopyInfoPlist""" - return name_string.title().replace('-', '') - - def ExecCopyBundleResource(self, source, dest, convert_to_binary): - """Copies a resource file to the bundle/Resources directory, performing any - necessary compilation on each resource.""" - extension = os.path.splitext(source)[1].lower() - if os.path.isdir(source): - # Copy tree. - # TODO(thakis): This copies file attributes like mtime, while the - # single-file branch below doesn't. This should probably be changed to - # be consistent with the single-file branch. - if os.path.exists(dest): - shutil.rmtree(dest) - shutil.copytree(source, dest) - elif extension == '.xib': - return self._CopyXIBFile(source, dest) - elif extension == '.storyboard': - return self._CopyXIBFile(source, dest) - elif extension == '.strings': - self._CopyStringsFile(source, dest, convert_to_binary) - else: - shutil.copy(source, dest) - - def _CopyXIBFile(self, source, dest): - """Compiles a XIB file with ibtool into a binary plist in the bundle.""" - - # ibtool sometimes crashes with relative paths. See crbug.com/314728. 
- base = os.path.dirname(os.path.realpath(__file__)) - if os.path.relpath(source): - source = os.path.join(base, source) - if os.path.relpath(dest): - dest = os.path.join(base, dest) - - args = ['xcrun', 'ibtool', '--errors', '--warnings', '--notices', - '--output-format', 'human-readable-text', '--compile', dest, source] - ibtool_section_re = re.compile(r'/\*.*\*/') - ibtool_re = re.compile(r'.*note:.*is clipping its content') - ibtoolout = subprocess.Popen(args, stdout=subprocess.PIPE) - current_section_header = None - for line in ibtoolout.stdout: - if ibtool_section_re.match(line): - current_section_header = line - elif not ibtool_re.match(line): - if current_section_header: - sys.stdout.write(current_section_header) - current_section_header = None - sys.stdout.write(line) - return ibtoolout.returncode - - def _ConvertToBinary(self, dest): - subprocess.check_call([ - 'xcrun', 'plutil', '-convert', 'binary1', '-o', dest, dest]) - - def _CopyStringsFile(self, source, dest, convert_to_binary): - """Copies a .strings file using iconv to reconvert the input into UTF-16.""" - input_code = self._DetectInputEncoding(source) or "UTF-8" - - # Xcode's CpyCopyStringsFile / builtin-copyStrings seems to call - # CFPropertyListCreateFromXMLData() behind the scenes; at least it prints - # CFPropertyListCreateFromXMLData(): Old-style plist parser: missing - # semicolon in dictionary. - # on invalid files. Do the same kind of validation. - import CoreFoundation - s = open(source, 'rb').read() - d = CoreFoundation.CFDataCreate(None, s, len(s)) - _, error = CoreFoundation.CFPropertyListCreateFromXMLData(None, d, 0, None) - if error: - return - - fp = open(dest, 'wb') - fp.write(s.decode(input_code).encode('UTF-16')) - fp.close() - - if convert_to_binary == 'True': - self._ConvertToBinary(dest) - - def _DetectInputEncoding(self, file_name): - """Reads the first few bytes from file_name and tries to guess the text - encoding. Returns None as a guess if it can't detect it.""" - fp = open(file_name, 'rb') - try: - header = fp.read(3) - except e: - fp.close() - return None - fp.close() - if header.startswith("\xFE\xFF"): - return "UTF-16" - elif header.startswith("\xFF\xFE"): - return "UTF-16" - elif header.startswith("\xEF\xBB\xBF"): - return "UTF-8" - else: - return None - - def ExecCopyInfoPlist(self, source, dest, convert_to_binary, *keys): - """Copies the |source| Info.plist to the destination directory |dest|.""" - # Read the source Info.plist into memory. - fd = open(source, 'r') - lines = fd.read() - fd.close() - - # Insert synthesized key/value pairs (e.g. BuildMachineOSBuild). - plist = plistlib.readPlistFromString(lines) - if keys: - plist = dict(plist.items() + json.loads(keys[0]).items()) - lines = plistlib.writePlistToString(plist) - - # Go through all the environment variables and replace them as variables in - # the file. - IDENT_RE = re.compile(r'[/\s]') - for key in os.environ: - if key.startswith('_'): - continue - evar = '${%s}' % key - evalue = os.environ[key] - lines = string.replace(lines, evar, evalue) - - # Xcode supports various suffices on environment variables, which are - # all undocumented. :rfc1034identifier is used in the standard project - # template these days, and :identifier was used earlier. They are used to - # convert non-url characters into things that look like valid urls -- - # except that the replacement character for :identifier, '_' isn't valid - # in a URL either -- oops, hence :rfc1034identifier was born. 
- evar = '${%s:identifier}' % key - evalue = IDENT_RE.sub('_', os.environ[key]) - lines = string.replace(lines, evar, evalue) - - evar = '${%s:rfc1034identifier}' % key - evalue = IDENT_RE.sub('-', os.environ[key]) - lines = string.replace(lines, evar, evalue) - - # Remove any keys with values that haven't been replaced. - lines = lines.split('\n') - for i in range(len(lines)): - if lines[i].strip().startswith("${"): - lines[i] = None - lines[i - 1] = None - lines = '\n'.join(filter(lambda x: x is not None, lines)) - - # Write out the file with variables replaced. - fd = open(dest, 'w') - fd.write(lines) - fd.close() - - # Now write out PkgInfo file now that the Info.plist file has been - # "compiled". - self._WritePkgInfo(dest) - - if convert_to_binary == 'True': - self._ConvertToBinary(dest) - - def _WritePkgInfo(self, info_plist): - """This writes the PkgInfo file from the data stored in Info.plist.""" - plist = plistlib.readPlist(info_plist) - if not plist: - return - - # Only create PkgInfo for executable types. - package_type = plist['CFBundlePackageType'] - if package_type != 'APPL': - return - - # The format of PkgInfo is eight characters, representing the bundle type - # and bundle signature, each four characters. If that is missing, four - # '?' characters are used instead. - signature_code = plist.get('CFBundleSignature', '????') - if len(signature_code) != 4: # Wrong length resets everything, too. - signature_code = '?' * 4 - - dest = os.path.join(os.path.dirname(info_plist), 'PkgInfo') - fp = open(dest, 'w') - fp.write('%s%s' % (package_type, signature_code)) - fp.close() - - def ExecFlock(self, lockfile, *cmd_list): - """Emulates the most basic behavior of Linux's flock(1).""" - # Rely on exception handling to report errors. - fd = os.open(lockfile, os.O_RDONLY|os.O_NOCTTY|os.O_CREAT, 0o666) - fcntl.flock(fd, fcntl.LOCK_EX) - return subprocess.call(cmd_list) - - def ExecFilterLibtool(self, *cmd_list): - """Calls libtool and filters out '/path/to/libtool: file: foo.o has no - symbols'.""" - libtool_re = re.compile(r'^.*libtool: file: .* has no symbols$') - libtool_re5 = re.compile( - r'^.*libtool: warning for library: ' + - r'.* the table of contents is empty ' + - r'\(no object file members in the library define global symbols\)$') - env = os.environ.copy() - # Ref: - # http://www.opensource.apple.com/source/cctools/cctools-809/misc/libtool.c - # The problem with this flag is that it resets the file mtime on the file to - # epoch=0, e.g. 1970-1-1 or 1969-12-31 depending on timezone. - env['ZERO_AR_DATE'] = '1' - libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env) - _, err = libtoolout.communicate() - for line in err.splitlines(): - if not libtool_re.match(line) and not libtool_re5.match(line): - print >>sys.stderr, line - # Unconditionally touch the output .a file on the command line if present - # and the command succeeded. A bit hacky. - if not libtoolout.returncode: - for i in range(len(cmd_list) - 1): - if cmd_list[i] == "-o" and cmd_list[i+1].endswith('.a'): - os.utime(cmd_list[i+1], None) - break - return libtoolout.returncode - - def ExecPackageFramework(self, framework, version): - """Takes a path to Something.framework and the Current version of that and - sets up all the symlinks.""" - # Find the name of the binary based on the part before the ".framework". 
- binary = os.path.basename(framework).split('.')[0] - - CURRENT = 'Current' - RESOURCES = 'Resources' - VERSIONS = 'Versions' - - if not os.path.exists(os.path.join(framework, VERSIONS, version, binary)): - # Binary-less frameworks don't seem to contain symlinks (see e.g. - # chromium's out/Debug/org.chromium.Chromium.manifest/ bundle). - return - - # Move into the framework directory to set the symlinks correctly. - pwd = os.getcwd() - os.chdir(framework) - - # Set up the Current version. - self._Relink(version, os.path.join(VERSIONS, CURRENT)) - - # Set up the root symlinks. - self._Relink(os.path.join(VERSIONS, CURRENT, binary), binary) - self._Relink(os.path.join(VERSIONS, CURRENT, RESOURCES), RESOURCES) - - # Back to where we were before! - os.chdir(pwd) - - def _Relink(self, dest, link): - """Creates a symlink to |dest| named |link|. If |link| already exists, - it is overwritten.""" - if os.path.lexists(link): - os.remove(link) - os.symlink(dest, link) - - def ExecCompileXcassets(self, keys, *inputs): - """Compiles multiple .xcassets files into a single .car file. - - This invokes 'actool' to compile all the inputs .xcassets files. The - |keys| arguments is a json-encoded dictionary of extra arguments to - pass to 'actool' when the asset catalogs contains an application icon - or a launch image. - - Note that 'actool' does not create the Assets.car file if the asset - catalogs does not contains imageset. - """ - command_line = [ - 'xcrun', 'actool', '--output-format', 'human-readable-text', - '--compress-pngs', '--notices', '--warnings', '--errors', - ] - is_iphone_target = 'IPHONEOS_DEPLOYMENT_TARGET' in os.environ - if is_iphone_target: - platform = os.environ['CONFIGURATION'].split('-')[-1] - if platform not in ('iphoneos', 'iphonesimulator'): - platform = 'iphonesimulator' - command_line.extend([ - '--platform', platform, '--target-device', 'iphone', - '--target-device', 'ipad', '--minimum-deployment-target', - os.environ['IPHONEOS_DEPLOYMENT_TARGET'], '--compile', - os.path.abspath(os.environ['CONTENTS_FOLDER_PATH']), - ]) - else: - command_line.extend([ - '--platform', 'macosx', '--target-device', 'mac', - '--minimum-deployment-target', os.environ['MACOSX_DEPLOYMENT_TARGET'], - '--compile', - os.path.abspath(os.environ['UNLOCALIZED_RESOURCES_FOLDER_PATH']), - ]) - if keys: - keys = json.loads(keys) - for key, value in keys.iteritems(): - arg_name = '--' + key - if isinstance(value, bool): - if value: - command_line.append(arg_name) - elif isinstance(value, list): - for v in value: - command_line.append(arg_name) - command_line.append(str(v)) - else: - command_line.append(arg_name) - command_line.append(str(value)) - # Note: actool crashes if inputs path are relative, so use os.path.abspath - # to get absolute path name for inputs. - command_line.extend(map(os.path.abspath, inputs)) - subprocess.check_call(command_line) - - def ExecMergeInfoPlist(self, output, *inputs): - """Merge multiple .plist files into a single .plist file.""" - merged_plist = {} - for path in inputs: - plist = self._LoadPlistMaybeBinary(path) - self._MergePlist(merged_plist, plist) - plistlib.writePlist(merged_plist, output) - - def ExecCodeSignBundle(self, key, resource_rules, entitlements, provisioning): - """Code sign a bundle. - - This function tries to code sign an iOS bundle, following the same - algorithm as Xcode: - 1. copy ResourceRules.plist from the user or the SDK into the bundle, - 2. 
pick the provisioning profile that best match the bundle identifier, - and copy it into the bundle as embedded.mobileprovision, - 3. copy Entitlements.plist from user or SDK next to the bundle, - 4. code sign the bundle. - """ - resource_rules_path = self._InstallResourceRules(resource_rules) - substitutions, overrides = self._InstallProvisioningProfile( - provisioning, self._GetCFBundleIdentifier()) - entitlements_path = self._InstallEntitlements( - entitlements, substitutions, overrides) - subprocess.check_call([ - 'codesign', '--force', '--sign', key, '--resource-rules', - resource_rules_path, '--entitlements', entitlements_path, - os.path.join( - os.environ['TARGET_BUILD_DIR'], - os.environ['FULL_PRODUCT_NAME'])]) - - def _InstallResourceRules(self, resource_rules): - """Installs ResourceRules.plist from user or SDK into the bundle. - - Args: - resource_rules: string, optional, path to the ResourceRules.plist file - to use, default to "${SDKROOT}/ResourceRules.plist" - - Returns: - Path to the copy of ResourceRules.plist into the bundle. - """ - source_path = resource_rules - target_path = os.path.join( - os.environ['BUILT_PRODUCTS_DIR'], - os.environ['CONTENTS_FOLDER_PATH'], - 'ResourceRules.plist') - if not source_path: - source_path = os.path.join( - os.environ['SDKROOT'], 'ResourceRules.plist') - shutil.copy2(source_path, target_path) - return target_path - - def _InstallProvisioningProfile(self, profile, bundle_identifier): - """Installs embedded.mobileprovision into the bundle. - - Args: - profile: string, optional, short name of the .mobileprovision file - to use, if empty or the file is missing, the best file installed - will be used - bundle_identifier: string, value of CFBundleIdentifier from Info.plist - - Returns: - A tuple containing two dictionary: variables substitutions and values - to overrides when generating the entitlements file. - """ - source_path, provisioning_data, team_id = self._FindProvisioningProfile( - profile, bundle_identifier) - target_path = os.path.join( - os.environ['BUILT_PRODUCTS_DIR'], - os.environ['CONTENTS_FOLDER_PATH'], - 'embedded.mobileprovision') - shutil.copy2(source_path, target_path) - substitutions = self._GetSubstitutions(bundle_identifier, team_id + '.') - return substitutions, provisioning_data['Entitlements'] - - def _FindProvisioningProfile(self, profile, bundle_identifier): - """Finds the .mobileprovision file to use for signing the bundle. - - Checks all the installed provisioning profiles (or if the user specified - the PROVISIONING_PROFILE variable, only consult it) and select the most - specific that correspond to the bundle identifier. - - Args: - profile: string, optional, short name of the .mobileprovision file - to use, if empty or the file is missing, the best file installed - will be used - bundle_identifier: string, value of CFBundleIdentifier from Info.plist - - Returns: - A tuple of the path to the selected provisioning profile, the data of - the embedded plist in the provisioning profile and the team identifier - to use for code signing. - - Raises: - SystemExit: if no .mobileprovision can be used to sign the bundle. 
- """ - profiles_dir = os.path.join( - os.environ['HOME'], 'Library', 'MobileDevice', 'Provisioning Profiles') - if not os.path.isdir(profiles_dir): - print >>sys.stderr, ( - 'cannot find mobile provisioning for %s' % bundle_identifier) - sys.exit(1) - provisioning_profiles = None - if profile: - profile_path = os.path.join(profiles_dir, profile + '.mobileprovision') - if os.path.exists(profile_path): - provisioning_profiles = [profile_path] - if not provisioning_profiles: - provisioning_profiles = glob.glob( - os.path.join(profiles_dir, '*.mobileprovision')) - valid_provisioning_profiles = {} - for profile_path in provisioning_profiles: - profile_data = self._LoadProvisioningProfile(profile_path) - app_id_pattern = profile_data.get( - 'Entitlements', {}).get('application-identifier', '') - for team_identifier in profile_data.get('TeamIdentifier', []): - app_id = '%s.%s' % (team_identifier, bundle_identifier) - if fnmatch.fnmatch(app_id, app_id_pattern): - valid_provisioning_profiles[app_id_pattern] = ( - profile_path, profile_data, team_identifier) - if not valid_provisioning_profiles: - print >>sys.stderr, ( - 'cannot find mobile provisioning for %s' % bundle_identifier) - sys.exit(1) - # If the user has multiple provisioning profiles installed that can be - # used for ${bundle_identifier}, pick the most specific one (ie. the - # provisioning profile whose pattern is the longest). - selected_key = max(valid_provisioning_profiles, key=lambda v: len(v)) - return valid_provisioning_profiles[selected_key] - - def _LoadProvisioningProfile(self, profile_path): - """Extracts the plist embedded in a provisioning profile. - - Args: - profile_path: string, path to the .mobileprovision file - - Returns: - Content of the plist embedded in the provisioning profile as a dictionary. - """ - with tempfile.NamedTemporaryFile() as temp: - subprocess.check_call([ - 'security', 'cms', '-D', '-i', profile_path, '-o', temp.name]) - return self._LoadPlistMaybeBinary(temp.name) - - def _MergePlist(self, merged_plist, plist): - """Merge |plist| into |merged_plist|.""" - for key, value in plist.iteritems(): - if isinstance(value, dict): - merged_value = merged_plist.get(key, {}) - if isinstance(merged_value, dict): - self._MergePlist(merged_value, value) - merged_plist[key] = merged_value - else: - merged_plist[key] = value - else: - merged_plist[key] = value - - def _LoadPlistMaybeBinary(self, plist_path): - """Loads into a memory a plist possibly encoded in binary format. - - This is a wrapper around plistlib.readPlist that tries to convert the - plist to the XML format if it can't be parsed (assuming that it is in - the binary format). - - Args: - plist_path: string, path to a plist file, in XML or binary format - - Returns: - Content of the plist as a dictionary. - """ - try: - # First, try to read the file using plistlib that only supports XML, - # and if an exception is raised, convert a temporary copy to XML and - # load that copy. - return plistlib.readPlist(plist_path) - except: - pass - with tempfile.NamedTemporaryFile() as temp: - shutil.copy2(plist_path, temp.name) - subprocess.check_call(['plutil', '-convert', 'xml1', temp.name]) - return plistlib.readPlist(temp.name) - - def _GetSubstitutions(self, bundle_identifier, app_identifier_prefix): - """Constructs a dictionary of variable substitutions for Entitlements.plist. 
- - Args: - bundle_identifier: string, value of CFBundleIdentifier from Info.plist - app_identifier_prefix: string, value for AppIdentifierPrefix - - Returns: - Dictionary of substitutions to apply when generating Entitlements.plist. - """ - return { - 'CFBundleIdentifier': bundle_identifier, - 'AppIdentifierPrefix': app_identifier_prefix, - } - - def _GetCFBundleIdentifier(self): - """Extracts CFBundleIdentifier value from Info.plist in the bundle. - - Returns: - Value of CFBundleIdentifier in the Info.plist located in the bundle. - """ - info_plist_path = os.path.join( - os.environ['TARGET_BUILD_DIR'], - os.environ['INFOPLIST_PATH']) - info_plist_data = self._LoadPlistMaybeBinary(info_plist_path) - return info_plist_data['CFBundleIdentifier'] - - def _InstallEntitlements(self, entitlements, substitutions, overrides): - """Generates and install the ${BundleName}.xcent entitlements file. - - Expands variables "$(variable)" pattern in the source entitlements file, - add extra entitlements defined in the .mobileprovision file and the copy - the generated plist to "${BundlePath}.xcent". - - Args: - entitlements: string, optional, path to the Entitlements.plist template - to use, defaults to "${SDKROOT}/Entitlements.plist" - substitutions: dictionary, variable substitutions - overrides: dictionary, values to add to the entitlements - - Returns: - Path to the generated entitlements file. - """ - source_path = entitlements - target_path = os.path.join( - os.environ['BUILT_PRODUCTS_DIR'], - os.environ['PRODUCT_NAME'] + '.xcent') - if not source_path: - source_path = os.path.join( - os.environ['SDKROOT'], - 'Entitlements.plist') - shutil.copy2(source_path, target_path) - data = self._LoadPlistMaybeBinary(target_path) - data = self._ExpandVariables(data, substitutions) - if overrides: - for key in overrides: - if key not in data: - data[key] = overrides[key] - plistlib.writePlist(data, target_path) - return target_path - - def _ExpandVariables(self, data, substitutions): - """Expands variables "$(variable)" in data. - - Args: - data: object, can be either string, list or dictionary - substitutions: dictionary, variable substitutions to perform - - Returns: - Copy of data where each references to "$(variable)" has been replaced - by the corresponding value found in substitutions, or left intact if - the key was not found. - """ - if isinstance(data, str): - for key, value in substitutions.iteritems(): - data = data.replace('$(%s)' % key, value) - return data - if isinstance(data, list): - return [self._ExpandVariables(v, substitutions) for v in data] - if isinstance(data, dict): - return {k: self._ExpandVariables(data[k], substitutions) for k in data} - return data - -if __name__ == '__main__': - sys.exit(main(sys.argv[1:])) diff --git a/test/addons-napi/test_make_callback_recurse/build/Makefile b/test/addons-napi/test_make_callback_recurse/build/Makefile deleted file mode 100644 index e7e9365c29f899..00000000000000 --- a/test/addons-napi/test_make_callback_recurse/build/Makefile +++ /dev/null @@ -1,342 +0,0 @@ -# We borrow heavily from the kernel build setup, though we are simpler since -# we don't have Kconfig tweaking settings on us. - -# The implicit make rules have it looking for RCS files, among other things. -# We instead explicitly write all the rules we care about. -# It's even quicker (saves ~200ms) to pass -r on the command line. -MAKEFLAGS=-r - -# The source directory tree. -srcdir := .. -abs_srcdir := $(abspath $(srcdir)) - -# The name of the builddir. -builddir_name ?= . 
- -# The V=1 flag on command line makes us verbosely print command lines. -ifdef V - quiet= -else - quiet=quiet_ -endif - -# Specify BUILDTYPE=Release on the command line for a release build. -BUILDTYPE ?= Release - -# Directory all our build output goes into. -# Note that this must be two directories beneath src/ for unit tests to pass, -# as they reach into the src/ directory for data with relative paths. -builddir ?= $(builddir_name)/$(BUILDTYPE) -abs_builddir := $(abspath $(builddir)) -depsdir := $(builddir)/.deps - -# Object output directory. -obj := $(builddir)/obj -abs_obj := $(abspath $(obj)) - -# We build up a list of every single one of the targets so we can slurp in the -# generated dependency rule Makefiles in one pass. -all_deps := - - - -CC.target ?= $(CC) -CFLAGS.target ?= $(CPPFLAGS) $(CFLAGS) -CXX.target ?= $(CXX) -CXXFLAGS.target ?= $(CPPFLAGS) $(CXXFLAGS) -LINK.target ?= $(LINK) -LDFLAGS.target ?= $(LDFLAGS) -AR.target ?= $(AR) - -# C++ apps need to be linked with g++. -LINK ?= $(CXX.target) - -# TODO(evan): move all cross-compilation logic to gyp-time so we don't need -# to replicate this environment fallback in make as well. -CC.host ?= gcc -CFLAGS.host ?= $(CPPFLAGS_host) $(CFLAGS_host) -CXX.host ?= g++ -CXXFLAGS.host ?= $(CPPFLAGS_host) $(CXXFLAGS_host) -LINK.host ?= $(CXX.host) -LDFLAGS.host ?= -AR.host ?= ar - -# Define a dir function that can handle spaces. -# http://www.gnu.org/software/make/manual/make.html#Syntax-of-Functions -# "leading spaces cannot appear in the text of the first argument as written. -# These characters can be put into the argument value by variable substitution." -empty := -space := $(empty) $(empty) - -# http://stackoverflow.com/questions/1189781/using-make-dir-or-notdir-on-a-path-with-spaces -replace_spaces = $(subst $(space),?,$1) -unreplace_spaces = $(subst ?,$(space),$1) -dirx = $(call unreplace_spaces,$(dir $(call replace_spaces,$1))) - -# Flags to make gcc output dependency info. Note that you need to be -# careful here to use the flags that ccache and distcc can understand. -# We write to a dep file on the side first and then rename at the end -# so we can't end up with a broken dep file. -depfile = $(depsdir)/$(call replace_spaces,$@).d -DEPFLAGS = -MMD -MF $(depfile).raw - -# We have to fixup the deps output in a few ways. -# (1) the file output should mention the proper .o file. -# ccache or distcc lose the path to the target, so we convert a rule of -# the form: -# foobar.o: DEP1 DEP2 -# into -# path/to/foobar.o: DEP1 DEP2 -# (2) we want missing files not to cause us to fail to build. -# We want to rewrite -# foobar.o: DEP1 DEP2 \ -# DEP3 -# to -# DEP1: -# DEP2: -# DEP3: -# so if the files are missing, they're just considered phony rules. -# We have to do some pretty insane escaping to get those backslashes -# and dollar signs past make, the shell, and sed at the same time. -# Doesn't work with spaces, but that's fine: .d files have spaces in -# their names replaced with other characters. -define fixup_dep -# The depfile may not exist if the input file didn't have any #includes. -touch $(depfile).raw -# Fixup path as in (1). -sed -e "s|^$(notdir $@)|$@|" $(depfile).raw >> $(depfile) -# Add extra rules as in (2). -# We remove slashes and replace spaces with new lines; -# remove blank lines; -# delete the first line and append a colon to the remaining lines. 
-sed -e 's|\\||' -e 'y| |\n|' $(depfile).raw |\ - grep -v '^$$' |\ - sed -e 1d -e 's|$$|:|' \ - >> $(depfile) -rm $(depfile).raw -endef - -# Command definitions: -# - cmd_foo is the actual command to run; -# - quiet_cmd_foo is the brief-output summary of the command. - -quiet_cmd_cc = CC($(TOOLSET)) $@ -cmd_cc = $(CC.$(TOOLSET)) $(GYP_CFLAGS) $(DEPFLAGS) $(CFLAGS.$(TOOLSET)) -c -o $@ $< - -quiet_cmd_cxx = CXX($(TOOLSET)) $@ -cmd_cxx = $(CXX.$(TOOLSET)) $(GYP_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< - -quiet_cmd_objc = CXX($(TOOLSET)) $@ -cmd_objc = $(CC.$(TOOLSET)) $(GYP_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $< - -quiet_cmd_objcxx = CXX($(TOOLSET)) $@ -cmd_objcxx = $(CXX.$(TOOLSET)) $(GYP_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $< - -# Commands for precompiled header files. -quiet_cmd_pch_c = CXX($(TOOLSET)) $@ -cmd_pch_c = $(CC.$(TOOLSET)) $(GYP_PCH_CFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< -quiet_cmd_pch_cc = CXX($(TOOLSET)) $@ -cmd_pch_cc = $(CC.$(TOOLSET)) $(GYP_PCH_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< -quiet_cmd_pch_m = CXX($(TOOLSET)) $@ -cmd_pch_m = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $< -quiet_cmd_pch_mm = CXX($(TOOLSET)) $@ -cmd_pch_mm = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $< - -# gyp-mac-tool is written next to the root Makefile by gyp. -# Use $(4) for the command, since $(2) and $(3) are used as flag by do_cmd -# already. -quiet_cmd_mac_tool = MACTOOL $(4) $< -cmd_mac_tool = ./gyp-mac-tool $(4) $< "$@" - -quiet_cmd_mac_package_framework = PACKAGE FRAMEWORK $@ -cmd_mac_package_framework = ./gyp-mac-tool package-framework "$@" $(4) - -quiet_cmd_infoplist = INFOPLIST $@ -cmd_infoplist = $(CC.$(TOOLSET)) -E -P -Wno-trigraphs -x c $(INFOPLIST_DEFINES) "$<" -o "$@" - -quiet_cmd_touch = TOUCH $@ -cmd_touch = touch $@ - -quiet_cmd_copy = COPY $@ -# send stderr to /dev/null to ignore messages when linking directories. -cmd_copy = rm -rf "$@" && cp -af "$<" "$@" - -quiet_cmd_alink = LIBTOOL-STATIC $@ -cmd_alink = rm -f $@ && ./gyp-mac-tool filter-libtool libtool $(GYP_LIBTOOLFLAGS) -static -o $@ $(filter %.o,$^) - -quiet_cmd_link = LINK($(TOOLSET)) $@ -cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS) - -quiet_cmd_solink = SOLINK($(TOOLSET)) $@ -cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS) - -quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@ -cmd_solink_module = $(LINK.$(TOOLSET)) -bundle $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS) - - -# Define an escape_quotes function to escape single quotes. -# This allows us to handle quotes properly as long as we always use -# use single quotes and escape_quotes. -escape_quotes = $(subst ','\'',$(1)) -# This comment is here just to include a ' to unconfuse syntax highlighting. -# Define an escape_vars function to escape '$' variable syntax. -# This allows us to read/write command lines with shell variables (e.g. -# $LD_LIBRARY_PATH), without triggering make substitution. -escape_vars = $(subst $$,$$$$,$(1)) -# Helper that expands to a shell command to echo a string exactly as it is in -# make. This uses printf instead of echo because printf's behaviour with respect -# to escape sequences is more portable than echo's across different shells -# (e.g., dash, bash). 
-exact_echo = printf '%s\n' '$(call escape_quotes,$(1))' - -# Helper to compare the command we're about to run against the command -# we logged the last time we ran the command. Produces an empty -# string (false) when the commands match. -# Tricky point: Make has no string-equality test function. -# The kernel uses the following, but it seems like it would have false -# positives, where one string reordered its arguments. -# arg_check = $(strip $(filter-out $(cmd_$(1)), $(cmd_$@)) \ -# $(filter-out $(cmd_$@), $(cmd_$(1)))) -# We instead substitute each for the empty string into the other, and -# say they're equal if both substitutions produce the empty string. -# .d files contain ? instead of spaces, take that into account. -command_changed = $(or $(subst $(cmd_$(1)),,$(cmd_$(call replace_spaces,$@))),\ - $(subst $(cmd_$(call replace_spaces,$@)),,$(cmd_$(1)))) - -# Helper that is non-empty when a prerequisite changes. -# Normally make does this implicitly, but we force rules to always run -# so we can check their command lines. -# $? -- new prerequisites -# $| -- order-only dependencies -prereq_changed = $(filter-out FORCE_DO_CMD,$(filter-out $|,$?)) - -# Helper that executes all postbuilds until one fails. -define do_postbuilds - @E=0;\ - for p in $(POSTBUILDS); do\ - eval $$p;\ - E=$$?;\ - if [ $$E -ne 0 ]; then\ - break;\ - fi;\ - done;\ - if [ $$E -ne 0 ]; then\ - rm -rf "$@";\ - exit $$E;\ - fi -endef - -# do_cmd: run a command via the above cmd_foo names, if necessary. -# Should always run for a given target to handle command-line changes. -# Second argument, if non-zero, makes it do asm/C/C++ dependency munging. -# Third argument, if non-zero, makes it do POSTBUILDS processing. -# Note: We intentionally do NOT call dirx for depfile, since it contains ? for -# spaces already and dirx strips the ? characters. -define do_cmd -$(if $(or $(command_changed),$(prereq_changed)), - @$(call exact_echo, $($(quiet)cmd_$(1))) - @mkdir -p "$(call dirx,$@)" "$(dir $(depfile))" - $(if $(findstring flock,$(word 2,$(cmd_$1))), - @$(cmd_$(1)) - @echo " $(quiet_cmd_$(1)): Finished", - @$(cmd_$(1)) - ) - @$(call exact_echo,$(call escape_vars,cmd_$(call replace_spaces,$@) := $(cmd_$(1)))) > $(depfile) - @$(if $(2),$(fixup_dep)) - $(if $(and $(3), $(POSTBUILDS)), - $(call do_postbuilds) - ) -) -endef - -# Declare the "all" target first so it is the default, -# even though we don't have the deps yet. -.PHONY: all -all: - -# make looks for ways to re-generate included makefiles, but in our case, we -# don't have a direct way. Explicitly telling make that it has nothing to do -# for them makes it go faster. -%.d: ; - -# Use FORCE_DO_CMD to force a target to run. Should be coupled with -# do_cmd. -.PHONY: FORCE_DO_CMD -FORCE_DO_CMD: - -TOOLSET := target -# Suffix rules, putting all outputs into $(obj). -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.m FORCE_DO_CMD - @$(call do_cmd,objc,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.mm FORCE_DO_CMD - @$(call do_cmd,objcxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.S FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.s FORCE_DO_CMD - @$(call do_cmd,cc,1) - -# Try building from generated source, too. 
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.m FORCE_DO_CMD - @$(call do_cmd,objc,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.mm FORCE_DO_CMD - @$(call do_cmd,objcxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.S FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.s FORCE_DO_CMD - @$(call do_cmd,cc,1) - -$(obj).$(TOOLSET)/%.o: $(obj)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.m FORCE_DO_CMD - @$(call do_cmd,objc,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.mm FORCE_DO_CMD - @$(call do_cmd,objcxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.S FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.s FORCE_DO_CMD - @$(call do_cmd,cc,1) - - -ifeq ($(strip $(foreach prefix,$(NO_LOAD),\ - $(findstring $(join ^,$(prefix)),\ - $(join ^,binding.target.mk)))),) - include binding.target.mk -endif - -quiet_cmd_regen_makefile = ACTION Regenerating $@ -cmd_regen_makefile = cd $(srcdir); /Users/trott/io.js/deps/npm/node_modules/node-gyp/gyp/gyp_main.py -fmake --ignore-environment "--toplevel-dir=." -I/Users/trott/io.js/test/addons-napi/test_make_callback_recurse/build/config.gypi -I/Users/trott/io.js/deps/npm/node_modules/node-gyp/addon.gypi -I/Users/trott/io.js/common.gypi "--depth=." "-Goutput_dir=." "--generator-output=build" "-Dlibrary=shared_library" "-Dvisibility=default" "-Dnode_root_dir=/Users/trott/io.js" "-Dnode_gyp_dir=/Users/trott/io.js/deps/npm/node_modules/node-gyp" "-Dnode_lib_file=/Users/trott/io.js/$(Configuration)/node.lib" "-Dmodule_root_dir=/Users/trott/io.js/test/addons-napi/test_make_callback_recurse" "-Dnode_engine=v8" binding.gyp -Makefile: $(srcdir)/../../../deps/npm/node_modules/node-gyp/addon.gypi $(srcdir)/build/config.gypi $(srcdir)/binding.gyp $(srcdir)/../../../common.gypi - $(call do_cmd,regen_makefile) - -# "all" is a concatenation of the "all" targets from all the included -# sub-makefiles. This is just here to clarify. -all: - -# Add in dependency-tracking rules. $(all_deps) is the list of every single -# target in our tree. Only consider the ones with .d (dependency) info: -d_files := $(wildcard $(foreach f,$(all_deps),$(depsdir)/$(f).d)) -ifneq ($(d_files),) - include $(d_files) -endif diff --git a/test/addons-napi/test_make_callback_recurse/build/binding.Makefile b/test/addons-napi/test_make_callback_recurse/build/binding.Makefile deleted file mode 100644 index 0556c70f61905f..00000000000000 --- a/test/addons-napi/test_make_callback_recurse/build/binding.Makefile +++ /dev/null @@ -1,6 +0,0 @@ -# This file is generated by gyp; do not edit. - -export builddir_name ?= ./build/. 
-.PHONY: all -all: - $(MAKE) binding diff --git a/test/addons-napi/test_make_callback_recurse/build/binding.target.mk b/test/addons-napi/test_make_callback_recurse/build/binding.target.mk deleted file mode 100644 index fb7675d45de43e..00000000000000 --- a/test/addons-napi/test_make_callback_recurse/build/binding.target.mk +++ /dev/null @@ -1,178 +0,0 @@ -# This file is generated by gyp; do not edit. - -TOOLSET := target -TARGET := binding -DEFS_Debug := \ - '-DNODE_GYP_MODULE_NAME=binding' \ - '-DUSING_UV_SHARED=1' \ - '-DUSING_V8_SHARED=1' \ - '-DV8_DEPRECATION_WARNINGS=1' \ - '-D_DARWIN_USE_64_BIT_INODE=1' \ - '-D_LARGEFILE_SOURCE' \ - '-D_FILE_OFFSET_BITS=64' \ - '-DBUILDING_NODE_EXTENSION' \ - '-DDEBUG' \ - '-D_DEBUG' \ - '-DV8_ENABLE_CHECKS' - -# Flags passed to all source files. -CFLAGS_Debug := \ - -O0 \ - -gdwarf-2 \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -Wall \ - -Wendif-labels \ - -W \ - -Wno-unused-parameter - -# Flags passed to only C files. -CFLAGS_C_Debug := \ - -fno-strict-aliasing - -# Flags passed to only C++ files. -CFLAGS_CC_Debug := \ - -std=gnu++0x \ - -stdlib=libc++ \ - -fno-rtti \ - -fno-exceptions \ - -fno-threadsafe-statics \ - -fno-strict-aliasing - -# Flags passed to only ObjC files. -CFLAGS_OBJC_Debug := - -# Flags passed to only ObjC++ files. -CFLAGS_OBJCC_Debug := - -INCS_Debug := \ - -I/Users/trott/io.js/include/node \ - -I/Users/trott/io.js/src \ - -I/Users/trott/io.js/deps/uv/include \ - -I/Users/trott/io.js/deps/v8/include - -DEFS_Release := \ - '-DNODE_GYP_MODULE_NAME=binding' \ - '-DUSING_UV_SHARED=1' \ - '-DUSING_V8_SHARED=1' \ - '-DV8_DEPRECATION_WARNINGS=1' \ - '-D_DARWIN_USE_64_BIT_INODE=1' \ - '-D_LARGEFILE_SOURCE' \ - '-D_FILE_OFFSET_BITS=64' \ - '-DBUILDING_NODE_EXTENSION' - -# Flags passed to all source files. -CFLAGS_Release := \ - -Os \ - -gdwarf-2 \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -Wall \ - -Wendif-labels \ - -W \ - -Wno-unused-parameter - -# Flags passed to only C files. -CFLAGS_C_Release := \ - -fno-strict-aliasing - -# Flags passed to only C++ files. -CFLAGS_CC_Release := \ - -std=gnu++0x \ - -stdlib=libc++ \ - -fno-rtti \ - -fno-exceptions \ - -fno-threadsafe-statics \ - -fno-strict-aliasing - -# Flags passed to only ObjC files. -CFLAGS_OBJC_Release := - -# Flags passed to only ObjC++ files. -CFLAGS_OBJCC_Release := - -INCS_Release := \ - -I/Users/trott/io.js/include/node \ - -I/Users/trott/io.js/src \ - -I/Users/trott/io.js/deps/uv/include \ - -I/Users/trott/io.js/deps/v8/include - -OBJS := \ - $(obj).target/$(TARGET)/binding.o - -# Add to the list of files we specially track dependencies for. -all_deps += $(OBJS) - -# CFLAGS et al overrides must be target-local. -# See "Target-specific Variable Values" in the GNU Make manual. -$(OBJS): TOOLSET := $(TOOLSET) -$(OBJS): GYP_CFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) -$(OBJS): GYP_CXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) -$(OBJS): GYP_OBJCFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) $(CFLAGS_OBJC_$(BUILDTYPE)) -$(OBJS): GYP_OBJCXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) $(CFLAGS_OBJCC_$(BUILDTYPE)) - -# Suffix rules, putting all outputs into $(obj). - -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) - -# Try building from generated source, too. 
- -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) - -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) - -# End of this set of suffix rules -### Rules for final target. -LDFLAGS_Debug := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -L$(builddir) \ - -stdlib=libc++ - -LIBTOOLFLAGS_Debug := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first - -LDFLAGS_Release := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -L$(builddir) \ - -stdlib=libc++ - -LIBTOOLFLAGS_Release := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first - -LIBS := - -$(builddir)/binding.node: GYP_LDFLAGS := $(LDFLAGS_$(BUILDTYPE)) -$(builddir)/binding.node: LIBS := $(LIBS) -$(builddir)/binding.node: GYP_LIBTOOLFLAGS := $(LIBTOOLFLAGS_$(BUILDTYPE)) -$(builddir)/binding.node: TOOLSET := $(TOOLSET) -$(builddir)/binding.node: $(OBJS) FORCE_DO_CMD - $(call do_cmd,solink_module) - -all_deps += $(builddir)/binding.node -# Add target alias -.PHONY: binding -binding: $(builddir)/binding.node - -# Short alias for building this executable. -.PHONY: binding.node -binding.node: $(builddir)/binding.node - -# Add executable to "all" target. -.PHONY: all -all: $(builddir)/binding.node - diff --git a/test/addons-napi/test_make_callback_recurse/build/config.gypi b/test/addons-napi/test_make_callback_recurse/build/config.gypi deleted file mode 100644 index 6a35cfccbb772b..00000000000000 --- a/test/addons-napi/test_make_callback_recurse/build/config.gypi +++ /dev/null @@ -1,70 +0,0 @@ -# Do not edit. File was generated by node-gyp's "configure" step -{ - "target_defaults": { - "cflags": [], - "default_configuration": "Release", - "defines": [], - "include_dirs": [], - "libraries": [] - }, - "variables": { - "asan": 0, - "coverage": "false", - "debug_devtools": "node", - "debug_http2": "false", - "debug_nghttp2": "false", - "force_dynamic_crt": 0, - "host_arch": "x64", - "icu_data_file": "icudt59l.dat", - "icu_data_in": "../../deps/icu-small/source/data/in/icudt59l.dat", - "icu_endianness": "l", - "icu_gyp_path": "tools/icu/icu-generic.gyp", - "icu_locales": "en,root", - "icu_path": "deps/icu-small", - "icu_small": "true", - "icu_ver_major": "59", - "llvm_version": 0, - "node_byteorder": "little", - "node_enable_d8": "false", - "node_enable_v8_vtunejit": "false", - "node_install_npm": "true", - "node_module_version": 58, - "node_no_browser_globals": "false", - "node_prefix": "/usr/local", - "node_release_urlbase": "", - "node_shared": "false", - "node_shared_cares": "false", - "node_shared_http_parser": "false", - "node_shared_libuv": "false", - "node_shared_openssl": "false", - "node_shared_zlib": "false", - "node_tag": "", - "node_use_bundled_v8": "true", - "node_use_dtrace": "true", - "node_use_etw": "false", - "node_use_lttng": "false", - "node_use_openssl": "true", - "node_use_perfctr": "false", - "node_use_v8_platform": "true", - "node_without_node_options": "false", - "openssl_fips": "", - "openssl_no_asm": 0, - "shlib_suffix": "58.dylib", - "target_arch": "x64", - "uv_parent_path": "/deps/uv/", - "uv_use_dtrace": "true", - "v8_enable_gdbjit": 0, - "v8_enable_i18n_support": 1, - "v8_enable_inspector": 1, - "v8_no_strict_aliasing": 1, - "v8_optimized_debug": 0, - "v8_promise_internal_field_count": 1, - "v8_random_seed": 0, - "v8_trace_maps": 0, - "v8_use_snapshot": 
"true", - "want_separate_host_toolset": 0, - "xcode_version": "8.0", - "nodedir": "/Users/trott/io.js", - "standalone_static_library": 1 - } -} diff --git a/test/addons-napi/test_make_callback_recurse/build/gyp-mac-tool b/test/addons-napi/test_make_callback_recurse/build/gyp-mac-tool deleted file mode 100755 index 8ef02b0493a003..00000000000000 --- a/test/addons-napi/test_make_callback_recurse/build/gyp-mac-tool +++ /dev/null @@ -1,611 +0,0 @@ -#!/usr/bin/env python -# Generated by gyp. Do not edit. -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Utility functions to perform Xcode-style build steps. - -These functions are executed via gyp-mac-tool when using the Makefile generator. -""" - -import fcntl -import fnmatch -import glob -import json -import os -import plistlib -import re -import shutil -import string -import subprocess -import sys -import tempfile - - -def main(args): - executor = MacTool() - exit_code = executor.Dispatch(args) - if exit_code is not None: - sys.exit(exit_code) - - -class MacTool(object): - """This class performs all the Mac tooling steps. The methods can either be - executed directly, or dispatched from an argument list.""" - - def Dispatch(self, args): - """Dispatches a string command to a method.""" - if len(args) < 1: - raise Exception("Not enough arguments") - - method = "Exec%s" % self._CommandifyName(args[0]) - return getattr(self, method)(*args[1:]) - - def _CommandifyName(self, name_string): - """Transforms a tool name like copy-info-plist to CopyInfoPlist""" - return name_string.title().replace('-', '') - - def ExecCopyBundleResource(self, source, dest, convert_to_binary): - """Copies a resource file to the bundle/Resources directory, performing any - necessary compilation on each resource.""" - extension = os.path.splitext(source)[1].lower() - if os.path.isdir(source): - # Copy tree. - # TODO(thakis): This copies file attributes like mtime, while the - # single-file branch below doesn't. This should probably be changed to - # be consistent with the single-file branch. - if os.path.exists(dest): - shutil.rmtree(dest) - shutil.copytree(source, dest) - elif extension == '.xib': - return self._CopyXIBFile(source, dest) - elif extension == '.storyboard': - return self._CopyXIBFile(source, dest) - elif extension == '.strings': - self._CopyStringsFile(source, dest, convert_to_binary) - else: - shutil.copy(source, dest) - - def _CopyXIBFile(self, source, dest): - """Compiles a XIB file with ibtool into a binary plist in the bundle.""" - - # ibtool sometimes crashes with relative paths. See crbug.com/314728. 
- base = os.path.dirname(os.path.realpath(__file__)) - if os.path.relpath(source): - source = os.path.join(base, source) - if os.path.relpath(dest): - dest = os.path.join(base, dest) - - args = ['xcrun', 'ibtool', '--errors', '--warnings', '--notices', - '--output-format', 'human-readable-text', '--compile', dest, source] - ibtool_section_re = re.compile(r'/\*.*\*/') - ibtool_re = re.compile(r'.*note:.*is clipping its content') - ibtoolout = subprocess.Popen(args, stdout=subprocess.PIPE) - current_section_header = None - for line in ibtoolout.stdout: - if ibtool_section_re.match(line): - current_section_header = line - elif not ibtool_re.match(line): - if current_section_header: - sys.stdout.write(current_section_header) - current_section_header = None - sys.stdout.write(line) - return ibtoolout.returncode - - def _ConvertToBinary(self, dest): - subprocess.check_call([ - 'xcrun', 'plutil', '-convert', 'binary1', '-o', dest, dest]) - - def _CopyStringsFile(self, source, dest, convert_to_binary): - """Copies a .strings file using iconv to reconvert the input into UTF-16.""" - input_code = self._DetectInputEncoding(source) or "UTF-8" - - # Xcode's CpyCopyStringsFile / builtin-copyStrings seems to call - # CFPropertyListCreateFromXMLData() behind the scenes; at least it prints - # CFPropertyListCreateFromXMLData(): Old-style plist parser: missing - # semicolon in dictionary. - # on invalid files. Do the same kind of validation. - import CoreFoundation - s = open(source, 'rb').read() - d = CoreFoundation.CFDataCreate(None, s, len(s)) - _, error = CoreFoundation.CFPropertyListCreateFromXMLData(None, d, 0, None) - if error: - return - - fp = open(dest, 'wb') - fp.write(s.decode(input_code).encode('UTF-16')) - fp.close() - - if convert_to_binary == 'True': - self._ConvertToBinary(dest) - - def _DetectInputEncoding(self, file_name): - """Reads the first few bytes from file_name and tries to guess the text - encoding. Returns None as a guess if it can't detect it.""" - fp = open(file_name, 'rb') - try: - header = fp.read(3) - except e: - fp.close() - return None - fp.close() - if header.startswith("\xFE\xFF"): - return "UTF-16" - elif header.startswith("\xFF\xFE"): - return "UTF-16" - elif header.startswith("\xEF\xBB\xBF"): - return "UTF-8" - else: - return None - - def ExecCopyInfoPlist(self, source, dest, convert_to_binary, *keys): - """Copies the |source| Info.plist to the destination directory |dest|.""" - # Read the source Info.plist into memory. - fd = open(source, 'r') - lines = fd.read() - fd.close() - - # Insert synthesized key/value pairs (e.g. BuildMachineOSBuild). - plist = plistlib.readPlistFromString(lines) - if keys: - plist = dict(plist.items() + json.loads(keys[0]).items()) - lines = plistlib.writePlistToString(plist) - - # Go through all the environment variables and replace them as variables in - # the file. - IDENT_RE = re.compile(r'[/\s]') - for key in os.environ: - if key.startswith('_'): - continue - evar = '${%s}' % key - evalue = os.environ[key] - lines = string.replace(lines, evar, evalue) - - # Xcode supports various suffices on environment variables, which are - # all undocumented. :rfc1034identifier is used in the standard project - # template these days, and :identifier was used earlier. They are used to - # convert non-url characters into things that look like valid urls -- - # except that the replacement character for :identifier, '_' isn't valid - # in a URL either -- oops, hence :rfc1034identifier was born. 
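As a rough illustration of the suffix expansion described in the comment above, using the same IDENT_RE pattern as the deleted code and a hypothetical environment-variable value:

import re

IDENT_RE = re.compile(r'[/\s]')   # '/' and whitespace are the characters replaced
value = 'My App/Beta'             # hypothetical value of an environment variable
print(IDENT_RE.sub('_', value))   # ${VAR:identifier}        -> 'My_App_Beta'
print(IDENT_RE.sub('-', value))   # ${VAR:rfc1034identifier} -> 'My-App-Beta'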
- evar = '${%s:identifier}' % key - evalue = IDENT_RE.sub('_', os.environ[key]) - lines = string.replace(lines, evar, evalue) - - evar = '${%s:rfc1034identifier}' % key - evalue = IDENT_RE.sub('-', os.environ[key]) - lines = string.replace(lines, evar, evalue) - - # Remove any keys with values that haven't been replaced. - lines = lines.split('\n') - for i in range(len(lines)): - if lines[i].strip().startswith("${"): - lines[i] = None - lines[i - 1] = None - lines = '\n'.join(filter(lambda x: x is not None, lines)) - - # Write out the file with variables replaced. - fd = open(dest, 'w') - fd.write(lines) - fd.close() - - # Now write out PkgInfo file now that the Info.plist file has been - # "compiled". - self._WritePkgInfo(dest) - - if convert_to_binary == 'True': - self._ConvertToBinary(dest) - - def _WritePkgInfo(self, info_plist): - """This writes the PkgInfo file from the data stored in Info.plist.""" - plist = plistlib.readPlist(info_plist) - if not plist: - return - - # Only create PkgInfo for executable types. - package_type = plist['CFBundlePackageType'] - if package_type != 'APPL': - return - - # The format of PkgInfo is eight characters, representing the bundle type - # and bundle signature, each four characters. If that is missing, four - # '?' characters are used instead. - signature_code = plist.get('CFBundleSignature', '????') - if len(signature_code) != 4: # Wrong length resets everything, too. - signature_code = '?' * 4 - - dest = os.path.join(os.path.dirname(info_plist), 'PkgInfo') - fp = open(dest, 'w') - fp.write('%s%s' % (package_type, signature_code)) - fp.close() - - def ExecFlock(self, lockfile, *cmd_list): - """Emulates the most basic behavior of Linux's flock(1).""" - # Rely on exception handling to report errors. - fd = os.open(lockfile, os.O_RDONLY|os.O_NOCTTY|os.O_CREAT, 0o666) - fcntl.flock(fd, fcntl.LOCK_EX) - return subprocess.call(cmd_list) - - def ExecFilterLibtool(self, *cmd_list): - """Calls libtool and filters out '/path/to/libtool: file: foo.o has no - symbols'.""" - libtool_re = re.compile(r'^.*libtool: file: .* has no symbols$') - libtool_re5 = re.compile( - r'^.*libtool: warning for library: ' + - r'.* the table of contents is empty ' + - r'\(no object file members in the library define global symbols\)$') - env = os.environ.copy() - # Ref: - # http://www.opensource.apple.com/source/cctools/cctools-809/misc/libtool.c - # The problem with this flag is that it resets the file mtime on the file to - # epoch=0, e.g. 1970-1-1 or 1969-12-31 depending on timezone. - env['ZERO_AR_DATE'] = '1' - libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env) - _, err = libtoolout.communicate() - for line in err.splitlines(): - if not libtool_re.match(line) and not libtool_re5.match(line): - print >>sys.stderr, line - # Unconditionally touch the output .a file on the command line if present - # and the command succeeded. A bit hacky. - if not libtoolout.returncode: - for i in range(len(cmd_list) - 1): - if cmd_list[i] == "-o" and cmd_list[i+1].endswith('.a'): - os.utime(cmd_list[i+1], None) - break - return libtoolout.returncode - - def ExecPackageFramework(self, framework, version): - """Takes a path to Something.framework and the Current version of that and - sets up all the symlinks.""" - # Find the name of the binary based on the part before the ".framework". 
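For the framework-name comment above, a tiny sketch of what the derivation on the following line yields, with a hypothetical bundle path:

import os

framework = '/tmp/Release/Something.framework'       # hypothetical framework bundle
binary = os.path.basename(framework).split('.')[0]   # same expression as the deleted code
print(binary)                                        # -> 'Something'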
- binary = os.path.basename(framework).split('.')[0] - - CURRENT = 'Current' - RESOURCES = 'Resources' - VERSIONS = 'Versions' - - if not os.path.exists(os.path.join(framework, VERSIONS, version, binary)): - # Binary-less frameworks don't seem to contain symlinks (see e.g. - # chromium's out/Debug/org.chromium.Chromium.manifest/ bundle). - return - - # Move into the framework directory to set the symlinks correctly. - pwd = os.getcwd() - os.chdir(framework) - - # Set up the Current version. - self._Relink(version, os.path.join(VERSIONS, CURRENT)) - - # Set up the root symlinks. - self._Relink(os.path.join(VERSIONS, CURRENT, binary), binary) - self._Relink(os.path.join(VERSIONS, CURRENT, RESOURCES), RESOURCES) - - # Back to where we were before! - os.chdir(pwd) - - def _Relink(self, dest, link): - """Creates a symlink to |dest| named |link|. If |link| already exists, - it is overwritten.""" - if os.path.lexists(link): - os.remove(link) - os.symlink(dest, link) - - def ExecCompileXcassets(self, keys, *inputs): - """Compiles multiple .xcassets files into a single .car file. - - This invokes 'actool' to compile all the inputs .xcassets files. The - |keys| arguments is a json-encoded dictionary of extra arguments to - pass to 'actool' when the asset catalogs contains an application icon - or a launch image. - - Note that 'actool' does not create the Assets.car file if the asset - catalogs does not contains imageset. - """ - command_line = [ - 'xcrun', 'actool', '--output-format', 'human-readable-text', - '--compress-pngs', '--notices', '--warnings', '--errors', - ] - is_iphone_target = 'IPHONEOS_DEPLOYMENT_TARGET' in os.environ - if is_iphone_target: - platform = os.environ['CONFIGURATION'].split('-')[-1] - if platform not in ('iphoneos', 'iphonesimulator'): - platform = 'iphonesimulator' - command_line.extend([ - '--platform', platform, '--target-device', 'iphone', - '--target-device', 'ipad', '--minimum-deployment-target', - os.environ['IPHONEOS_DEPLOYMENT_TARGET'], '--compile', - os.path.abspath(os.environ['CONTENTS_FOLDER_PATH']), - ]) - else: - command_line.extend([ - '--platform', 'macosx', '--target-device', 'mac', - '--minimum-deployment-target', os.environ['MACOSX_DEPLOYMENT_TARGET'], - '--compile', - os.path.abspath(os.environ['UNLOCALIZED_RESOURCES_FOLDER_PATH']), - ]) - if keys: - keys = json.loads(keys) - for key, value in keys.iteritems(): - arg_name = '--' + key - if isinstance(value, bool): - if value: - command_line.append(arg_name) - elif isinstance(value, list): - for v in value: - command_line.append(arg_name) - command_line.append(str(v)) - else: - command_line.append(arg_name) - command_line.append(str(value)) - # Note: actool crashes if inputs path are relative, so use os.path.abspath - # to get absolute path name for inputs. - command_line.extend(map(os.path.abspath, inputs)) - subprocess.check_call(command_line) - - def ExecMergeInfoPlist(self, output, *inputs): - """Merge multiple .plist files into a single .plist file.""" - merged_plist = {} - for path in inputs: - plist = self._LoadPlistMaybeBinary(path) - self._MergePlist(merged_plist, plist) - plistlib.writePlist(merged_plist, output) - - def ExecCodeSignBundle(self, key, resource_rules, entitlements, provisioning): - """Code sign a bundle. - - This function tries to code sign an iOS bundle, following the same - algorithm as Xcode: - 1. copy ResourceRules.plist from the user or the SDK into the bundle, - 2. 
pick the provisioning profile that best match the bundle identifier, - and copy it into the bundle as embedded.mobileprovision, - 3. copy Entitlements.plist from user or SDK next to the bundle, - 4. code sign the bundle. - """ - resource_rules_path = self._InstallResourceRules(resource_rules) - substitutions, overrides = self._InstallProvisioningProfile( - provisioning, self._GetCFBundleIdentifier()) - entitlements_path = self._InstallEntitlements( - entitlements, substitutions, overrides) - subprocess.check_call([ - 'codesign', '--force', '--sign', key, '--resource-rules', - resource_rules_path, '--entitlements', entitlements_path, - os.path.join( - os.environ['TARGET_BUILD_DIR'], - os.environ['FULL_PRODUCT_NAME'])]) - - def _InstallResourceRules(self, resource_rules): - """Installs ResourceRules.plist from user or SDK into the bundle. - - Args: - resource_rules: string, optional, path to the ResourceRules.plist file - to use, default to "${SDKROOT}/ResourceRules.plist" - - Returns: - Path to the copy of ResourceRules.plist into the bundle. - """ - source_path = resource_rules - target_path = os.path.join( - os.environ['BUILT_PRODUCTS_DIR'], - os.environ['CONTENTS_FOLDER_PATH'], - 'ResourceRules.plist') - if not source_path: - source_path = os.path.join( - os.environ['SDKROOT'], 'ResourceRules.plist') - shutil.copy2(source_path, target_path) - return target_path - - def _InstallProvisioningProfile(self, profile, bundle_identifier): - """Installs embedded.mobileprovision into the bundle. - - Args: - profile: string, optional, short name of the .mobileprovision file - to use, if empty or the file is missing, the best file installed - will be used - bundle_identifier: string, value of CFBundleIdentifier from Info.plist - - Returns: - A tuple containing two dictionary: variables substitutions and values - to overrides when generating the entitlements file. - """ - source_path, provisioning_data, team_id = self._FindProvisioningProfile( - profile, bundle_identifier) - target_path = os.path.join( - os.environ['BUILT_PRODUCTS_DIR'], - os.environ['CONTENTS_FOLDER_PATH'], - 'embedded.mobileprovision') - shutil.copy2(source_path, target_path) - substitutions = self._GetSubstitutions(bundle_identifier, team_id + '.') - return substitutions, provisioning_data['Entitlements'] - - def _FindProvisioningProfile(self, profile, bundle_identifier): - """Finds the .mobileprovision file to use for signing the bundle. - - Checks all the installed provisioning profiles (or if the user specified - the PROVISIONING_PROFILE variable, only consult it) and select the most - specific that correspond to the bundle identifier. - - Args: - profile: string, optional, short name of the .mobileprovision file - to use, if empty or the file is missing, the best file installed - will be used - bundle_identifier: string, value of CFBundleIdentifier from Info.plist - - Returns: - A tuple of the path to the selected provisioning profile, the data of - the embedded plist in the provisioning profile and the team identifier - to use for code signing. - - Raises: - SystemExit: if no .mobileprovision can be used to sign the bundle. 
- """ - profiles_dir = os.path.join( - os.environ['HOME'], 'Library', 'MobileDevice', 'Provisioning Profiles') - if not os.path.isdir(profiles_dir): - print >>sys.stderr, ( - 'cannot find mobile provisioning for %s' % bundle_identifier) - sys.exit(1) - provisioning_profiles = None - if profile: - profile_path = os.path.join(profiles_dir, profile + '.mobileprovision') - if os.path.exists(profile_path): - provisioning_profiles = [profile_path] - if not provisioning_profiles: - provisioning_profiles = glob.glob( - os.path.join(profiles_dir, '*.mobileprovision')) - valid_provisioning_profiles = {} - for profile_path in provisioning_profiles: - profile_data = self._LoadProvisioningProfile(profile_path) - app_id_pattern = profile_data.get( - 'Entitlements', {}).get('application-identifier', '') - for team_identifier in profile_data.get('TeamIdentifier', []): - app_id = '%s.%s' % (team_identifier, bundle_identifier) - if fnmatch.fnmatch(app_id, app_id_pattern): - valid_provisioning_profiles[app_id_pattern] = ( - profile_path, profile_data, team_identifier) - if not valid_provisioning_profiles: - print >>sys.stderr, ( - 'cannot find mobile provisioning for %s' % bundle_identifier) - sys.exit(1) - # If the user has multiple provisioning profiles installed that can be - # used for ${bundle_identifier}, pick the most specific one (ie. the - # provisioning profile whose pattern is the longest). - selected_key = max(valid_provisioning_profiles, key=lambda v: len(v)) - return valid_provisioning_profiles[selected_key] - - def _LoadProvisioningProfile(self, profile_path): - """Extracts the plist embedded in a provisioning profile. - - Args: - profile_path: string, path to the .mobileprovision file - - Returns: - Content of the plist embedded in the provisioning profile as a dictionary. - """ - with tempfile.NamedTemporaryFile() as temp: - subprocess.check_call([ - 'security', 'cms', '-D', '-i', profile_path, '-o', temp.name]) - return self._LoadPlistMaybeBinary(temp.name) - - def _MergePlist(self, merged_plist, plist): - """Merge |plist| into |merged_plist|.""" - for key, value in plist.iteritems(): - if isinstance(value, dict): - merged_value = merged_plist.get(key, {}) - if isinstance(merged_value, dict): - self._MergePlist(merged_value, value) - merged_plist[key] = merged_value - else: - merged_plist[key] = value - else: - merged_plist[key] = value - - def _LoadPlistMaybeBinary(self, plist_path): - """Loads into a memory a plist possibly encoded in binary format. - - This is a wrapper around plistlib.readPlist that tries to convert the - plist to the XML format if it can't be parsed (assuming that it is in - the binary format). - - Args: - plist_path: string, path to a plist file, in XML or binary format - - Returns: - Content of the plist as a dictionary. - """ - try: - # First, try to read the file using plistlib that only supports XML, - # and if an exception is raised, convert a temporary copy to XML and - # load that copy. - return plistlib.readPlist(plist_path) - except: - pass - with tempfile.NamedTemporaryFile() as temp: - shutil.copy2(plist_path, temp.name) - subprocess.check_call(['plutil', '-convert', 'xml1', temp.name]) - return plistlib.readPlist(temp.name) - - def _GetSubstitutions(self, bundle_identifier, app_identifier_prefix): - """Constructs a dictionary of variable substitutions for Entitlements.plist. 
- - Args: - bundle_identifier: string, value of CFBundleIdentifier from Info.plist - app_identifier_prefix: string, value for AppIdentifierPrefix - - Returns: - Dictionary of substitutions to apply when generating Entitlements.plist. - """ - return { - 'CFBundleIdentifier': bundle_identifier, - 'AppIdentifierPrefix': app_identifier_prefix, - } - - def _GetCFBundleIdentifier(self): - """Extracts CFBundleIdentifier value from Info.plist in the bundle. - - Returns: - Value of CFBundleIdentifier in the Info.plist located in the bundle. - """ - info_plist_path = os.path.join( - os.environ['TARGET_BUILD_DIR'], - os.environ['INFOPLIST_PATH']) - info_plist_data = self._LoadPlistMaybeBinary(info_plist_path) - return info_plist_data['CFBundleIdentifier'] - - def _InstallEntitlements(self, entitlements, substitutions, overrides): - """Generates and install the ${BundleName}.xcent entitlements file. - - Expands variables "$(variable)" pattern in the source entitlements file, - add extra entitlements defined in the .mobileprovision file and the copy - the generated plist to "${BundlePath}.xcent". - - Args: - entitlements: string, optional, path to the Entitlements.plist template - to use, defaults to "${SDKROOT}/Entitlements.plist" - substitutions: dictionary, variable substitutions - overrides: dictionary, values to add to the entitlements - - Returns: - Path to the generated entitlements file. - """ - source_path = entitlements - target_path = os.path.join( - os.environ['BUILT_PRODUCTS_DIR'], - os.environ['PRODUCT_NAME'] + '.xcent') - if not source_path: - source_path = os.path.join( - os.environ['SDKROOT'], - 'Entitlements.plist') - shutil.copy2(source_path, target_path) - data = self._LoadPlistMaybeBinary(target_path) - data = self._ExpandVariables(data, substitutions) - if overrides: - for key in overrides: - if key not in data: - data[key] = overrides[key] - plistlib.writePlist(data, target_path) - return target_path - - def _ExpandVariables(self, data, substitutions): - """Expands variables "$(variable)" in data. - - Args: - data: object, can be either string, list or dictionary - substitutions: dictionary, variable substitutions to perform - - Returns: - Copy of data where each references to "$(variable)" has been replaced - by the corresponding value found in substitutions, or left intact if - the key was not found. - """ - if isinstance(data, str): - for key, value in substitutions.iteritems(): - data = data.replace('$(%s)' % key, value) - return data - if isinstance(data, list): - return [self._ExpandVariables(v, substitutions) for v in data] - if isinstance(data, dict): - return {k: self._ExpandVariables(data[k], substitutions) for k in data} - return data - -if __name__ == '__main__': - sys.exit(main(sys.argv[1:])) diff --git a/test/addons-napi/test_number/build/Makefile b/test/addons-napi/test_number/build/Makefile deleted file mode 100644 index 49b26eb6dc8f10..00000000000000 --- a/test/addons-napi/test_number/build/Makefile +++ /dev/null @@ -1,342 +0,0 @@ -# We borrow heavily from the kernel build setup, though we are simpler since -# we don't have Kconfig tweaking settings on us. - -# The implicit make rules have it looking for RCS files, among other things. -# We instead explicitly write all the rules we care about. -# It's even quicker (saves ~200ms) to pass -r on the command line. -MAKEFLAGS=-r - -# The source directory tree. -srcdir := .. -abs_srcdir := $(abspath $(srcdir)) - -# The name of the builddir. -builddir_name ?= . 
- -# The V=1 flag on command line makes us verbosely print command lines. -ifdef V - quiet= -else - quiet=quiet_ -endif - -# Specify BUILDTYPE=Release on the command line for a release build. -BUILDTYPE ?= Release - -# Directory all our build output goes into. -# Note that this must be two directories beneath src/ for unit tests to pass, -# as they reach into the src/ directory for data with relative paths. -builddir ?= $(builddir_name)/$(BUILDTYPE) -abs_builddir := $(abspath $(builddir)) -depsdir := $(builddir)/.deps - -# Object output directory. -obj := $(builddir)/obj -abs_obj := $(abspath $(obj)) - -# We build up a list of every single one of the targets so we can slurp in the -# generated dependency rule Makefiles in one pass. -all_deps := - - - -CC.target ?= $(CC) -CFLAGS.target ?= $(CPPFLAGS) $(CFLAGS) -CXX.target ?= $(CXX) -CXXFLAGS.target ?= $(CPPFLAGS) $(CXXFLAGS) -LINK.target ?= $(LINK) -LDFLAGS.target ?= $(LDFLAGS) -AR.target ?= $(AR) - -# C++ apps need to be linked with g++. -LINK ?= $(CXX.target) - -# TODO(evan): move all cross-compilation logic to gyp-time so we don't need -# to replicate this environment fallback in make as well. -CC.host ?= gcc -CFLAGS.host ?= $(CPPFLAGS_host) $(CFLAGS_host) -CXX.host ?= g++ -CXXFLAGS.host ?= $(CPPFLAGS_host) $(CXXFLAGS_host) -LINK.host ?= $(CXX.host) -LDFLAGS.host ?= -AR.host ?= ar - -# Define a dir function that can handle spaces. -# http://www.gnu.org/software/make/manual/make.html#Syntax-of-Functions -# "leading spaces cannot appear in the text of the first argument as written. -# These characters can be put into the argument value by variable substitution." -empty := -space := $(empty) $(empty) - -# http://stackoverflow.com/questions/1189781/using-make-dir-or-notdir-on-a-path-with-spaces -replace_spaces = $(subst $(space),?,$1) -unreplace_spaces = $(subst ?,$(space),$1) -dirx = $(call unreplace_spaces,$(dir $(call replace_spaces,$1))) - -# Flags to make gcc output dependency info. Note that you need to be -# careful here to use the flags that ccache and distcc can understand. -# We write to a dep file on the side first and then rename at the end -# so we can't end up with a broken dep file. -depfile = $(depsdir)/$(call replace_spaces,$@).d -DEPFLAGS = -MMD -MF $(depfile).raw - -# We have to fixup the deps output in a few ways. -# (1) the file output should mention the proper .o file. -# ccache or distcc lose the path to the target, so we convert a rule of -# the form: -# foobar.o: DEP1 DEP2 -# into -# path/to/foobar.o: DEP1 DEP2 -# (2) we want missing files not to cause us to fail to build. -# We want to rewrite -# foobar.o: DEP1 DEP2 \ -# DEP3 -# to -# DEP1: -# DEP2: -# DEP3: -# so if the files are missing, they're just considered phony rules. -# We have to do some pretty insane escaping to get those backslashes -# and dollar signs past make, the shell, and sed at the same time. -# Doesn't work with spaces, but that's fine: .d files have spaces in -# their names replaced with other characters. -define fixup_dep -# The depfile may not exist if the input file didn't have any #includes. -touch $(depfile).raw -# Fixup path as in (1). -sed -e "s|^$(notdir $@)|$@|" $(depfile).raw >> $(depfile) -# Add extra rules as in (2). -# We remove slashes and replace spaces with new lines; -# remove blank lines; -# delete the first line and append a colon to the remaining lines. 
-sed -e 's|\\||' -e 'y| |\n|' $(depfile).raw |\ - grep -v '^$$' |\ - sed -e 1d -e 's|$$|:|' \ - >> $(depfile) -rm $(depfile).raw -endef - -# Command definitions: -# - cmd_foo is the actual command to run; -# - quiet_cmd_foo is the brief-output summary of the command. - -quiet_cmd_cc = CC($(TOOLSET)) $@ -cmd_cc = $(CC.$(TOOLSET)) $(GYP_CFLAGS) $(DEPFLAGS) $(CFLAGS.$(TOOLSET)) -c -o $@ $< - -quiet_cmd_cxx = CXX($(TOOLSET)) $@ -cmd_cxx = $(CXX.$(TOOLSET)) $(GYP_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< - -quiet_cmd_objc = CXX($(TOOLSET)) $@ -cmd_objc = $(CC.$(TOOLSET)) $(GYP_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $< - -quiet_cmd_objcxx = CXX($(TOOLSET)) $@ -cmd_objcxx = $(CXX.$(TOOLSET)) $(GYP_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $< - -# Commands for precompiled header files. -quiet_cmd_pch_c = CXX($(TOOLSET)) $@ -cmd_pch_c = $(CC.$(TOOLSET)) $(GYP_PCH_CFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< -quiet_cmd_pch_cc = CXX($(TOOLSET)) $@ -cmd_pch_cc = $(CC.$(TOOLSET)) $(GYP_PCH_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< -quiet_cmd_pch_m = CXX($(TOOLSET)) $@ -cmd_pch_m = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $< -quiet_cmd_pch_mm = CXX($(TOOLSET)) $@ -cmd_pch_mm = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $< - -# gyp-mac-tool is written next to the root Makefile by gyp. -# Use $(4) for the command, since $(2) and $(3) are used as flag by do_cmd -# already. -quiet_cmd_mac_tool = MACTOOL $(4) $< -cmd_mac_tool = ./gyp-mac-tool $(4) $< "$@" - -quiet_cmd_mac_package_framework = PACKAGE FRAMEWORK $@ -cmd_mac_package_framework = ./gyp-mac-tool package-framework "$@" $(4) - -quiet_cmd_infoplist = INFOPLIST $@ -cmd_infoplist = $(CC.$(TOOLSET)) -E -P -Wno-trigraphs -x c $(INFOPLIST_DEFINES) "$<" -o "$@" - -quiet_cmd_touch = TOUCH $@ -cmd_touch = touch $@ - -quiet_cmd_copy = COPY $@ -# send stderr to /dev/null to ignore messages when linking directories. -cmd_copy = rm -rf "$@" && cp -af "$<" "$@" - -quiet_cmd_alink = LIBTOOL-STATIC $@ -cmd_alink = rm -f $@ && ./gyp-mac-tool filter-libtool libtool $(GYP_LIBTOOLFLAGS) -static -o $@ $(filter %.o,$^) - -quiet_cmd_link = LINK($(TOOLSET)) $@ -cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS) - -quiet_cmd_solink = SOLINK($(TOOLSET)) $@ -cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS) - -quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@ -cmd_solink_module = $(LINK.$(TOOLSET)) -bundle $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS) - - -# Define an escape_quotes function to escape single quotes. -# This allows us to handle quotes properly as long as we always use -# use single quotes and escape_quotes. -escape_quotes = $(subst ','\'',$(1)) -# This comment is here just to include a ' to unconfuse syntax highlighting. -# Define an escape_vars function to escape '$' variable syntax. -# This allows us to read/write command lines with shell variables (e.g. -# $LD_LIBRARY_PATH), without triggering make substitution. -escape_vars = $(subst $$,$$$$,$(1)) -# Helper that expands to a shell command to echo a string exactly as it is in -# make. This uses printf instead of echo because printf's behaviour with respect -# to escape sequences is more portable than echo's across different shells -# (e.g., dash, bash). 
-exact_echo = printf '%s\n' '$(call escape_quotes,$(1))' - -# Helper to compare the command we're about to run against the command -# we logged the last time we ran the command. Produces an empty -# string (false) when the commands match. -# Tricky point: Make has no string-equality test function. -# The kernel uses the following, but it seems like it would have false -# positives, where one string reordered its arguments. -# arg_check = $(strip $(filter-out $(cmd_$(1)), $(cmd_$@)) \ -# $(filter-out $(cmd_$@), $(cmd_$(1)))) -# We instead substitute each for the empty string into the other, and -# say they're equal if both substitutions produce the empty string. -# .d files contain ? instead of spaces, take that into account. -command_changed = $(or $(subst $(cmd_$(1)),,$(cmd_$(call replace_spaces,$@))),\ - $(subst $(cmd_$(call replace_spaces,$@)),,$(cmd_$(1)))) - -# Helper that is non-empty when a prerequisite changes. -# Normally make does this implicitly, but we force rules to always run -# so we can check their command lines. -# $? -- new prerequisites -# $| -- order-only dependencies -prereq_changed = $(filter-out FORCE_DO_CMD,$(filter-out $|,$?)) - -# Helper that executes all postbuilds until one fails. -define do_postbuilds - @E=0;\ - for p in $(POSTBUILDS); do\ - eval $$p;\ - E=$$?;\ - if [ $$E -ne 0 ]; then\ - break;\ - fi;\ - done;\ - if [ $$E -ne 0 ]; then\ - rm -rf "$@";\ - exit $$E;\ - fi -endef - -# do_cmd: run a command via the above cmd_foo names, if necessary. -# Should always run for a given target to handle command-line changes. -# Second argument, if non-zero, makes it do asm/C/C++ dependency munging. -# Third argument, if non-zero, makes it do POSTBUILDS processing. -# Note: We intentionally do NOT call dirx for depfile, since it contains ? for -# spaces already and dirx strips the ? characters. -define do_cmd -$(if $(or $(command_changed),$(prereq_changed)), - @$(call exact_echo, $($(quiet)cmd_$(1))) - @mkdir -p "$(call dirx,$@)" "$(dir $(depfile))" - $(if $(findstring flock,$(word 2,$(cmd_$1))), - @$(cmd_$(1)) - @echo " $(quiet_cmd_$(1)): Finished", - @$(cmd_$(1)) - ) - @$(call exact_echo,$(call escape_vars,cmd_$(call replace_spaces,$@) := $(cmd_$(1)))) > $(depfile) - @$(if $(2),$(fixup_dep)) - $(if $(and $(3), $(POSTBUILDS)), - $(call do_postbuilds) - ) -) -endef - -# Declare the "all" target first so it is the default, -# even though we don't have the deps yet. -.PHONY: all -all: - -# make looks for ways to re-generate included makefiles, but in our case, we -# don't have a direct way. Explicitly telling make that it has nothing to do -# for them makes it go faster. -%.d: ; - -# Use FORCE_DO_CMD to force a target to run. Should be coupled with -# do_cmd. -.PHONY: FORCE_DO_CMD -FORCE_DO_CMD: - -TOOLSET := target -# Suffix rules, putting all outputs into $(obj). -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.m FORCE_DO_CMD - @$(call do_cmd,objc,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.mm FORCE_DO_CMD - @$(call do_cmd,objcxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.S FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.s FORCE_DO_CMD - @$(call do_cmd,cc,1) - -# Try building from generated source, too. 
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.m FORCE_DO_CMD - @$(call do_cmd,objc,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.mm FORCE_DO_CMD - @$(call do_cmd,objcxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.S FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.s FORCE_DO_CMD - @$(call do_cmd,cc,1) - -$(obj).$(TOOLSET)/%.o: $(obj)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.m FORCE_DO_CMD - @$(call do_cmd,objc,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.mm FORCE_DO_CMD - @$(call do_cmd,objcxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.S FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.s FORCE_DO_CMD - @$(call do_cmd,cc,1) - - -ifeq ($(strip $(foreach prefix,$(NO_LOAD),\ - $(findstring $(join ^,$(prefix)),\ - $(join ^,test_number.target.mk)))),) - include test_number.target.mk -endif - -quiet_cmd_regen_makefile = ACTION Regenerating $@ -cmd_regen_makefile = cd $(srcdir); /Users/trott/io.js/deps/npm/node_modules/node-gyp/gyp/gyp_main.py -fmake --ignore-environment "--toplevel-dir=." -I/Users/trott/io.js/test/addons-napi/test_number/build/config.gypi -I/Users/trott/io.js/deps/npm/node_modules/node-gyp/addon.gypi -I/Users/trott/io.js/common.gypi "--depth=." "-Goutput_dir=." "--generator-output=build" "-Dlibrary=shared_library" "-Dvisibility=default" "-Dnode_root_dir=/Users/trott/io.js" "-Dnode_gyp_dir=/Users/trott/io.js/deps/npm/node_modules/node-gyp" "-Dnode_lib_file=/Users/trott/io.js/$(Configuration)/node.lib" "-Dmodule_root_dir=/Users/trott/io.js/test/addons-napi/test_number" "-Dnode_engine=v8" binding.gyp -Makefile: $(srcdir)/../../../deps/npm/node_modules/node-gyp/addon.gypi $(srcdir)/build/config.gypi $(srcdir)/binding.gyp $(srcdir)/../../../common.gypi - $(call do_cmd,regen_makefile) - -# "all" is a concatenation of the "all" targets from all the included -# sub-makefiles. This is just here to clarify. -all: - -# Add in dependency-tracking rules. $(all_deps) is the list of every single -# target in our tree. Only consider the ones with .d (dependency) info: -d_files := $(wildcard $(foreach f,$(all_deps),$(depsdir)/$(f).d)) -ifneq ($(d_files),) - include $(d_files) -endif diff --git a/test/addons-napi/test_number/build/binding.Makefile b/test/addons-napi/test_number/build/binding.Makefile deleted file mode 100644 index f2bc3a7d0e01e0..00000000000000 --- a/test/addons-napi/test_number/build/binding.Makefile +++ /dev/null @@ -1,6 +0,0 @@ -# This file is generated by gyp; do not edit. - -export builddir_name ?= ./build/. -.PHONY: all -all: - $(MAKE) test_number diff --git a/test/addons-napi/test_number/build/config.gypi b/test/addons-napi/test_number/build/config.gypi deleted file mode 100644 index 6a35cfccbb772b..00000000000000 --- a/test/addons-napi/test_number/build/config.gypi +++ /dev/null @@ -1,70 +0,0 @@ -# Do not edit. 
File was generated by node-gyp's "configure" step -{ - "target_defaults": { - "cflags": [], - "default_configuration": "Release", - "defines": [], - "include_dirs": [], - "libraries": [] - }, - "variables": { - "asan": 0, - "coverage": "false", - "debug_devtools": "node", - "debug_http2": "false", - "debug_nghttp2": "false", - "force_dynamic_crt": 0, - "host_arch": "x64", - "icu_data_file": "icudt59l.dat", - "icu_data_in": "../../deps/icu-small/source/data/in/icudt59l.dat", - "icu_endianness": "l", - "icu_gyp_path": "tools/icu/icu-generic.gyp", - "icu_locales": "en,root", - "icu_path": "deps/icu-small", - "icu_small": "true", - "icu_ver_major": "59", - "llvm_version": 0, - "node_byteorder": "little", - "node_enable_d8": "false", - "node_enable_v8_vtunejit": "false", - "node_install_npm": "true", - "node_module_version": 58, - "node_no_browser_globals": "false", - "node_prefix": "/usr/local", - "node_release_urlbase": "", - "node_shared": "false", - "node_shared_cares": "false", - "node_shared_http_parser": "false", - "node_shared_libuv": "false", - "node_shared_openssl": "false", - "node_shared_zlib": "false", - "node_tag": "", - "node_use_bundled_v8": "true", - "node_use_dtrace": "true", - "node_use_etw": "false", - "node_use_lttng": "false", - "node_use_openssl": "true", - "node_use_perfctr": "false", - "node_use_v8_platform": "true", - "node_without_node_options": "false", - "openssl_fips": "", - "openssl_no_asm": 0, - "shlib_suffix": "58.dylib", - "target_arch": "x64", - "uv_parent_path": "/deps/uv/", - "uv_use_dtrace": "true", - "v8_enable_gdbjit": 0, - "v8_enable_i18n_support": 1, - "v8_enable_inspector": 1, - "v8_no_strict_aliasing": 1, - "v8_optimized_debug": 0, - "v8_promise_internal_field_count": 1, - "v8_random_seed": 0, - "v8_trace_maps": 0, - "v8_use_snapshot": "true", - "want_separate_host_toolset": 0, - "xcode_version": "8.0", - "nodedir": "/Users/trott/io.js", - "standalone_static_library": 1 - } -} diff --git a/test/addons-napi/test_number/build/gyp-mac-tool b/test/addons-napi/test_number/build/gyp-mac-tool deleted file mode 100755 index 8ef02b0493a003..00000000000000 --- a/test/addons-napi/test_number/build/gyp-mac-tool +++ /dev/null @@ -1,611 +0,0 @@ -#!/usr/bin/env python -# Generated by gyp. Do not edit. -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Utility functions to perform Xcode-style build steps. - -These functions are executed via gyp-mac-tool when using the Makefile generator. -""" - -import fcntl -import fnmatch -import glob -import json -import os -import plistlib -import re -import shutil -import string -import subprocess -import sys -import tempfile - - -def main(args): - executor = MacTool() - exit_code = executor.Dispatch(args) - if exit_code is not None: - sys.exit(exit_code) - - -class MacTool(object): - """This class performs all the Mac tooling steps. 
The methods can either be - executed directly, or dispatched from an argument list.""" - - def Dispatch(self, args): - """Dispatches a string command to a method.""" - if len(args) < 1: - raise Exception("Not enough arguments") - - method = "Exec%s" % self._CommandifyName(args[0]) - return getattr(self, method)(*args[1:]) - - def _CommandifyName(self, name_string): - """Transforms a tool name like copy-info-plist to CopyInfoPlist""" - return name_string.title().replace('-', '') - - def ExecCopyBundleResource(self, source, dest, convert_to_binary): - """Copies a resource file to the bundle/Resources directory, performing any - necessary compilation on each resource.""" - extension = os.path.splitext(source)[1].lower() - if os.path.isdir(source): - # Copy tree. - # TODO(thakis): This copies file attributes like mtime, while the - # single-file branch below doesn't. This should probably be changed to - # be consistent with the single-file branch. - if os.path.exists(dest): - shutil.rmtree(dest) - shutil.copytree(source, dest) - elif extension == '.xib': - return self._CopyXIBFile(source, dest) - elif extension == '.storyboard': - return self._CopyXIBFile(source, dest) - elif extension == '.strings': - self._CopyStringsFile(source, dest, convert_to_binary) - else: - shutil.copy(source, dest) - - def _CopyXIBFile(self, source, dest): - """Compiles a XIB file with ibtool into a binary plist in the bundle.""" - - # ibtool sometimes crashes with relative paths. See crbug.com/314728. - base = os.path.dirname(os.path.realpath(__file__)) - if os.path.relpath(source): - source = os.path.join(base, source) - if os.path.relpath(dest): - dest = os.path.join(base, dest) - - args = ['xcrun', 'ibtool', '--errors', '--warnings', '--notices', - '--output-format', 'human-readable-text', '--compile', dest, source] - ibtool_section_re = re.compile(r'/\*.*\*/') - ibtool_re = re.compile(r'.*note:.*is clipping its content') - ibtoolout = subprocess.Popen(args, stdout=subprocess.PIPE) - current_section_header = None - for line in ibtoolout.stdout: - if ibtool_section_re.match(line): - current_section_header = line - elif not ibtool_re.match(line): - if current_section_header: - sys.stdout.write(current_section_header) - current_section_header = None - sys.stdout.write(line) - return ibtoolout.returncode - - def _ConvertToBinary(self, dest): - subprocess.check_call([ - 'xcrun', 'plutil', '-convert', 'binary1', '-o', dest, dest]) - - def _CopyStringsFile(self, source, dest, convert_to_binary): - """Copies a .strings file using iconv to reconvert the input into UTF-16.""" - input_code = self._DetectInputEncoding(source) or "UTF-8" - - # Xcode's CpyCopyStringsFile / builtin-copyStrings seems to call - # CFPropertyListCreateFromXMLData() behind the scenes; at least it prints - # CFPropertyListCreateFromXMLData(): Old-style plist parser: missing - # semicolon in dictionary. - # on invalid files. Do the same kind of validation. - import CoreFoundation - s = open(source, 'rb').read() - d = CoreFoundation.CFDataCreate(None, s, len(s)) - _, error = CoreFoundation.CFPropertyListCreateFromXMLData(None, d, 0, None) - if error: - return - - fp = open(dest, 'wb') - fp.write(s.decode(input_code).encode('UTF-16')) - fp.close() - - if convert_to_binary == 'True': - self._ConvertToBinary(dest) - - def _DetectInputEncoding(self, file_name): - """Reads the first few bytes from file_name and tries to guess the text - encoding. 
Returns None as a guess if it can't detect it.""" - fp = open(file_name, 'rb') - try: - header = fp.read(3) - except e: - fp.close() - return None - fp.close() - if header.startswith("\xFE\xFF"): - return "UTF-16" - elif header.startswith("\xFF\xFE"): - return "UTF-16" - elif header.startswith("\xEF\xBB\xBF"): - return "UTF-8" - else: - return None - - def ExecCopyInfoPlist(self, source, dest, convert_to_binary, *keys): - """Copies the |source| Info.plist to the destination directory |dest|.""" - # Read the source Info.plist into memory. - fd = open(source, 'r') - lines = fd.read() - fd.close() - - # Insert synthesized key/value pairs (e.g. BuildMachineOSBuild). - plist = plistlib.readPlistFromString(lines) - if keys: - plist = dict(plist.items() + json.loads(keys[0]).items()) - lines = plistlib.writePlistToString(plist) - - # Go through all the environment variables and replace them as variables in - # the file. - IDENT_RE = re.compile(r'[/\s]') - for key in os.environ: - if key.startswith('_'): - continue - evar = '${%s}' % key - evalue = os.environ[key] - lines = string.replace(lines, evar, evalue) - - # Xcode supports various suffices on environment variables, which are - # all undocumented. :rfc1034identifier is used in the standard project - # template these days, and :identifier was used earlier. They are used to - # convert non-url characters into things that look like valid urls -- - # except that the replacement character for :identifier, '_' isn't valid - # in a URL either -- oops, hence :rfc1034identifier was born. - evar = '${%s:identifier}' % key - evalue = IDENT_RE.sub('_', os.environ[key]) - lines = string.replace(lines, evar, evalue) - - evar = '${%s:rfc1034identifier}' % key - evalue = IDENT_RE.sub('-', os.environ[key]) - lines = string.replace(lines, evar, evalue) - - # Remove any keys with values that haven't been replaced. - lines = lines.split('\n') - for i in range(len(lines)): - if lines[i].strip().startswith("${"): - lines[i] = None - lines[i - 1] = None - lines = '\n'.join(filter(lambda x: x is not None, lines)) - - # Write out the file with variables replaced. - fd = open(dest, 'w') - fd.write(lines) - fd.close() - - # Now write out PkgInfo file now that the Info.plist file has been - # "compiled". - self._WritePkgInfo(dest) - - if convert_to_binary == 'True': - self._ConvertToBinary(dest) - - def _WritePkgInfo(self, info_plist): - """This writes the PkgInfo file from the data stored in Info.plist.""" - plist = plistlib.readPlist(info_plist) - if not plist: - return - - # Only create PkgInfo for executable types. - package_type = plist['CFBundlePackageType'] - if package_type != 'APPL': - return - - # The format of PkgInfo is eight characters, representing the bundle type - # and bundle signature, each four characters. If that is missing, four - # '?' characters are used instead. - signature_code = plist.get('CFBundleSignature', '????') - if len(signature_code) != 4: # Wrong length resets everything, too. - signature_code = '?' * 4 - - dest = os.path.join(os.path.dirname(info_plist), 'PkgInfo') - fp = open(dest, 'w') - fp.write('%s%s' % (package_type, signature_code)) - fp.close() - - def ExecFlock(self, lockfile, *cmd_list): - """Emulates the most basic behavior of Linux's flock(1).""" - # Rely on exception handling to report errors. 
- fd = os.open(lockfile, os.O_RDONLY|os.O_NOCTTY|os.O_CREAT, 0o666) - fcntl.flock(fd, fcntl.LOCK_EX) - return subprocess.call(cmd_list) - - def ExecFilterLibtool(self, *cmd_list): - """Calls libtool and filters out '/path/to/libtool: file: foo.o has no - symbols'.""" - libtool_re = re.compile(r'^.*libtool: file: .* has no symbols$') - libtool_re5 = re.compile( - r'^.*libtool: warning for library: ' + - r'.* the table of contents is empty ' + - r'\(no object file members in the library define global symbols\)$') - env = os.environ.copy() - # Ref: - # http://www.opensource.apple.com/source/cctools/cctools-809/misc/libtool.c - # The problem with this flag is that it resets the file mtime on the file to - # epoch=0, e.g. 1970-1-1 or 1969-12-31 depending on timezone. - env['ZERO_AR_DATE'] = '1' - libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env) - _, err = libtoolout.communicate() - for line in err.splitlines(): - if not libtool_re.match(line) and not libtool_re5.match(line): - print >>sys.stderr, line - # Unconditionally touch the output .a file on the command line if present - # and the command succeeded. A bit hacky. - if not libtoolout.returncode: - for i in range(len(cmd_list) - 1): - if cmd_list[i] == "-o" and cmd_list[i+1].endswith('.a'): - os.utime(cmd_list[i+1], None) - break - return libtoolout.returncode - - def ExecPackageFramework(self, framework, version): - """Takes a path to Something.framework and the Current version of that and - sets up all the symlinks.""" - # Find the name of the binary based on the part before the ".framework". - binary = os.path.basename(framework).split('.')[0] - - CURRENT = 'Current' - RESOURCES = 'Resources' - VERSIONS = 'Versions' - - if not os.path.exists(os.path.join(framework, VERSIONS, version, binary)): - # Binary-less frameworks don't seem to contain symlinks (see e.g. - # chromium's out/Debug/org.chromium.Chromium.manifest/ bundle). - return - - # Move into the framework directory to set the symlinks correctly. - pwd = os.getcwd() - os.chdir(framework) - - # Set up the Current version. - self._Relink(version, os.path.join(VERSIONS, CURRENT)) - - # Set up the root symlinks. - self._Relink(os.path.join(VERSIONS, CURRENT, binary), binary) - self._Relink(os.path.join(VERSIONS, CURRENT, RESOURCES), RESOURCES) - - # Back to where we were before! - os.chdir(pwd) - - def _Relink(self, dest, link): - """Creates a symlink to |dest| named |link|. If |link| already exists, - it is overwritten.""" - if os.path.lexists(link): - os.remove(link) - os.symlink(dest, link) - - def ExecCompileXcassets(self, keys, *inputs): - """Compiles multiple .xcassets files into a single .car file. - - This invokes 'actool' to compile all the inputs .xcassets files. The - |keys| arguments is a json-encoded dictionary of extra arguments to - pass to 'actool' when the asset catalogs contains an application icon - or a launch image. - - Note that 'actool' does not create the Assets.car file if the asset - catalogs does not contains imageset. 
- """ - command_line = [ - 'xcrun', 'actool', '--output-format', 'human-readable-text', - '--compress-pngs', '--notices', '--warnings', '--errors', - ] - is_iphone_target = 'IPHONEOS_DEPLOYMENT_TARGET' in os.environ - if is_iphone_target: - platform = os.environ['CONFIGURATION'].split('-')[-1] - if platform not in ('iphoneos', 'iphonesimulator'): - platform = 'iphonesimulator' - command_line.extend([ - '--platform', platform, '--target-device', 'iphone', - '--target-device', 'ipad', '--minimum-deployment-target', - os.environ['IPHONEOS_DEPLOYMENT_TARGET'], '--compile', - os.path.abspath(os.environ['CONTENTS_FOLDER_PATH']), - ]) - else: - command_line.extend([ - '--platform', 'macosx', '--target-device', 'mac', - '--minimum-deployment-target', os.environ['MACOSX_DEPLOYMENT_TARGET'], - '--compile', - os.path.abspath(os.environ['UNLOCALIZED_RESOURCES_FOLDER_PATH']), - ]) - if keys: - keys = json.loads(keys) - for key, value in keys.iteritems(): - arg_name = '--' + key - if isinstance(value, bool): - if value: - command_line.append(arg_name) - elif isinstance(value, list): - for v in value: - command_line.append(arg_name) - command_line.append(str(v)) - else: - command_line.append(arg_name) - command_line.append(str(value)) - # Note: actool crashes if inputs path are relative, so use os.path.abspath - # to get absolute path name for inputs. - command_line.extend(map(os.path.abspath, inputs)) - subprocess.check_call(command_line) - - def ExecMergeInfoPlist(self, output, *inputs): - """Merge multiple .plist files into a single .plist file.""" - merged_plist = {} - for path in inputs: - plist = self._LoadPlistMaybeBinary(path) - self._MergePlist(merged_plist, plist) - plistlib.writePlist(merged_plist, output) - - def ExecCodeSignBundle(self, key, resource_rules, entitlements, provisioning): - """Code sign a bundle. - - This function tries to code sign an iOS bundle, following the same - algorithm as Xcode: - 1. copy ResourceRules.plist from the user or the SDK into the bundle, - 2. pick the provisioning profile that best match the bundle identifier, - and copy it into the bundle as embedded.mobileprovision, - 3. copy Entitlements.plist from user or SDK next to the bundle, - 4. code sign the bundle. - """ - resource_rules_path = self._InstallResourceRules(resource_rules) - substitutions, overrides = self._InstallProvisioningProfile( - provisioning, self._GetCFBundleIdentifier()) - entitlements_path = self._InstallEntitlements( - entitlements, substitutions, overrides) - subprocess.check_call([ - 'codesign', '--force', '--sign', key, '--resource-rules', - resource_rules_path, '--entitlements', entitlements_path, - os.path.join( - os.environ['TARGET_BUILD_DIR'], - os.environ['FULL_PRODUCT_NAME'])]) - - def _InstallResourceRules(self, resource_rules): - """Installs ResourceRules.plist from user or SDK into the bundle. - - Args: - resource_rules: string, optional, path to the ResourceRules.plist file - to use, default to "${SDKROOT}/ResourceRules.plist" - - Returns: - Path to the copy of ResourceRules.plist into the bundle. - """ - source_path = resource_rules - target_path = os.path.join( - os.environ['BUILT_PRODUCTS_DIR'], - os.environ['CONTENTS_FOLDER_PATH'], - 'ResourceRules.plist') - if not source_path: - source_path = os.path.join( - os.environ['SDKROOT'], 'ResourceRules.plist') - shutil.copy2(source_path, target_path) - return target_path - - def _InstallProvisioningProfile(self, profile, bundle_identifier): - """Installs embedded.mobileprovision into the bundle. 
- - Args: - profile: string, optional, short name of the .mobileprovision file - to use, if empty or the file is missing, the best file installed - will be used - bundle_identifier: string, value of CFBundleIdentifier from Info.plist - - Returns: - A tuple containing two dictionary: variables substitutions and values - to overrides when generating the entitlements file. - """ - source_path, provisioning_data, team_id = self._FindProvisioningProfile( - profile, bundle_identifier) - target_path = os.path.join( - os.environ['BUILT_PRODUCTS_DIR'], - os.environ['CONTENTS_FOLDER_PATH'], - 'embedded.mobileprovision') - shutil.copy2(source_path, target_path) - substitutions = self._GetSubstitutions(bundle_identifier, team_id + '.') - return substitutions, provisioning_data['Entitlements'] - - def _FindProvisioningProfile(self, profile, bundle_identifier): - """Finds the .mobileprovision file to use for signing the bundle. - - Checks all the installed provisioning profiles (or if the user specified - the PROVISIONING_PROFILE variable, only consult it) and select the most - specific that correspond to the bundle identifier. - - Args: - profile: string, optional, short name of the .mobileprovision file - to use, if empty or the file is missing, the best file installed - will be used - bundle_identifier: string, value of CFBundleIdentifier from Info.plist - - Returns: - A tuple of the path to the selected provisioning profile, the data of - the embedded plist in the provisioning profile and the team identifier - to use for code signing. - - Raises: - SystemExit: if no .mobileprovision can be used to sign the bundle. - """ - profiles_dir = os.path.join( - os.environ['HOME'], 'Library', 'MobileDevice', 'Provisioning Profiles') - if not os.path.isdir(profiles_dir): - print >>sys.stderr, ( - 'cannot find mobile provisioning for %s' % bundle_identifier) - sys.exit(1) - provisioning_profiles = None - if profile: - profile_path = os.path.join(profiles_dir, profile + '.mobileprovision') - if os.path.exists(profile_path): - provisioning_profiles = [profile_path] - if not provisioning_profiles: - provisioning_profiles = glob.glob( - os.path.join(profiles_dir, '*.mobileprovision')) - valid_provisioning_profiles = {} - for profile_path in provisioning_profiles: - profile_data = self._LoadProvisioningProfile(profile_path) - app_id_pattern = profile_data.get( - 'Entitlements', {}).get('application-identifier', '') - for team_identifier in profile_data.get('TeamIdentifier', []): - app_id = '%s.%s' % (team_identifier, bundle_identifier) - if fnmatch.fnmatch(app_id, app_id_pattern): - valid_provisioning_profiles[app_id_pattern] = ( - profile_path, profile_data, team_identifier) - if not valid_provisioning_profiles: - print >>sys.stderr, ( - 'cannot find mobile provisioning for %s' % bundle_identifier) - sys.exit(1) - # If the user has multiple provisioning profiles installed that can be - # used for ${bundle_identifier}, pick the most specific one (ie. the - # provisioning profile whose pattern is the longest). - selected_key = max(valid_provisioning_profiles, key=lambda v: len(v)) - return valid_provisioning_profiles[selected_key] - - def _LoadProvisioningProfile(self, profile_path): - """Extracts the plist embedded in a provisioning profile. - - Args: - profile_path: string, path to the .mobileprovision file - - Returns: - Content of the plist embedded in the provisioning profile as a dictionary. 
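_FindProvisioningProfile above builds "<team id>.<bundle id>", matches it against the application-identifier pattern of every installed profile with fnmatch, and then prefers the longest matching pattern as the most specific one. A condensed Python 3 sketch of that selection rule (the profile data here is hypothetical):

import fnmatch

def pick_profile(patterns_to_paths, team_id, bundle_id):
    app_id = '%s.%s' % (team_id, bundle_id)
    matches = {pattern: path for pattern, path in patterns_to_paths.items()
               if fnmatch.fnmatch(app_id, pattern)}
    if not matches:
        raise SystemExit('cannot find mobile provisioning for %s' % bundle_id)
    # Longest pattern == most specific profile, exactly as in the code above.
    return matches[max(matches, key=len)]

profiles = {'ABCDE12345.*': 'team-wildcard.mobileprovision',
            'ABCDE12345.org.example.app': 'app-specific.mobileprovision'}
print(pick_profile(profiles, 'ABCDE12345', 'org.example.app'))
# -> app-specific.mobileprovision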
- """ - with tempfile.NamedTemporaryFile() as temp: - subprocess.check_call([ - 'security', 'cms', '-D', '-i', profile_path, '-o', temp.name]) - return self._LoadPlistMaybeBinary(temp.name) - - def _MergePlist(self, merged_plist, plist): - """Merge |plist| into |merged_plist|.""" - for key, value in plist.iteritems(): - if isinstance(value, dict): - merged_value = merged_plist.get(key, {}) - if isinstance(merged_value, dict): - self._MergePlist(merged_value, value) - merged_plist[key] = merged_value - else: - merged_plist[key] = value - else: - merged_plist[key] = value - - def _LoadPlistMaybeBinary(self, plist_path): - """Loads into a memory a plist possibly encoded in binary format. - - This is a wrapper around plistlib.readPlist that tries to convert the - plist to the XML format if it can't be parsed (assuming that it is in - the binary format). - - Args: - plist_path: string, path to a plist file, in XML or binary format - - Returns: - Content of the plist as a dictionary. - """ - try: - # First, try to read the file using plistlib that only supports XML, - # and if an exception is raised, convert a temporary copy to XML and - # load that copy. - return plistlib.readPlist(plist_path) - except: - pass - with tempfile.NamedTemporaryFile() as temp: - shutil.copy2(plist_path, temp.name) - subprocess.check_call(['plutil', '-convert', 'xml1', temp.name]) - return plistlib.readPlist(temp.name) - - def _GetSubstitutions(self, bundle_identifier, app_identifier_prefix): - """Constructs a dictionary of variable substitutions for Entitlements.plist. - - Args: - bundle_identifier: string, value of CFBundleIdentifier from Info.plist - app_identifier_prefix: string, value for AppIdentifierPrefix - - Returns: - Dictionary of substitutions to apply when generating Entitlements.plist. - """ - return { - 'CFBundleIdentifier': bundle_identifier, - 'AppIdentifierPrefix': app_identifier_prefix, - } - - def _GetCFBundleIdentifier(self): - """Extracts CFBundleIdentifier value from Info.plist in the bundle. - - Returns: - Value of CFBundleIdentifier in the Info.plist located in the bundle. - """ - info_plist_path = os.path.join( - os.environ['TARGET_BUILD_DIR'], - os.environ['INFOPLIST_PATH']) - info_plist_data = self._LoadPlistMaybeBinary(info_plist_path) - return info_plist_data['CFBundleIdentifier'] - - def _InstallEntitlements(self, entitlements, substitutions, overrides): - """Generates and install the ${BundleName}.xcent entitlements file. - - Expands variables "$(variable)" pattern in the source entitlements file, - add extra entitlements defined in the .mobileprovision file and the copy - the generated plist to "${BundlePath}.xcent". - - Args: - entitlements: string, optional, path to the Entitlements.plist template - to use, defaults to "${SDKROOT}/Entitlements.plist" - substitutions: dictionary, variable substitutions - overrides: dictionary, values to add to the entitlements - - Returns: - Path to the generated entitlements file. 
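_MergePlist above folds one plist into another recursively: nested dictionaries are merged key by key, while any non-dictionary value from the later plist simply overwrites what was there. A small Python 3 sketch of the same merge, with hypothetical sample data:

def merge_plist(merged, plist):
    for key, value in plist.items():
        if isinstance(value, dict) and isinstance(merged.get(key, {}), dict):
            # Both sides are dictionaries: descend and merge in place.
            merged.setdefault(key, {})
            merge_plist(merged[key], value)
        else:
            # Scalars, lists, or type mismatches: the later value wins.
            merged[key] = value

base = {'CFBundleName': 'App',
        'NSAppTransportSecurity': {'NSAllowsArbitraryLoads': False}}
extra = {'NSAppTransportSecurity': {'NSExceptionDomains': {'example.org': {}}}}
merge_plist(base, extra)
print(base['NSAppTransportSecurity'])
# -> {'NSAllowsArbitraryLoads': False, 'NSExceptionDomains': {'example.org': {}}}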
- """ - source_path = entitlements - target_path = os.path.join( - os.environ['BUILT_PRODUCTS_DIR'], - os.environ['PRODUCT_NAME'] + '.xcent') - if not source_path: - source_path = os.path.join( - os.environ['SDKROOT'], - 'Entitlements.plist') - shutil.copy2(source_path, target_path) - data = self._LoadPlistMaybeBinary(target_path) - data = self._ExpandVariables(data, substitutions) - if overrides: - for key in overrides: - if key not in data: - data[key] = overrides[key] - plistlib.writePlist(data, target_path) - return target_path - - def _ExpandVariables(self, data, substitutions): - """Expands variables "$(variable)" in data. - - Args: - data: object, can be either string, list or dictionary - substitutions: dictionary, variable substitutions to perform - - Returns: - Copy of data where each references to "$(variable)" has been replaced - by the corresponding value found in substitutions, or left intact if - the key was not found. - """ - if isinstance(data, str): - for key, value in substitutions.iteritems(): - data = data.replace('$(%s)' % key, value) - return data - if isinstance(data, list): - return [self._ExpandVariables(v, substitutions) for v in data] - if isinstance(data, dict): - return {k: self._ExpandVariables(data[k], substitutions) for k in data} - return data - -if __name__ == '__main__': - sys.exit(main(sys.argv[1:])) diff --git a/test/addons-napi/test_number/build/test_number.target.mk b/test/addons-napi/test_number/build/test_number.target.mk deleted file mode 100644 index 15e104d6940ccf..00000000000000 --- a/test/addons-napi/test_number/build/test_number.target.mk +++ /dev/null @@ -1,178 +0,0 @@ -# This file is generated by gyp; do not edit. - -TOOLSET := target -TARGET := test_number -DEFS_Debug := \ - '-DNODE_GYP_MODULE_NAME=test_number' \ - '-DUSING_UV_SHARED=1' \ - '-DUSING_V8_SHARED=1' \ - '-DV8_DEPRECATION_WARNINGS=1' \ - '-D_DARWIN_USE_64_BIT_INODE=1' \ - '-D_LARGEFILE_SOURCE' \ - '-D_FILE_OFFSET_BITS=64' \ - '-DBUILDING_NODE_EXTENSION' \ - '-DDEBUG' \ - '-D_DEBUG' \ - '-DV8_ENABLE_CHECKS' - -# Flags passed to all source files. -CFLAGS_Debug := \ - -O0 \ - -gdwarf-2 \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -Wall \ - -Wendif-labels \ - -W \ - -Wno-unused-parameter - -# Flags passed to only C files. -CFLAGS_C_Debug := \ - -fno-strict-aliasing - -# Flags passed to only C++ files. -CFLAGS_CC_Debug := \ - -std=gnu++0x \ - -stdlib=libc++ \ - -fno-rtti \ - -fno-exceptions \ - -fno-threadsafe-statics \ - -fno-strict-aliasing - -# Flags passed to only ObjC files. -CFLAGS_OBJC_Debug := - -# Flags passed to only ObjC++ files. -CFLAGS_OBJCC_Debug := - -INCS_Debug := \ - -I/Users/trott/io.js/include/node \ - -I/Users/trott/io.js/src \ - -I/Users/trott/io.js/deps/uv/include \ - -I/Users/trott/io.js/deps/v8/include - -DEFS_Release := \ - '-DNODE_GYP_MODULE_NAME=test_number' \ - '-DUSING_UV_SHARED=1' \ - '-DUSING_V8_SHARED=1' \ - '-DV8_DEPRECATION_WARNINGS=1' \ - '-D_DARWIN_USE_64_BIT_INODE=1' \ - '-D_LARGEFILE_SOURCE' \ - '-D_FILE_OFFSET_BITS=64' \ - '-DBUILDING_NODE_EXTENSION' - -# Flags passed to all source files. -CFLAGS_Release := \ - -Os \ - -gdwarf-2 \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -Wall \ - -Wendif-labels \ - -W \ - -Wno-unused-parameter - -# Flags passed to only C files. -CFLAGS_C_Release := \ - -fno-strict-aliasing - -# Flags passed to only C++ files. -CFLAGS_CC_Release := \ - -std=gnu++0x \ - -stdlib=libc++ \ - -fno-rtti \ - -fno-exceptions \ - -fno-threadsafe-statics \ - -fno-strict-aliasing - -# Flags passed to only ObjC files. 
-CFLAGS_OBJC_Release := - -# Flags passed to only ObjC++ files. -CFLAGS_OBJCC_Release := - -INCS_Release := \ - -I/Users/trott/io.js/include/node \ - -I/Users/trott/io.js/src \ - -I/Users/trott/io.js/deps/uv/include \ - -I/Users/trott/io.js/deps/v8/include - -OBJS := \ - $(obj).target/$(TARGET)/test_number.o - -# Add to the list of files we specially track dependencies for. -all_deps += $(OBJS) - -# CFLAGS et al overrides must be target-local. -# See "Target-specific Variable Values" in the GNU Make manual. -$(OBJS): TOOLSET := $(TOOLSET) -$(OBJS): GYP_CFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) -$(OBJS): GYP_CXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) -$(OBJS): GYP_OBJCFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) $(CFLAGS_OBJC_$(BUILDTYPE)) -$(OBJS): GYP_OBJCXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) $(CFLAGS_OBJCC_$(BUILDTYPE)) - -# Suffix rules, putting all outputs into $(obj). - -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) - -# Try building from generated source, too. - -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) - -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) - -# End of this set of suffix rules -### Rules for final target. -LDFLAGS_Debug := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -L$(builddir) \ - -stdlib=libc++ - -LIBTOOLFLAGS_Debug := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first - -LDFLAGS_Release := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -L$(builddir) \ - -stdlib=libc++ - -LIBTOOLFLAGS_Release := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first - -LIBS := - -$(builddir)/test_number.node: GYP_LDFLAGS := $(LDFLAGS_$(BUILDTYPE)) -$(builddir)/test_number.node: LIBS := $(LIBS) -$(builddir)/test_number.node: GYP_LIBTOOLFLAGS := $(LIBTOOLFLAGS_$(BUILDTYPE)) -$(builddir)/test_number.node: TOOLSET := $(TOOLSET) -$(builddir)/test_number.node: $(OBJS) FORCE_DO_CMD - $(call do_cmd,solink_module) - -all_deps += $(builddir)/test_number.node -# Add target alias -.PHONY: test_number -test_number: $(builddir)/test_number.node - -# Short alias for building this executable. -.PHONY: test_number.node -test_number.node: $(builddir)/test_number.node - -# Add executable to "all" target. -.PHONY: all -all: $(builddir)/test_number.node - diff --git a/test/addons-napi/test_object/build/Makefile b/test/addons-napi/test_object/build/Makefile deleted file mode 100644 index 56c8c664a2412b..00000000000000 --- a/test/addons-napi/test_object/build/Makefile +++ /dev/null @@ -1,342 +0,0 @@ -# We borrow heavily from the kernel build setup, though we are simpler since -# we don't have Kconfig tweaking settings on us. - -# The implicit make rules have it looking for RCS files, among other things. -# We instead explicitly write all the rules we care about. -# It's even quicker (saves ~200ms) to pass -r on the command line. -MAKEFLAGS=-r - -# The source directory tree. -srcdir := .. -abs_srcdir := $(abspath $(srcdir)) - -# The name of the builddir. -builddir_name ?= . - -# The V=1 flag on command line makes us verbosely print command lines. 
-ifdef V - quiet= -else - quiet=quiet_ -endif - -# Specify BUILDTYPE=Release on the command line for a release build. -BUILDTYPE ?= Release - -# Directory all our build output goes into. -# Note that this must be two directories beneath src/ for unit tests to pass, -# as they reach into the src/ directory for data with relative paths. -builddir ?= $(builddir_name)/$(BUILDTYPE) -abs_builddir := $(abspath $(builddir)) -depsdir := $(builddir)/.deps - -# Object output directory. -obj := $(builddir)/obj -abs_obj := $(abspath $(obj)) - -# We build up a list of every single one of the targets so we can slurp in the -# generated dependency rule Makefiles in one pass. -all_deps := - - - -CC.target ?= $(CC) -CFLAGS.target ?= $(CPPFLAGS) $(CFLAGS) -CXX.target ?= $(CXX) -CXXFLAGS.target ?= $(CPPFLAGS) $(CXXFLAGS) -LINK.target ?= $(LINK) -LDFLAGS.target ?= $(LDFLAGS) -AR.target ?= $(AR) - -# C++ apps need to be linked with g++. -LINK ?= $(CXX.target) - -# TODO(evan): move all cross-compilation logic to gyp-time so we don't need -# to replicate this environment fallback in make as well. -CC.host ?= gcc -CFLAGS.host ?= $(CPPFLAGS_host) $(CFLAGS_host) -CXX.host ?= g++ -CXXFLAGS.host ?= $(CPPFLAGS_host) $(CXXFLAGS_host) -LINK.host ?= $(CXX.host) -LDFLAGS.host ?= -AR.host ?= ar - -# Define a dir function that can handle spaces. -# http://www.gnu.org/software/make/manual/make.html#Syntax-of-Functions -# "leading spaces cannot appear in the text of the first argument as written. -# These characters can be put into the argument value by variable substitution." -empty := -space := $(empty) $(empty) - -# http://stackoverflow.com/questions/1189781/using-make-dir-or-notdir-on-a-path-with-spaces -replace_spaces = $(subst $(space),?,$1) -unreplace_spaces = $(subst ?,$(space),$1) -dirx = $(call unreplace_spaces,$(dir $(call replace_spaces,$1))) - -# Flags to make gcc output dependency info. Note that you need to be -# careful here to use the flags that ccache and distcc can understand. -# We write to a dep file on the side first and then rename at the end -# so we can't end up with a broken dep file. -depfile = $(depsdir)/$(call replace_spaces,$@).d -DEPFLAGS = -MMD -MF $(depfile).raw - -# We have to fixup the deps output in a few ways. -# (1) the file output should mention the proper .o file. -# ccache or distcc lose the path to the target, so we convert a rule of -# the form: -# foobar.o: DEP1 DEP2 -# into -# path/to/foobar.o: DEP1 DEP2 -# (2) we want missing files not to cause us to fail to build. -# We want to rewrite -# foobar.o: DEP1 DEP2 \ -# DEP3 -# to -# DEP1: -# DEP2: -# DEP3: -# so if the files are missing, they're just considered phony rules. -# We have to do some pretty insane escaping to get those backslashes -# and dollar signs past make, the shell, and sed at the same time. -# Doesn't work with spaces, but that's fine: .d files have spaces in -# their names replaced with other characters. -define fixup_dep -# The depfile may not exist if the input file didn't have any #includes. -touch $(depfile).raw -# Fixup path as in (1). -sed -e "s|^$(notdir $@)|$@|" $(depfile).raw >> $(depfile) -# Add extra rules as in (2). -# We remove slashes and replace spaces with new lines; -# remove blank lines; -# delete the first line and append a colon to the remaining lines. 
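The comment block above, together with the fixup_dep sed pipeline that immediately follows, rewrites each compiler-generated .d file in two ways: the rule is re-anchored on the full path of the object file, and every dependency is additionally emitted as an empty rule so a deleted header behaves like a phony target instead of breaking the build. A rough Python 3 sketch of the same transformation (sed's append-to-$(depfile) bookkeeping is left out, and the helper name is hypothetical):

import os
import re

def fixup_dep(raw, target):
    text = raw.replace('\\\n', ' ')                        # join continuation lines
    fixed = re.sub(r'^%s' % re.escape(os.path.basename(target)),
                   target, text, count=1)                  # (1) use the full object path
    deps = fixed.split(':', 1)[1].split()                  # everything right of the colon
    return fixed + ''.join('%s:\n' % dep for dep in deps)  # (2) add empty per-dep rules

raw = 'foobar.o: src/foo.h \\\n src/bar.h\n'
print(fixup_dep(raw, 'out/Release/obj.target/foobar.o'))
# out/Release/obj.target/foobar.o: src/foo.h  src/bar.h
# src/foo.h:
# src/bar.h: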
-sed -e 's|\\||' -e 'y| |\n|' $(depfile).raw |\ - grep -v '^$$' |\ - sed -e 1d -e 's|$$|:|' \ - >> $(depfile) -rm $(depfile).raw -endef - -# Command definitions: -# - cmd_foo is the actual command to run; -# - quiet_cmd_foo is the brief-output summary of the command. - -quiet_cmd_cc = CC($(TOOLSET)) $@ -cmd_cc = $(CC.$(TOOLSET)) $(GYP_CFLAGS) $(DEPFLAGS) $(CFLAGS.$(TOOLSET)) -c -o $@ $< - -quiet_cmd_cxx = CXX($(TOOLSET)) $@ -cmd_cxx = $(CXX.$(TOOLSET)) $(GYP_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< - -quiet_cmd_objc = CXX($(TOOLSET)) $@ -cmd_objc = $(CC.$(TOOLSET)) $(GYP_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $< - -quiet_cmd_objcxx = CXX($(TOOLSET)) $@ -cmd_objcxx = $(CXX.$(TOOLSET)) $(GYP_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $< - -# Commands for precompiled header files. -quiet_cmd_pch_c = CXX($(TOOLSET)) $@ -cmd_pch_c = $(CC.$(TOOLSET)) $(GYP_PCH_CFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< -quiet_cmd_pch_cc = CXX($(TOOLSET)) $@ -cmd_pch_cc = $(CC.$(TOOLSET)) $(GYP_PCH_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< -quiet_cmd_pch_m = CXX($(TOOLSET)) $@ -cmd_pch_m = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $< -quiet_cmd_pch_mm = CXX($(TOOLSET)) $@ -cmd_pch_mm = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $< - -# gyp-mac-tool is written next to the root Makefile by gyp. -# Use $(4) for the command, since $(2) and $(3) are used as flag by do_cmd -# already. -quiet_cmd_mac_tool = MACTOOL $(4) $< -cmd_mac_tool = ./gyp-mac-tool $(4) $< "$@" - -quiet_cmd_mac_package_framework = PACKAGE FRAMEWORK $@ -cmd_mac_package_framework = ./gyp-mac-tool package-framework "$@" $(4) - -quiet_cmd_infoplist = INFOPLIST $@ -cmd_infoplist = $(CC.$(TOOLSET)) -E -P -Wno-trigraphs -x c $(INFOPLIST_DEFINES) "$<" -o "$@" - -quiet_cmd_touch = TOUCH $@ -cmd_touch = touch $@ - -quiet_cmd_copy = COPY $@ -# send stderr to /dev/null to ignore messages when linking directories. -cmd_copy = rm -rf "$@" && cp -af "$<" "$@" - -quiet_cmd_alink = LIBTOOL-STATIC $@ -cmd_alink = rm -f $@ && ./gyp-mac-tool filter-libtool libtool $(GYP_LIBTOOLFLAGS) -static -o $@ $(filter %.o,$^) - -quiet_cmd_link = LINK($(TOOLSET)) $@ -cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS) - -quiet_cmd_solink = SOLINK($(TOOLSET)) $@ -cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS) - -quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@ -cmd_solink_module = $(LINK.$(TOOLSET)) -bundle $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS) - - -# Define an escape_quotes function to escape single quotes. -# This allows us to handle quotes properly as long as we always use -# use single quotes and escape_quotes. -escape_quotes = $(subst ','\'',$(1)) -# This comment is here just to include a ' to unconfuse syntax highlighting. -# Define an escape_vars function to escape '$' variable syntax. -# This allows us to read/write command lines with shell variables (e.g. -# $LD_LIBRARY_PATH), without triggering make substitution. -escape_vars = $(subst $$,$$$$,$(1)) -# Helper that expands to a shell command to echo a string exactly as it is in -# make. This uses printf instead of echo because printf's behaviour with respect -# to escape sequences is more portable than echo's across different shells -# (e.g., dash, bash). 
-exact_echo = printf '%s\n' '$(call escape_quotes,$(1))' - -# Helper to compare the command we're about to run against the command -# we logged the last time we ran the command. Produces an empty -# string (false) when the commands match. -# Tricky point: Make has no string-equality test function. -# The kernel uses the following, but it seems like it would have false -# positives, where one string reordered its arguments. -# arg_check = $(strip $(filter-out $(cmd_$(1)), $(cmd_$@)) \ -# $(filter-out $(cmd_$@), $(cmd_$(1)))) -# We instead substitute each for the empty string into the other, and -# say they're equal if both substitutions produce the empty string. -# .d files contain ? instead of spaces, take that into account. -command_changed = $(or $(subst $(cmd_$(1)),,$(cmd_$(call replace_spaces,$@))),\ - $(subst $(cmd_$(call replace_spaces,$@)),,$(cmd_$(1)))) - -# Helper that is non-empty when a prerequisite changes. -# Normally make does this implicitly, but we force rules to always run -# so we can check their command lines. -# $? -- new prerequisites -# $| -- order-only dependencies -prereq_changed = $(filter-out FORCE_DO_CMD,$(filter-out $|,$?)) - -# Helper that executes all postbuilds until one fails. -define do_postbuilds - @E=0;\ - for p in $(POSTBUILDS); do\ - eval $$p;\ - E=$$?;\ - if [ $$E -ne 0 ]; then\ - break;\ - fi;\ - done;\ - if [ $$E -ne 0 ]; then\ - rm -rf "$@";\ - exit $$E;\ - fi -endef - -# do_cmd: run a command via the above cmd_foo names, if necessary. -# Should always run for a given target to handle command-line changes. -# Second argument, if non-zero, makes it do asm/C/C++ dependency munging. -# Third argument, if non-zero, makes it do POSTBUILDS processing. -# Note: We intentionally do NOT call dirx for depfile, since it contains ? for -# spaces already and dirx strips the ? characters. -define do_cmd -$(if $(or $(command_changed),$(prereq_changed)), - @$(call exact_echo, $($(quiet)cmd_$(1))) - @mkdir -p "$(call dirx,$@)" "$(dir $(depfile))" - $(if $(findstring flock,$(word 2,$(cmd_$1))), - @$(cmd_$(1)) - @echo " $(quiet_cmd_$(1)): Finished", - @$(cmd_$(1)) - ) - @$(call exact_echo,$(call escape_vars,cmd_$(call replace_spaces,$@) := $(cmd_$(1)))) > $(depfile) - @$(if $(2),$(fixup_dep)) - $(if $(and $(3), $(POSTBUILDS)), - $(call do_postbuilds) - ) -) -endef - -# Declare the "all" target first so it is the default, -# even though we don't have the deps yet. -.PHONY: all -all: - -# make looks for ways to re-generate included makefiles, but in our case, we -# don't have a direct way. Explicitly telling make that it has nothing to do -# for them makes it go faster. -%.d: ; - -# Use FORCE_DO_CMD to force a target to run. Should be coupled with -# do_cmd. -.PHONY: FORCE_DO_CMD -FORCE_DO_CMD: - -TOOLSET := target -# Suffix rules, putting all outputs into $(obj). -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.m FORCE_DO_CMD - @$(call do_cmd,objc,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.mm FORCE_DO_CMD - @$(call do_cmd,objcxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.S FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.s FORCE_DO_CMD - @$(call do_cmd,cc,1) - -# Try building from generated source, too. 
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.m FORCE_DO_CMD - @$(call do_cmd,objc,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.mm FORCE_DO_CMD - @$(call do_cmd,objcxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.S FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.s FORCE_DO_CMD - @$(call do_cmd,cc,1) - -$(obj).$(TOOLSET)/%.o: $(obj)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.m FORCE_DO_CMD - @$(call do_cmd,objc,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.mm FORCE_DO_CMD - @$(call do_cmd,objcxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.S FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.s FORCE_DO_CMD - @$(call do_cmd,cc,1) - - -ifeq ($(strip $(foreach prefix,$(NO_LOAD),\ - $(findstring $(join ^,$(prefix)),\ - $(join ^,test_object.target.mk)))),) - include test_object.target.mk -endif - -quiet_cmd_regen_makefile = ACTION Regenerating $@ -cmd_regen_makefile = cd $(srcdir); /Users/trott/io.js/deps/npm/node_modules/node-gyp/gyp/gyp_main.py -fmake --ignore-environment "--toplevel-dir=." -I/Users/trott/io.js/test/addons-napi/test_object/build/config.gypi -I/Users/trott/io.js/deps/npm/node_modules/node-gyp/addon.gypi -I/Users/trott/io.js/common.gypi "--depth=." "-Goutput_dir=." "--generator-output=build" "-Dlibrary=shared_library" "-Dvisibility=default" "-Dnode_root_dir=/Users/trott/io.js" "-Dnode_gyp_dir=/Users/trott/io.js/deps/npm/node_modules/node-gyp" "-Dnode_lib_file=/Users/trott/io.js/$(Configuration)/node.lib" "-Dmodule_root_dir=/Users/trott/io.js/test/addons-napi/test_object" "-Dnode_engine=v8" binding.gyp -Makefile: $(srcdir)/../../../deps/npm/node_modules/node-gyp/addon.gypi $(srcdir)/build/config.gypi $(srcdir)/binding.gyp $(srcdir)/../../../common.gypi - $(call do_cmd,regen_makefile) - -# "all" is a concatenation of the "all" targets from all the included -# sub-makefiles. This is just here to clarify. -all: - -# Add in dependency-tracking rules. $(all_deps) is the list of every single -# target in our tree. Only consider the ones with .d (dependency) info: -d_files := $(wildcard $(foreach f,$(all_deps),$(depsdir)/$(f).d)) -ifneq ($(d_files),) - include $(d_files) -endif diff --git a/test/addons-napi/test_object/build/binding.Makefile b/test/addons-napi/test_object/build/binding.Makefile deleted file mode 100644 index 1e087a29868db5..00000000000000 --- a/test/addons-napi/test_object/build/binding.Makefile +++ /dev/null @@ -1,6 +0,0 @@ -# This file is generated by gyp; do not edit. - -export builddir_name ?= ./build/. -.PHONY: all -all: - $(MAKE) test_object diff --git a/test/addons-napi/test_object/build/config.gypi b/test/addons-napi/test_object/build/config.gypi deleted file mode 100644 index 6a35cfccbb772b..00000000000000 --- a/test/addons-napi/test_object/build/config.gypi +++ /dev/null @@ -1,70 +0,0 @@ -# Do not edit. 
File was generated by node-gyp's "configure" step -{ - "target_defaults": { - "cflags": [], - "default_configuration": "Release", - "defines": [], - "include_dirs": [], - "libraries": [] - }, - "variables": { - "asan": 0, - "coverage": "false", - "debug_devtools": "node", - "debug_http2": "false", - "debug_nghttp2": "false", - "force_dynamic_crt": 0, - "host_arch": "x64", - "icu_data_file": "icudt59l.dat", - "icu_data_in": "../../deps/icu-small/source/data/in/icudt59l.dat", - "icu_endianness": "l", - "icu_gyp_path": "tools/icu/icu-generic.gyp", - "icu_locales": "en,root", - "icu_path": "deps/icu-small", - "icu_small": "true", - "icu_ver_major": "59", - "llvm_version": 0, - "node_byteorder": "little", - "node_enable_d8": "false", - "node_enable_v8_vtunejit": "false", - "node_install_npm": "true", - "node_module_version": 58, - "node_no_browser_globals": "false", - "node_prefix": "/usr/local", - "node_release_urlbase": "", - "node_shared": "false", - "node_shared_cares": "false", - "node_shared_http_parser": "false", - "node_shared_libuv": "false", - "node_shared_openssl": "false", - "node_shared_zlib": "false", - "node_tag": "", - "node_use_bundled_v8": "true", - "node_use_dtrace": "true", - "node_use_etw": "false", - "node_use_lttng": "false", - "node_use_openssl": "true", - "node_use_perfctr": "false", - "node_use_v8_platform": "true", - "node_without_node_options": "false", - "openssl_fips": "", - "openssl_no_asm": 0, - "shlib_suffix": "58.dylib", - "target_arch": "x64", - "uv_parent_path": "/deps/uv/", - "uv_use_dtrace": "true", - "v8_enable_gdbjit": 0, - "v8_enable_i18n_support": 1, - "v8_enable_inspector": 1, - "v8_no_strict_aliasing": 1, - "v8_optimized_debug": 0, - "v8_promise_internal_field_count": 1, - "v8_random_seed": 0, - "v8_trace_maps": 0, - "v8_use_snapshot": "true", - "want_separate_host_toolset": 0, - "xcode_version": "8.0", - "nodedir": "/Users/trott/io.js", - "standalone_static_library": 1 - } -} diff --git a/test/addons-napi/test_object/build/gyp-mac-tool b/test/addons-napi/test_object/build/gyp-mac-tool deleted file mode 100755 index 8ef02b0493a003..00000000000000 --- a/test/addons-napi/test_object/build/gyp-mac-tool +++ /dev/null @@ -1,611 +0,0 @@ -#!/usr/bin/env python -# Generated by gyp. Do not edit. -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Utility functions to perform Xcode-style build steps. - -These functions are executed via gyp-mac-tool when using the Makefile generator. -""" - -import fcntl -import fnmatch -import glob -import json -import os -import plistlib -import re -import shutil -import string -import subprocess -import sys -import tempfile - - -def main(args): - executor = MacTool() - exit_code = executor.Dispatch(args) - if exit_code is not None: - sys.exit(exit_code) - - -class MacTool(object): - """This class performs all the Mac tooling steps. 
The methods can either be - executed directly, or dispatched from an argument list.""" - - def Dispatch(self, args): - """Dispatches a string command to a method.""" - if len(args) < 1: - raise Exception("Not enough arguments") - - method = "Exec%s" % self._CommandifyName(args[0]) - return getattr(self, method)(*args[1:]) - - def _CommandifyName(self, name_string): - """Transforms a tool name like copy-info-plist to CopyInfoPlist""" - return name_string.title().replace('-', '') - - def ExecCopyBundleResource(self, source, dest, convert_to_binary): - """Copies a resource file to the bundle/Resources directory, performing any - necessary compilation on each resource.""" - extension = os.path.splitext(source)[1].lower() - if os.path.isdir(source): - # Copy tree. - # TODO(thakis): This copies file attributes like mtime, while the - # single-file branch below doesn't. This should probably be changed to - # be consistent with the single-file branch. - if os.path.exists(dest): - shutil.rmtree(dest) - shutil.copytree(source, dest) - elif extension == '.xib': - return self._CopyXIBFile(source, dest) - elif extension == '.storyboard': - return self._CopyXIBFile(source, dest) - elif extension == '.strings': - self._CopyStringsFile(source, dest, convert_to_binary) - else: - shutil.copy(source, dest) - - def _CopyXIBFile(self, source, dest): - """Compiles a XIB file with ibtool into a binary plist in the bundle.""" - - # ibtool sometimes crashes with relative paths. See crbug.com/314728. - base = os.path.dirname(os.path.realpath(__file__)) - if os.path.relpath(source): - source = os.path.join(base, source) - if os.path.relpath(dest): - dest = os.path.join(base, dest) - - args = ['xcrun', 'ibtool', '--errors', '--warnings', '--notices', - '--output-format', 'human-readable-text', '--compile', dest, source] - ibtool_section_re = re.compile(r'/\*.*\*/') - ibtool_re = re.compile(r'.*note:.*is clipping its content') - ibtoolout = subprocess.Popen(args, stdout=subprocess.PIPE) - current_section_header = None - for line in ibtoolout.stdout: - if ibtool_section_re.match(line): - current_section_header = line - elif not ibtool_re.match(line): - if current_section_header: - sys.stdout.write(current_section_header) - current_section_header = None - sys.stdout.write(line) - return ibtoolout.returncode - - def _ConvertToBinary(self, dest): - subprocess.check_call([ - 'xcrun', 'plutil', '-convert', 'binary1', '-o', dest, dest]) - - def _CopyStringsFile(self, source, dest, convert_to_binary): - """Copies a .strings file using iconv to reconvert the input into UTF-16.""" - input_code = self._DetectInputEncoding(source) or "UTF-8" - - # Xcode's CpyCopyStringsFile / builtin-copyStrings seems to call - # CFPropertyListCreateFromXMLData() behind the scenes; at least it prints - # CFPropertyListCreateFromXMLData(): Old-style plist parser: missing - # semicolon in dictionary. - # on invalid files. Do the same kind of validation. - import CoreFoundation - s = open(source, 'rb').read() - d = CoreFoundation.CFDataCreate(None, s, len(s)) - _, error = CoreFoundation.CFPropertyListCreateFromXMLData(None, d, 0, None) - if error: - return - - fp = open(dest, 'wb') - fp.write(s.decode(input_code).encode('UTF-16')) - fp.close() - - if convert_to_binary == 'True': - self._ConvertToBinary(dest) - - def _DetectInputEncoding(self, file_name): - """Reads the first few bytes from file_name and tries to guess the text - encoding. 
Returns None as a guess if it can't detect it.""" - fp = open(file_name, 'rb') - try: - header = fp.read(3) - except e: - fp.close() - return None - fp.close() - if header.startswith("\xFE\xFF"): - return "UTF-16" - elif header.startswith("\xFF\xFE"): - return "UTF-16" - elif header.startswith("\xEF\xBB\xBF"): - return "UTF-8" - else: - return None - - def ExecCopyInfoPlist(self, source, dest, convert_to_binary, *keys): - """Copies the |source| Info.plist to the destination directory |dest|.""" - # Read the source Info.plist into memory. - fd = open(source, 'r') - lines = fd.read() - fd.close() - - # Insert synthesized key/value pairs (e.g. BuildMachineOSBuild). - plist = plistlib.readPlistFromString(lines) - if keys: - plist = dict(plist.items() + json.loads(keys[0]).items()) - lines = plistlib.writePlistToString(plist) - - # Go through all the environment variables and replace them as variables in - # the file. - IDENT_RE = re.compile(r'[/\s]') - for key in os.environ: - if key.startswith('_'): - continue - evar = '${%s}' % key - evalue = os.environ[key] - lines = string.replace(lines, evar, evalue) - - # Xcode supports various suffices on environment variables, which are - # all undocumented. :rfc1034identifier is used in the standard project - # template these days, and :identifier was used earlier. They are used to - # convert non-url characters into things that look like valid urls -- - # except that the replacement character for :identifier, '_' isn't valid - # in a URL either -- oops, hence :rfc1034identifier was born. - evar = '${%s:identifier}' % key - evalue = IDENT_RE.sub('_', os.environ[key]) - lines = string.replace(lines, evar, evalue) - - evar = '${%s:rfc1034identifier}' % key - evalue = IDENT_RE.sub('-', os.environ[key]) - lines = string.replace(lines, evar, evalue) - - # Remove any keys with values that haven't been replaced. - lines = lines.split('\n') - for i in range(len(lines)): - if lines[i].strip().startswith("${"): - lines[i] = None - lines[i - 1] = None - lines = '\n'.join(filter(lambda x: x is not None, lines)) - - # Write out the file with variables replaced. - fd = open(dest, 'w') - fd.write(lines) - fd.close() - - # Now write out PkgInfo file now that the Info.plist file has been - # "compiled". - self._WritePkgInfo(dest) - - if convert_to_binary == 'True': - self._ConvertToBinary(dest) - - def _WritePkgInfo(self, info_plist): - """This writes the PkgInfo file from the data stored in Info.plist.""" - plist = plistlib.readPlist(info_plist) - if not plist: - return - - # Only create PkgInfo for executable types. - package_type = plist['CFBundlePackageType'] - if package_type != 'APPL': - return - - # The format of PkgInfo is eight characters, representing the bundle type - # and bundle signature, each four characters. If that is missing, four - # '?' characters are used instead. - signature_code = plist.get('CFBundleSignature', '????') - if len(signature_code) != 4: # Wrong length resets everything, too. - signature_code = '?' * 4 - - dest = os.path.join(os.path.dirname(info_plist), 'PkgInfo') - fp = open(dest, 'w') - fp.write('%s%s' % (package_type, signature_code)) - fp.close() - - def ExecFlock(self, lockfile, *cmd_list): - """Emulates the most basic behavior of Linux's flock(1).""" - # Rely on exception handling to report errors. 
- fd = os.open(lockfile, os.O_RDONLY|os.O_NOCTTY|os.O_CREAT, 0o666) - fcntl.flock(fd, fcntl.LOCK_EX) - return subprocess.call(cmd_list) - - def ExecFilterLibtool(self, *cmd_list): - """Calls libtool and filters out '/path/to/libtool: file: foo.o has no - symbols'.""" - libtool_re = re.compile(r'^.*libtool: file: .* has no symbols$') - libtool_re5 = re.compile( - r'^.*libtool: warning for library: ' + - r'.* the table of contents is empty ' + - r'\(no object file members in the library define global symbols\)$') - env = os.environ.copy() - # Ref: - # http://www.opensource.apple.com/source/cctools/cctools-809/misc/libtool.c - # The problem with this flag is that it resets the file mtime on the file to - # epoch=0, e.g. 1970-1-1 or 1969-12-31 depending on timezone. - env['ZERO_AR_DATE'] = '1' - libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env) - _, err = libtoolout.communicate() - for line in err.splitlines(): - if not libtool_re.match(line) and not libtool_re5.match(line): - print >>sys.stderr, line - # Unconditionally touch the output .a file on the command line if present - # and the command succeeded. A bit hacky. - if not libtoolout.returncode: - for i in range(len(cmd_list) - 1): - if cmd_list[i] == "-o" and cmd_list[i+1].endswith('.a'): - os.utime(cmd_list[i+1], None) - break - return libtoolout.returncode - - def ExecPackageFramework(self, framework, version): - """Takes a path to Something.framework and the Current version of that and - sets up all the symlinks.""" - # Find the name of the binary based on the part before the ".framework". - binary = os.path.basename(framework).split('.')[0] - - CURRENT = 'Current' - RESOURCES = 'Resources' - VERSIONS = 'Versions' - - if not os.path.exists(os.path.join(framework, VERSIONS, version, binary)): - # Binary-less frameworks don't seem to contain symlinks (see e.g. - # chromium's out/Debug/org.chromium.Chromium.manifest/ bundle). - return - - # Move into the framework directory to set the symlinks correctly. - pwd = os.getcwd() - os.chdir(framework) - - # Set up the Current version. - self._Relink(version, os.path.join(VERSIONS, CURRENT)) - - # Set up the root symlinks. - self._Relink(os.path.join(VERSIONS, CURRENT, binary), binary) - self._Relink(os.path.join(VERSIONS, CURRENT, RESOURCES), RESOURCES) - - # Back to where we were before! - os.chdir(pwd) - - def _Relink(self, dest, link): - """Creates a symlink to |dest| named |link|. If |link| already exists, - it is overwritten.""" - if os.path.lexists(link): - os.remove(link) - os.symlink(dest, link) - - def ExecCompileXcassets(self, keys, *inputs): - """Compiles multiple .xcassets files into a single .car file. - - This invokes 'actool' to compile all the inputs .xcassets files. The - |keys| arguments is a json-encoded dictionary of extra arguments to - pass to 'actool' when the asset catalogs contains an application icon - or a launch image. - - Note that 'actool' does not create the Assets.car file if the asset - catalogs does not contains imageset. 
- """ - command_line = [ - 'xcrun', 'actool', '--output-format', 'human-readable-text', - '--compress-pngs', '--notices', '--warnings', '--errors', - ] - is_iphone_target = 'IPHONEOS_DEPLOYMENT_TARGET' in os.environ - if is_iphone_target: - platform = os.environ['CONFIGURATION'].split('-')[-1] - if platform not in ('iphoneos', 'iphonesimulator'): - platform = 'iphonesimulator' - command_line.extend([ - '--platform', platform, '--target-device', 'iphone', - '--target-device', 'ipad', '--minimum-deployment-target', - os.environ['IPHONEOS_DEPLOYMENT_TARGET'], '--compile', - os.path.abspath(os.environ['CONTENTS_FOLDER_PATH']), - ]) - else: - command_line.extend([ - '--platform', 'macosx', '--target-device', 'mac', - '--minimum-deployment-target', os.environ['MACOSX_DEPLOYMENT_TARGET'], - '--compile', - os.path.abspath(os.environ['UNLOCALIZED_RESOURCES_FOLDER_PATH']), - ]) - if keys: - keys = json.loads(keys) - for key, value in keys.iteritems(): - arg_name = '--' + key - if isinstance(value, bool): - if value: - command_line.append(arg_name) - elif isinstance(value, list): - for v in value: - command_line.append(arg_name) - command_line.append(str(v)) - else: - command_line.append(arg_name) - command_line.append(str(value)) - # Note: actool crashes if inputs path are relative, so use os.path.abspath - # to get absolute path name for inputs. - command_line.extend(map(os.path.abspath, inputs)) - subprocess.check_call(command_line) - - def ExecMergeInfoPlist(self, output, *inputs): - """Merge multiple .plist files into a single .plist file.""" - merged_plist = {} - for path in inputs: - plist = self._LoadPlistMaybeBinary(path) - self._MergePlist(merged_plist, plist) - plistlib.writePlist(merged_plist, output) - - def ExecCodeSignBundle(self, key, resource_rules, entitlements, provisioning): - """Code sign a bundle. - - This function tries to code sign an iOS bundle, following the same - algorithm as Xcode: - 1. copy ResourceRules.plist from the user or the SDK into the bundle, - 2. pick the provisioning profile that best match the bundle identifier, - and copy it into the bundle as embedded.mobileprovision, - 3. copy Entitlements.plist from user or SDK next to the bundle, - 4. code sign the bundle. - """ - resource_rules_path = self._InstallResourceRules(resource_rules) - substitutions, overrides = self._InstallProvisioningProfile( - provisioning, self._GetCFBundleIdentifier()) - entitlements_path = self._InstallEntitlements( - entitlements, substitutions, overrides) - subprocess.check_call([ - 'codesign', '--force', '--sign', key, '--resource-rules', - resource_rules_path, '--entitlements', entitlements_path, - os.path.join( - os.environ['TARGET_BUILD_DIR'], - os.environ['FULL_PRODUCT_NAME'])]) - - def _InstallResourceRules(self, resource_rules): - """Installs ResourceRules.plist from user or SDK into the bundle. - - Args: - resource_rules: string, optional, path to the ResourceRules.plist file - to use, default to "${SDKROOT}/ResourceRules.plist" - - Returns: - Path to the copy of ResourceRules.plist into the bundle. - """ - source_path = resource_rules - target_path = os.path.join( - os.environ['BUILT_PRODUCTS_DIR'], - os.environ['CONTENTS_FOLDER_PATH'], - 'ResourceRules.plist') - if not source_path: - source_path = os.path.join( - os.environ['SDKROOT'], 'ResourceRules.plist') - shutil.copy2(source_path, target_path) - return target_path - - def _InstallProvisioningProfile(self, profile, bundle_identifier): - """Installs embedded.mobileprovision into the bundle. 
- - Args: - profile: string, optional, short name of the .mobileprovision file - to use, if empty or the file is missing, the best file installed - will be used - bundle_identifier: string, value of CFBundleIdentifier from Info.plist - - Returns: - A tuple containing two dictionary: variables substitutions and values - to overrides when generating the entitlements file. - """ - source_path, provisioning_data, team_id = self._FindProvisioningProfile( - profile, bundle_identifier) - target_path = os.path.join( - os.environ['BUILT_PRODUCTS_DIR'], - os.environ['CONTENTS_FOLDER_PATH'], - 'embedded.mobileprovision') - shutil.copy2(source_path, target_path) - substitutions = self._GetSubstitutions(bundle_identifier, team_id + '.') - return substitutions, provisioning_data['Entitlements'] - - def _FindProvisioningProfile(self, profile, bundle_identifier): - """Finds the .mobileprovision file to use for signing the bundle. - - Checks all the installed provisioning profiles (or if the user specified - the PROVISIONING_PROFILE variable, only consult it) and select the most - specific that correspond to the bundle identifier. - - Args: - profile: string, optional, short name of the .mobileprovision file - to use, if empty or the file is missing, the best file installed - will be used - bundle_identifier: string, value of CFBundleIdentifier from Info.plist - - Returns: - A tuple of the path to the selected provisioning profile, the data of - the embedded plist in the provisioning profile and the team identifier - to use for code signing. - - Raises: - SystemExit: if no .mobileprovision can be used to sign the bundle. - """ - profiles_dir = os.path.join( - os.environ['HOME'], 'Library', 'MobileDevice', 'Provisioning Profiles') - if not os.path.isdir(profiles_dir): - print >>sys.stderr, ( - 'cannot find mobile provisioning for %s' % bundle_identifier) - sys.exit(1) - provisioning_profiles = None - if profile: - profile_path = os.path.join(profiles_dir, profile + '.mobileprovision') - if os.path.exists(profile_path): - provisioning_profiles = [profile_path] - if not provisioning_profiles: - provisioning_profiles = glob.glob( - os.path.join(profiles_dir, '*.mobileprovision')) - valid_provisioning_profiles = {} - for profile_path in provisioning_profiles: - profile_data = self._LoadProvisioningProfile(profile_path) - app_id_pattern = profile_data.get( - 'Entitlements', {}).get('application-identifier', '') - for team_identifier in profile_data.get('TeamIdentifier', []): - app_id = '%s.%s' % (team_identifier, bundle_identifier) - if fnmatch.fnmatch(app_id, app_id_pattern): - valid_provisioning_profiles[app_id_pattern] = ( - profile_path, profile_data, team_identifier) - if not valid_provisioning_profiles: - print >>sys.stderr, ( - 'cannot find mobile provisioning for %s' % bundle_identifier) - sys.exit(1) - # If the user has multiple provisioning profiles installed that can be - # used for ${bundle_identifier}, pick the most specific one (ie. the - # provisioning profile whose pattern is the longest). - selected_key = max(valid_provisioning_profiles, key=lambda v: len(v)) - return valid_provisioning_profiles[selected_key] - - def _LoadProvisioningProfile(self, profile_path): - """Extracts the plist embedded in a provisioning profile. - - Args: - profile_path: string, path to the .mobileprovision file - - Returns: - Content of the plist embedded in the provisioning profile as a dictionary. 
- """ - with tempfile.NamedTemporaryFile() as temp: - subprocess.check_call([ - 'security', 'cms', '-D', '-i', profile_path, '-o', temp.name]) - return self._LoadPlistMaybeBinary(temp.name) - - def _MergePlist(self, merged_plist, plist): - """Merge |plist| into |merged_plist|.""" - for key, value in plist.iteritems(): - if isinstance(value, dict): - merged_value = merged_plist.get(key, {}) - if isinstance(merged_value, dict): - self._MergePlist(merged_value, value) - merged_plist[key] = merged_value - else: - merged_plist[key] = value - else: - merged_plist[key] = value - - def _LoadPlistMaybeBinary(self, plist_path): - """Loads into a memory a plist possibly encoded in binary format. - - This is a wrapper around plistlib.readPlist that tries to convert the - plist to the XML format if it can't be parsed (assuming that it is in - the binary format). - - Args: - plist_path: string, path to a plist file, in XML or binary format - - Returns: - Content of the plist as a dictionary. - """ - try: - # First, try to read the file using plistlib that only supports XML, - # and if an exception is raised, convert a temporary copy to XML and - # load that copy. - return plistlib.readPlist(plist_path) - except: - pass - with tempfile.NamedTemporaryFile() as temp: - shutil.copy2(plist_path, temp.name) - subprocess.check_call(['plutil', '-convert', 'xml1', temp.name]) - return plistlib.readPlist(temp.name) - - def _GetSubstitutions(self, bundle_identifier, app_identifier_prefix): - """Constructs a dictionary of variable substitutions for Entitlements.plist. - - Args: - bundle_identifier: string, value of CFBundleIdentifier from Info.plist - app_identifier_prefix: string, value for AppIdentifierPrefix - - Returns: - Dictionary of substitutions to apply when generating Entitlements.plist. - """ - return { - 'CFBundleIdentifier': bundle_identifier, - 'AppIdentifierPrefix': app_identifier_prefix, - } - - def _GetCFBundleIdentifier(self): - """Extracts CFBundleIdentifier value from Info.plist in the bundle. - - Returns: - Value of CFBundleIdentifier in the Info.plist located in the bundle. - """ - info_plist_path = os.path.join( - os.environ['TARGET_BUILD_DIR'], - os.environ['INFOPLIST_PATH']) - info_plist_data = self._LoadPlistMaybeBinary(info_plist_path) - return info_plist_data['CFBundleIdentifier'] - - def _InstallEntitlements(self, entitlements, substitutions, overrides): - """Generates and install the ${BundleName}.xcent entitlements file. - - Expands variables "$(variable)" pattern in the source entitlements file, - add extra entitlements defined in the .mobileprovision file and the copy - the generated plist to "${BundlePath}.xcent". - - Args: - entitlements: string, optional, path to the Entitlements.plist template - to use, defaults to "${SDKROOT}/Entitlements.plist" - substitutions: dictionary, variable substitutions - overrides: dictionary, values to add to the entitlements - - Returns: - Path to the generated entitlements file. 
- """ - source_path = entitlements - target_path = os.path.join( - os.environ['BUILT_PRODUCTS_DIR'], - os.environ['PRODUCT_NAME'] + '.xcent') - if not source_path: - source_path = os.path.join( - os.environ['SDKROOT'], - 'Entitlements.plist') - shutil.copy2(source_path, target_path) - data = self._LoadPlistMaybeBinary(target_path) - data = self._ExpandVariables(data, substitutions) - if overrides: - for key in overrides: - if key not in data: - data[key] = overrides[key] - plistlib.writePlist(data, target_path) - return target_path - - def _ExpandVariables(self, data, substitutions): - """Expands variables "$(variable)" in data. - - Args: - data: object, can be either string, list or dictionary - substitutions: dictionary, variable substitutions to perform - - Returns: - Copy of data where each references to "$(variable)" has been replaced - by the corresponding value found in substitutions, or left intact if - the key was not found. - """ - if isinstance(data, str): - for key, value in substitutions.iteritems(): - data = data.replace('$(%s)' % key, value) - return data - if isinstance(data, list): - return [self._ExpandVariables(v, substitutions) for v in data] - if isinstance(data, dict): - return {k: self._ExpandVariables(data[k], substitutions) for k in data} - return data - -if __name__ == '__main__': - sys.exit(main(sys.argv[1:])) diff --git a/test/addons-napi/test_object/build/test_object.target.mk b/test/addons-napi/test_object/build/test_object.target.mk deleted file mode 100644 index 511a91b3b58d5f..00000000000000 --- a/test/addons-napi/test_object/build/test_object.target.mk +++ /dev/null @@ -1,178 +0,0 @@ -# This file is generated by gyp; do not edit. - -TOOLSET := target -TARGET := test_object -DEFS_Debug := \ - '-DNODE_GYP_MODULE_NAME=test_object' \ - '-DUSING_UV_SHARED=1' \ - '-DUSING_V8_SHARED=1' \ - '-DV8_DEPRECATION_WARNINGS=1' \ - '-D_DARWIN_USE_64_BIT_INODE=1' \ - '-D_LARGEFILE_SOURCE' \ - '-D_FILE_OFFSET_BITS=64' \ - '-DBUILDING_NODE_EXTENSION' \ - '-DDEBUG' \ - '-D_DEBUG' \ - '-DV8_ENABLE_CHECKS' - -# Flags passed to all source files. -CFLAGS_Debug := \ - -O0 \ - -gdwarf-2 \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -Wall \ - -Wendif-labels \ - -W \ - -Wno-unused-parameter - -# Flags passed to only C files. -CFLAGS_C_Debug := \ - -fno-strict-aliasing - -# Flags passed to only C++ files. -CFLAGS_CC_Debug := \ - -std=gnu++0x \ - -stdlib=libc++ \ - -fno-rtti \ - -fno-exceptions \ - -fno-threadsafe-statics \ - -fno-strict-aliasing - -# Flags passed to only ObjC files. -CFLAGS_OBJC_Debug := - -# Flags passed to only ObjC++ files. -CFLAGS_OBJCC_Debug := - -INCS_Debug := \ - -I/Users/trott/io.js/include/node \ - -I/Users/trott/io.js/src \ - -I/Users/trott/io.js/deps/uv/include \ - -I/Users/trott/io.js/deps/v8/include - -DEFS_Release := \ - '-DNODE_GYP_MODULE_NAME=test_object' \ - '-DUSING_UV_SHARED=1' \ - '-DUSING_V8_SHARED=1' \ - '-DV8_DEPRECATION_WARNINGS=1' \ - '-D_DARWIN_USE_64_BIT_INODE=1' \ - '-D_LARGEFILE_SOURCE' \ - '-D_FILE_OFFSET_BITS=64' \ - '-DBUILDING_NODE_EXTENSION' - -# Flags passed to all source files. -CFLAGS_Release := \ - -Os \ - -gdwarf-2 \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -Wall \ - -Wendif-labels \ - -W \ - -Wno-unused-parameter - -# Flags passed to only C files. -CFLAGS_C_Release := \ - -fno-strict-aliasing - -# Flags passed to only C++ files. -CFLAGS_CC_Release := \ - -std=gnu++0x \ - -stdlib=libc++ \ - -fno-rtti \ - -fno-exceptions \ - -fno-threadsafe-statics \ - -fno-strict-aliasing - -# Flags passed to only ObjC files. 
-CFLAGS_OBJC_Release := - -# Flags passed to only ObjC++ files. -CFLAGS_OBJCC_Release := - -INCS_Release := \ - -I/Users/trott/io.js/include/node \ - -I/Users/trott/io.js/src \ - -I/Users/trott/io.js/deps/uv/include \ - -I/Users/trott/io.js/deps/v8/include - -OBJS := \ - $(obj).target/$(TARGET)/test_object.o - -# Add to the list of files we specially track dependencies for. -all_deps += $(OBJS) - -# CFLAGS et al overrides must be target-local. -# See "Target-specific Variable Values" in the GNU Make manual. -$(OBJS): TOOLSET := $(TOOLSET) -$(OBJS): GYP_CFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) -$(OBJS): GYP_CXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) -$(OBJS): GYP_OBJCFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) $(CFLAGS_OBJC_$(BUILDTYPE)) -$(OBJS): GYP_OBJCXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) $(CFLAGS_OBJCC_$(BUILDTYPE)) - -# Suffix rules, putting all outputs into $(obj). - -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) - -# Try building from generated source, too. - -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) - -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) - -# End of this set of suffix rules -### Rules for final target. -LDFLAGS_Debug := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -L$(builddir) \ - -stdlib=libc++ - -LIBTOOLFLAGS_Debug := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first - -LDFLAGS_Release := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -L$(builddir) \ - -stdlib=libc++ - -LIBTOOLFLAGS_Release := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first - -LIBS := - -$(builddir)/test_object.node: GYP_LDFLAGS := $(LDFLAGS_$(BUILDTYPE)) -$(builddir)/test_object.node: LIBS := $(LIBS) -$(builddir)/test_object.node: GYP_LIBTOOLFLAGS := $(LIBTOOLFLAGS_$(BUILDTYPE)) -$(builddir)/test_object.node: TOOLSET := $(TOOLSET) -$(builddir)/test_object.node: $(OBJS) FORCE_DO_CMD - $(call do_cmd,solink_module) - -all_deps += $(builddir)/test_object.node -# Add target alias -.PHONY: test_object -test_object: $(builddir)/test_object.node - -# Short alias for building this executable. -.PHONY: test_object.node -test_object.node: $(builddir)/test_object.node - -# Add executable to "all" target. -.PHONY: all -all: $(builddir)/test_object.node - diff --git a/test/addons-napi/test_promise/build/Makefile b/test/addons-napi/test_promise/build/Makefile deleted file mode 100644 index f48fdd3f6616bd..00000000000000 --- a/test/addons-napi/test_promise/build/Makefile +++ /dev/null @@ -1,342 +0,0 @@ -# We borrow heavily from the kernel build setup, though we are simpler since -# we don't have Kconfig tweaking settings on us. - -# The implicit make rules have it looking for RCS files, among other things. -# We instead explicitly write all the rules we care about. -# It's even quicker (saves ~200ms) to pass -r on the command line. -MAKEFLAGS=-r - -# The source directory tree. -srcdir := .. -abs_srcdir := $(abspath $(srcdir)) - -# The name of the builddir. -builddir_name ?= . - -# The V=1 flag on command line makes us verbosely print command lines. 
-ifdef V - quiet= -else - quiet=quiet_ -endif - -# Specify BUILDTYPE=Release on the command line for a release build. -BUILDTYPE ?= Release - -# Directory all our build output goes into. -# Note that this must be two directories beneath src/ for unit tests to pass, -# as they reach into the src/ directory for data with relative paths. -builddir ?= $(builddir_name)/$(BUILDTYPE) -abs_builddir := $(abspath $(builddir)) -depsdir := $(builddir)/.deps - -# Object output directory. -obj := $(builddir)/obj -abs_obj := $(abspath $(obj)) - -# We build up a list of every single one of the targets so we can slurp in the -# generated dependency rule Makefiles in one pass. -all_deps := - - - -CC.target ?= $(CC) -CFLAGS.target ?= $(CPPFLAGS) $(CFLAGS) -CXX.target ?= $(CXX) -CXXFLAGS.target ?= $(CPPFLAGS) $(CXXFLAGS) -LINK.target ?= $(LINK) -LDFLAGS.target ?= $(LDFLAGS) -AR.target ?= $(AR) - -# C++ apps need to be linked with g++. -LINK ?= $(CXX.target) - -# TODO(evan): move all cross-compilation logic to gyp-time so we don't need -# to replicate this environment fallback in make as well. -CC.host ?= gcc -CFLAGS.host ?= $(CPPFLAGS_host) $(CFLAGS_host) -CXX.host ?= g++ -CXXFLAGS.host ?= $(CPPFLAGS_host) $(CXXFLAGS_host) -LINK.host ?= $(CXX.host) -LDFLAGS.host ?= -AR.host ?= ar - -# Define a dir function that can handle spaces. -# http://www.gnu.org/software/make/manual/make.html#Syntax-of-Functions -# "leading spaces cannot appear in the text of the first argument as written. -# These characters can be put into the argument value by variable substitution." -empty := -space := $(empty) $(empty) - -# http://stackoverflow.com/questions/1189781/using-make-dir-or-notdir-on-a-path-with-spaces -replace_spaces = $(subst $(space),?,$1) -unreplace_spaces = $(subst ?,$(space),$1) -dirx = $(call unreplace_spaces,$(dir $(call replace_spaces,$1))) - -# Flags to make gcc output dependency info. Note that you need to be -# careful here to use the flags that ccache and distcc can understand. -# We write to a dep file on the side first and then rename at the end -# so we can't end up with a broken dep file. -depfile = $(depsdir)/$(call replace_spaces,$@).d -DEPFLAGS = -MMD -MF $(depfile).raw - -# We have to fixup the deps output in a few ways. -# (1) the file output should mention the proper .o file. -# ccache or distcc lose the path to the target, so we convert a rule of -# the form: -# foobar.o: DEP1 DEP2 -# into -# path/to/foobar.o: DEP1 DEP2 -# (2) we want missing files not to cause us to fail to build. -# We want to rewrite -# foobar.o: DEP1 DEP2 \ -# DEP3 -# to -# DEP1: -# DEP2: -# DEP3: -# so if the files are missing, they're just considered phony rules. -# We have to do some pretty insane escaping to get those backslashes -# and dollar signs past make, the shell, and sed at the same time. -# Doesn't work with spaces, but that's fine: .d files have spaces in -# their names replaced with other characters. -define fixup_dep -# The depfile may not exist if the input file didn't have any #includes. -touch $(depfile).raw -# Fixup path as in (1). -sed -e "s|^$(notdir $@)|$@|" $(depfile).raw >> $(depfile) -# Add extra rules as in (2). -# We remove slashes and replace spaces with new lines; -# remove blank lines; -# delete the first line and append a colon to the remaining lines. 
-sed -e 's|\\||' -e 'y| |\n|' $(depfile).raw |\ - grep -v '^$$' |\ - sed -e 1d -e 's|$$|:|' \ - >> $(depfile) -rm $(depfile).raw -endef - -# Command definitions: -# - cmd_foo is the actual command to run; -# - quiet_cmd_foo is the brief-output summary of the command. - -quiet_cmd_cc = CC($(TOOLSET)) $@ -cmd_cc = $(CC.$(TOOLSET)) $(GYP_CFLAGS) $(DEPFLAGS) $(CFLAGS.$(TOOLSET)) -c -o $@ $< - -quiet_cmd_cxx = CXX($(TOOLSET)) $@ -cmd_cxx = $(CXX.$(TOOLSET)) $(GYP_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< - -quiet_cmd_objc = CXX($(TOOLSET)) $@ -cmd_objc = $(CC.$(TOOLSET)) $(GYP_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $< - -quiet_cmd_objcxx = CXX($(TOOLSET)) $@ -cmd_objcxx = $(CXX.$(TOOLSET)) $(GYP_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $< - -# Commands for precompiled header files. -quiet_cmd_pch_c = CXX($(TOOLSET)) $@ -cmd_pch_c = $(CC.$(TOOLSET)) $(GYP_PCH_CFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< -quiet_cmd_pch_cc = CXX($(TOOLSET)) $@ -cmd_pch_cc = $(CC.$(TOOLSET)) $(GYP_PCH_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< -quiet_cmd_pch_m = CXX($(TOOLSET)) $@ -cmd_pch_m = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $< -quiet_cmd_pch_mm = CXX($(TOOLSET)) $@ -cmd_pch_mm = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $< - -# gyp-mac-tool is written next to the root Makefile by gyp. -# Use $(4) for the command, since $(2) and $(3) are used as flag by do_cmd -# already. -quiet_cmd_mac_tool = MACTOOL $(4) $< -cmd_mac_tool = ./gyp-mac-tool $(4) $< "$@" - -quiet_cmd_mac_package_framework = PACKAGE FRAMEWORK $@ -cmd_mac_package_framework = ./gyp-mac-tool package-framework "$@" $(4) - -quiet_cmd_infoplist = INFOPLIST $@ -cmd_infoplist = $(CC.$(TOOLSET)) -E -P -Wno-trigraphs -x c $(INFOPLIST_DEFINES) "$<" -o "$@" - -quiet_cmd_touch = TOUCH $@ -cmd_touch = touch $@ - -quiet_cmd_copy = COPY $@ -# send stderr to /dev/null to ignore messages when linking directories. -cmd_copy = rm -rf "$@" && cp -af "$<" "$@" - -quiet_cmd_alink = LIBTOOL-STATIC $@ -cmd_alink = rm -f $@ && ./gyp-mac-tool filter-libtool libtool $(GYP_LIBTOOLFLAGS) -static -o $@ $(filter %.o,$^) - -quiet_cmd_link = LINK($(TOOLSET)) $@ -cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS) - -quiet_cmd_solink = SOLINK($(TOOLSET)) $@ -cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS) - -quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@ -cmd_solink_module = $(LINK.$(TOOLSET)) -bundle $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS) - - -# Define an escape_quotes function to escape single quotes. -# This allows us to handle quotes properly as long as we always use -# use single quotes and escape_quotes. -escape_quotes = $(subst ','\'',$(1)) -# This comment is here just to include a ' to unconfuse syntax highlighting. -# Define an escape_vars function to escape '$' variable syntax. -# This allows us to read/write command lines with shell variables (e.g. -# $LD_LIBRARY_PATH), without triggering make substitution. -escape_vars = $(subst $$,$$$$,$(1)) -# Helper that expands to a shell command to echo a string exactly as it is in -# make. This uses printf instead of echo because printf's behaviour with respect -# to escape sequences is more portable than echo's across different shells -# (e.g., dash, bash). 
-exact_echo = printf '%s\n' '$(call escape_quotes,$(1))' - -# Helper to compare the command we're about to run against the command -# we logged the last time we ran the command. Produces an empty -# string (false) when the commands match. -# Tricky point: Make has no string-equality test function. -# The kernel uses the following, but it seems like it would have false -# positives, where one string reordered its arguments. -# arg_check = $(strip $(filter-out $(cmd_$(1)), $(cmd_$@)) \ -# $(filter-out $(cmd_$@), $(cmd_$(1)))) -# We instead substitute each for the empty string into the other, and -# say they're equal if both substitutions produce the empty string. -# .d files contain ? instead of spaces, take that into account. -command_changed = $(or $(subst $(cmd_$(1)),,$(cmd_$(call replace_spaces,$@))),\ - $(subst $(cmd_$(call replace_spaces,$@)),,$(cmd_$(1)))) - -# Helper that is non-empty when a prerequisite changes. -# Normally make does this implicitly, but we force rules to always run -# so we can check their command lines. -# $? -- new prerequisites -# $| -- order-only dependencies -prereq_changed = $(filter-out FORCE_DO_CMD,$(filter-out $|,$?)) - -# Helper that executes all postbuilds until one fails. -define do_postbuilds - @E=0;\ - for p in $(POSTBUILDS); do\ - eval $$p;\ - E=$$?;\ - if [ $$E -ne 0 ]; then\ - break;\ - fi;\ - done;\ - if [ $$E -ne 0 ]; then\ - rm -rf "$@";\ - exit $$E;\ - fi -endef - -# do_cmd: run a command via the above cmd_foo names, if necessary. -# Should always run for a given target to handle command-line changes. -# Second argument, if non-zero, makes it do asm/C/C++ dependency munging. -# Third argument, if non-zero, makes it do POSTBUILDS processing. -# Note: We intentionally do NOT call dirx for depfile, since it contains ? for -# spaces already and dirx strips the ? characters. -define do_cmd -$(if $(or $(command_changed),$(prereq_changed)), - @$(call exact_echo, $($(quiet)cmd_$(1))) - @mkdir -p "$(call dirx,$@)" "$(dir $(depfile))" - $(if $(findstring flock,$(word 2,$(cmd_$1))), - @$(cmd_$(1)) - @echo " $(quiet_cmd_$(1)): Finished", - @$(cmd_$(1)) - ) - @$(call exact_echo,$(call escape_vars,cmd_$(call replace_spaces,$@) := $(cmd_$(1)))) > $(depfile) - @$(if $(2),$(fixup_dep)) - $(if $(and $(3), $(POSTBUILDS)), - $(call do_postbuilds) - ) -) -endef - -# Declare the "all" target first so it is the default, -# even though we don't have the deps yet. -.PHONY: all -all: - -# make looks for ways to re-generate included makefiles, but in our case, we -# don't have a direct way. Explicitly telling make that it has nothing to do -# for them makes it go faster. -%.d: ; - -# Use FORCE_DO_CMD to force a target to run. Should be coupled with -# do_cmd. -.PHONY: FORCE_DO_CMD -FORCE_DO_CMD: - -TOOLSET := target -# Suffix rules, putting all outputs into $(obj). -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.m FORCE_DO_CMD - @$(call do_cmd,objc,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.mm FORCE_DO_CMD - @$(call do_cmd,objcxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.S FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.s FORCE_DO_CMD - @$(call do_cmd,cc,1) - -# Try building from generated source, too. 
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.m FORCE_DO_CMD - @$(call do_cmd,objc,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.mm FORCE_DO_CMD - @$(call do_cmd,objcxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.S FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.s FORCE_DO_CMD - @$(call do_cmd,cc,1) - -$(obj).$(TOOLSET)/%.o: $(obj)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.m FORCE_DO_CMD - @$(call do_cmd,objc,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.mm FORCE_DO_CMD - @$(call do_cmd,objcxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.S FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.s FORCE_DO_CMD - @$(call do_cmd,cc,1) - - -ifeq ($(strip $(foreach prefix,$(NO_LOAD),\ - $(findstring $(join ^,$(prefix)),\ - $(join ^,test_promise.target.mk)))),) - include test_promise.target.mk -endif - -quiet_cmd_regen_makefile = ACTION Regenerating $@ -cmd_regen_makefile = cd $(srcdir); /Users/trott/io.js/deps/npm/node_modules/node-gyp/gyp/gyp_main.py -fmake --ignore-environment "--toplevel-dir=." -I/Users/trott/io.js/test/addons-napi/test_promise/build/config.gypi -I/Users/trott/io.js/deps/npm/node_modules/node-gyp/addon.gypi -I/Users/trott/io.js/common.gypi "--depth=." "-Goutput_dir=." "--generator-output=build" "-Dlibrary=shared_library" "-Dvisibility=default" "-Dnode_root_dir=/Users/trott/io.js" "-Dnode_gyp_dir=/Users/trott/io.js/deps/npm/node_modules/node-gyp" "-Dnode_lib_file=/Users/trott/io.js/$(Configuration)/node.lib" "-Dmodule_root_dir=/Users/trott/io.js/test/addons-napi/test_promise" "-Dnode_engine=v8" binding.gyp -Makefile: $(srcdir)/../../../deps/npm/node_modules/node-gyp/addon.gypi $(srcdir)/build/config.gypi $(srcdir)/binding.gyp $(srcdir)/../../../common.gypi - $(call do_cmd,regen_makefile) - -# "all" is a concatenation of the "all" targets from all the included -# sub-makefiles. This is just here to clarify. -all: - -# Add in dependency-tracking rules. $(all_deps) is the list of every single -# target in our tree. Only consider the ones with .d (dependency) info: -d_files := $(wildcard $(foreach f,$(all_deps),$(depsdir)/$(f).d)) -ifneq ($(d_files),) - include $(d_files) -endif diff --git a/test/addons-napi/test_promise/build/binding.Makefile b/test/addons-napi/test_promise/build/binding.Makefile deleted file mode 100644 index 8070d2e1136429..00000000000000 --- a/test/addons-napi/test_promise/build/binding.Makefile +++ /dev/null @@ -1,6 +0,0 @@ -# This file is generated by gyp; do not edit. - -export builddir_name ?= ./build/. -.PHONY: all -all: - $(MAKE) test_promise diff --git a/test/addons-napi/test_promise/build/config.gypi b/test/addons-napi/test_promise/build/config.gypi deleted file mode 100644 index 6a35cfccbb772b..00000000000000 --- a/test/addons-napi/test_promise/build/config.gypi +++ /dev/null @@ -1,70 +0,0 @@ -# Do not edit. 
File was generated by node-gyp's "configure" step -{ - "target_defaults": { - "cflags": [], - "default_configuration": "Release", - "defines": [], - "include_dirs": [], - "libraries": [] - }, - "variables": { - "asan": 0, - "coverage": "false", - "debug_devtools": "node", - "debug_http2": "false", - "debug_nghttp2": "false", - "force_dynamic_crt": 0, - "host_arch": "x64", - "icu_data_file": "icudt59l.dat", - "icu_data_in": "../../deps/icu-small/source/data/in/icudt59l.dat", - "icu_endianness": "l", - "icu_gyp_path": "tools/icu/icu-generic.gyp", - "icu_locales": "en,root", - "icu_path": "deps/icu-small", - "icu_small": "true", - "icu_ver_major": "59", - "llvm_version": 0, - "node_byteorder": "little", - "node_enable_d8": "false", - "node_enable_v8_vtunejit": "false", - "node_install_npm": "true", - "node_module_version": 58, - "node_no_browser_globals": "false", - "node_prefix": "/usr/local", - "node_release_urlbase": "", - "node_shared": "false", - "node_shared_cares": "false", - "node_shared_http_parser": "false", - "node_shared_libuv": "false", - "node_shared_openssl": "false", - "node_shared_zlib": "false", - "node_tag": "", - "node_use_bundled_v8": "true", - "node_use_dtrace": "true", - "node_use_etw": "false", - "node_use_lttng": "false", - "node_use_openssl": "true", - "node_use_perfctr": "false", - "node_use_v8_platform": "true", - "node_without_node_options": "false", - "openssl_fips": "", - "openssl_no_asm": 0, - "shlib_suffix": "58.dylib", - "target_arch": "x64", - "uv_parent_path": "/deps/uv/", - "uv_use_dtrace": "true", - "v8_enable_gdbjit": 0, - "v8_enable_i18n_support": 1, - "v8_enable_inspector": 1, - "v8_no_strict_aliasing": 1, - "v8_optimized_debug": 0, - "v8_promise_internal_field_count": 1, - "v8_random_seed": 0, - "v8_trace_maps": 0, - "v8_use_snapshot": "true", - "want_separate_host_toolset": 0, - "xcode_version": "8.0", - "nodedir": "/Users/trott/io.js", - "standalone_static_library": 1 - } -} diff --git a/test/addons-napi/test_promise/build/gyp-mac-tool b/test/addons-napi/test_promise/build/gyp-mac-tool deleted file mode 100755 index 8ef02b0493a003..00000000000000 --- a/test/addons-napi/test_promise/build/gyp-mac-tool +++ /dev/null @@ -1,611 +0,0 @@ -#!/usr/bin/env python -# Generated by gyp. Do not edit. -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Utility functions to perform Xcode-style build steps. - -These functions are executed via gyp-mac-tool when using the Makefile generator. -""" - -import fcntl -import fnmatch -import glob -import json -import os -import plistlib -import re -import shutil -import string -import subprocess -import sys -import tempfile - - -def main(args): - executor = MacTool() - exit_code = executor.Dispatch(args) - if exit_code is not None: - sys.exit(exit_code) - - -class MacTool(object): - """This class performs all the Mac tooling steps. 
The methods can either be - executed directly, or dispatched from an argument list.""" - - def Dispatch(self, args): - """Dispatches a string command to a method.""" - if len(args) < 1: - raise Exception("Not enough arguments") - - method = "Exec%s" % self._CommandifyName(args[0]) - return getattr(self, method)(*args[1:]) - - def _CommandifyName(self, name_string): - """Transforms a tool name like copy-info-plist to CopyInfoPlist""" - return name_string.title().replace('-', '') - - def ExecCopyBundleResource(self, source, dest, convert_to_binary): - """Copies a resource file to the bundle/Resources directory, performing any - necessary compilation on each resource.""" - extension = os.path.splitext(source)[1].lower() - if os.path.isdir(source): - # Copy tree. - # TODO(thakis): This copies file attributes like mtime, while the - # single-file branch below doesn't. This should probably be changed to - # be consistent with the single-file branch. - if os.path.exists(dest): - shutil.rmtree(dest) - shutil.copytree(source, dest) - elif extension == '.xib': - return self._CopyXIBFile(source, dest) - elif extension == '.storyboard': - return self._CopyXIBFile(source, dest) - elif extension == '.strings': - self._CopyStringsFile(source, dest, convert_to_binary) - else: - shutil.copy(source, dest) - - def _CopyXIBFile(self, source, dest): - """Compiles a XIB file with ibtool into a binary plist in the bundle.""" - - # ibtool sometimes crashes with relative paths. See crbug.com/314728. - base = os.path.dirname(os.path.realpath(__file__)) - if os.path.relpath(source): - source = os.path.join(base, source) - if os.path.relpath(dest): - dest = os.path.join(base, dest) - - args = ['xcrun', 'ibtool', '--errors', '--warnings', '--notices', - '--output-format', 'human-readable-text', '--compile', dest, source] - ibtool_section_re = re.compile(r'/\*.*\*/') - ibtool_re = re.compile(r'.*note:.*is clipping its content') - ibtoolout = subprocess.Popen(args, stdout=subprocess.PIPE) - current_section_header = None - for line in ibtoolout.stdout: - if ibtool_section_re.match(line): - current_section_header = line - elif not ibtool_re.match(line): - if current_section_header: - sys.stdout.write(current_section_header) - current_section_header = None - sys.stdout.write(line) - return ibtoolout.returncode - - def _ConvertToBinary(self, dest): - subprocess.check_call([ - 'xcrun', 'plutil', '-convert', 'binary1', '-o', dest, dest]) - - def _CopyStringsFile(self, source, dest, convert_to_binary): - """Copies a .strings file using iconv to reconvert the input into UTF-16.""" - input_code = self._DetectInputEncoding(source) or "UTF-8" - - # Xcode's CpyCopyStringsFile / builtin-copyStrings seems to call - # CFPropertyListCreateFromXMLData() behind the scenes; at least it prints - # CFPropertyListCreateFromXMLData(): Old-style plist parser: missing - # semicolon in dictionary. - # on invalid files. Do the same kind of validation. - import CoreFoundation - s = open(source, 'rb').read() - d = CoreFoundation.CFDataCreate(None, s, len(s)) - _, error = CoreFoundation.CFPropertyListCreateFromXMLData(None, d, 0, None) - if error: - return - - fp = open(dest, 'wb') - fp.write(s.decode(input_code).encode('UTF-16')) - fp.close() - - if convert_to_binary == 'True': - self._ConvertToBinary(dest) - - def _DetectInputEncoding(self, file_name): - """Reads the first few bytes from file_name and tries to guess the text - encoding. 
Returns None as a guess if it can't detect it.""" - fp = open(file_name, 'rb') - try: - header = fp.read(3) - except e: - fp.close() - return None - fp.close() - if header.startswith("\xFE\xFF"): - return "UTF-16" - elif header.startswith("\xFF\xFE"): - return "UTF-16" - elif header.startswith("\xEF\xBB\xBF"): - return "UTF-8" - else: - return None - - def ExecCopyInfoPlist(self, source, dest, convert_to_binary, *keys): - """Copies the |source| Info.plist to the destination directory |dest|.""" - # Read the source Info.plist into memory. - fd = open(source, 'r') - lines = fd.read() - fd.close() - - # Insert synthesized key/value pairs (e.g. BuildMachineOSBuild). - plist = plistlib.readPlistFromString(lines) - if keys: - plist = dict(plist.items() + json.loads(keys[0]).items()) - lines = plistlib.writePlistToString(plist) - - # Go through all the environment variables and replace them as variables in - # the file. - IDENT_RE = re.compile(r'[/\s]') - for key in os.environ: - if key.startswith('_'): - continue - evar = '${%s}' % key - evalue = os.environ[key] - lines = string.replace(lines, evar, evalue) - - # Xcode supports various suffices on environment variables, which are - # all undocumented. :rfc1034identifier is used in the standard project - # template these days, and :identifier was used earlier. They are used to - # convert non-url characters into things that look like valid urls -- - # except that the replacement character for :identifier, '_' isn't valid - # in a URL either -- oops, hence :rfc1034identifier was born. - evar = '${%s:identifier}' % key - evalue = IDENT_RE.sub('_', os.environ[key]) - lines = string.replace(lines, evar, evalue) - - evar = '${%s:rfc1034identifier}' % key - evalue = IDENT_RE.sub('-', os.environ[key]) - lines = string.replace(lines, evar, evalue) - - # Remove any keys with values that haven't been replaced. - lines = lines.split('\n') - for i in range(len(lines)): - if lines[i].strip().startswith("${"): - lines[i] = None - lines[i - 1] = None - lines = '\n'.join(filter(lambda x: x is not None, lines)) - - # Write out the file with variables replaced. - fd = open(dest, 'w') - fd.write(lines) - fd.close() - - # Now write out PkgInfo file now that the Info.plist file has been - # "compiled". - self._WritePkgInfo(dest) - - if convert_to_binary == 'True': - self._ConvertToBinary(dest) - - def _WritePkgInfo(self, info_plist): - """This writes the PkgInfo file from the data stored in Info.plist.""" - plist = plistlib.readPlist(info_plist) - if not plist: - return - - # Only create PkgInfo for executable types. - package_type = plist['CFBundlePackageType'] - if package_type != 'APPL': - return - - # The format of PkgInfo is eight characters, representing the bundle type - # and bundle signature, each four characters. If that is missing, four - # '?' characters are used instead. - signature_code = plist.get('CFBundleSignature', '????') - if len(signature_code) != 4: # Wrong length resets everything, too. - signature_code = '?' * 4 - - dest = os.path.join(os.path.dirname(info_plist), 'PkgInfo') - fp = open(dest, 'w') - fp.write('%s%s' % (package_type, signature_code)) - fp.close() - - def ExecFlock(self, lockfile, *cmd_list): - """Emulates the most basic behavior of Linux's flock(1).""" - # Rely on exception handling to report errors. 
- fd = os.open(lockfile, os.O_RDONLY|os.O_NOCTTY|os.O_CREAT, 0o666) - fcntl.flock(fd, fcntl.LOCK_EX) - return subprocess.call(cmd_list) - - def ExecFilterLibtool(self, *cmd_list): - """Calls libtool and filters out '/path/to/libtool: file: foo.o has no - symbols'.""" - libtool_re = re.compile(r'^.*libtool: file: .* has no symbols$') - libtool_re5 = re.compile( - r'^.*libtool: warning for library: ' + - r'.* the table of contents is empty ' + - r'\(no object file members in the library define global symbols\)$') - env = os.environ.copy() - # Ref: - # http://www.opensource.apple.com/source/cctools/cctools-809/misc/libtool.c - # The problem with this flag is that it resets the file mtime on the file to - # epoch=0, e.g. 1970-1-1 or 1969-12-31 depending on timezone. - env['ZERO_AR_DATE'] = '1' - libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env) - _, err = libtoolout.communicate() - for line in err.splitlines(): - if not libtool_re.match(line) and not libtool_re5.match(line): - print >>sys.stderr, line - # Unconditionally touch the output .a file on the command line if present - # and the command succeeded. A bit hacky. - if not libtoolout.returncode: - for i in range(len(cmd_list) - 1): - if cmd_list[i] == "-o" and cmd_list[i+1].endswith('.a'): - os.utime(cmd_list[i+1], None) - break - return libtoolout.returncode - - def ExecPackageFramework(self, framework, version): - """Takes a path to Something.framework and the Current version of that and - sets up all the symlinks.""" - # Find the name of the binary based on the part before the ".framework". - binary = os.path.basename(framework).split('.')[0] - - CURRENT = 'Current' - RESOURCES = 'Resources' - VERSIONS = 'Versions' - - if not os.path.exists(os.path.join(framework, VERSIONS, version, binary)): - # Binary-less frameworks don't seem to contain symlinks (see e.g. - # chromium's out/Debug/org.chromium.Chromium.manifest/ bundle). - return - - # Move into the framework directory to set the symlinks correctly. - pwd = os.getcwd() - os.chdir(framework) - - # Set up the Current version. - self._Relink(version, os.path.join(VERSIONS, CURRENT)) - - # Set up the root symlinks. - self._Relink(os.path.join(VERSIONS, CURRENT, binary), binary) - self._Relink(os.path.join(VERSIONS, CURRENT, RESOURCES), RESOURCES) - - # Back to where we were before! - os.chdir(pwd) - - def _Relink(self, dest, link): - """Creates a symlink to |dest| named |link|. If |link| already exists, - it is overwritten.""" - if os.path.lexists(link): - os.remove(link) - os.symlink(dest, link) - - def ExecCompileXcassets(self, keys, *inputs): - """Compiles multiple .xcassets files into a single .car file. - - This invokes 'actool' to compile all the inputs .xcassets files. The - |keys| arguments is a json-encoded dictionary of extra arguments to - pass to 'actool' when the asset catalogs contains an application icon - or a launch image. - - Note that 'actool' does not create the Assets.car file if the asset - catalogs does not contains imageset. 
- """ - command_line = [ - 'xcrun', 'actool', '--output-format', 'human-readable-text', - '--compress-pngs', '--notices', '--warnings', '--errors', - ] - is_iphone_target = 'IPHONEOS_DEPLOYMENT_TARGET' in os.environ - if is_iphone_target: - platform = os.environ['CONFIGURATION'].split('-')[-1] - if platform not in ('iphoneos', 'iphonesimulator'): - platform = 'iphonesimulator' - command_line.extend([ - '--platform', platform, '--target-device', 'iphone', - '--target-device', 'ipad', '--minimum-deployment-target', - os.environ['IPHONEOS_DEPLOYMENT_TARGET'], '--compile', - os.path.abspath(os.environ['CONTENTS_FOLDER_PATH']), - ]) - else: - command_line.extend([ - '--platform', 'macosx', '--target-device', 'mac', - '--minimum-deployment-target', os.environ['MACOSX_DEPLOYMENT_TARGET'], - '--compile', - os.path.abspath(os.environ['UNLOCALIZED_RESOURCES_FOLDER_PATH']), - ]) - if keys: - keys = json.loads(keys) - for key, value in keys.iteritems(): - arg_name = '--' + key - if isinstance(value, bool): - if value: - command_line.append(arg_name) - elif isinstance(value, list): - for v in value: - command_line.append(arg_name) - command_line.append(str(v)) - else: - command_line.append(arg_name) - command_line.append(str(value)) - # Note: actool crashes if inputs path are relative, so use os.path.abspath - # to get absolute path name for inputs. - command_line.extend(map(os.path.abspath, inputs)) - subprocess.check_call(command_line) - - def ExecMergeInfoPlist(self, output, *inputs): - """Merge multiple .plist files into a single .plist file.""" - merged_plist = {} - for path in inputs: - plist = self._LoadPlistMaybeBinary(path) - self._MergePlist(merged_plist, plist) - plistlib.writePlist(merged_plist, output) - - def ExecCodeSignBundle(self, key, resource_rules, entitlements, provisioning): - """Code sign a bundle. - - This function tries to code sign an iOS bundle, following the same - algorithm as Xcode: - 1. copy ResourceRules.plist from the user or the SDK into the bundle, - 2. pick the provisioning profile that best match the bundle identifier, - and copy it into the bundle as embedded.mobileprovision, - 3. copy Entitlements.plist from user or SDK next to the bundle, - 4. code sign the bundle. - """ - resource_rules_path = self._InstallResourceRules(resource_rules) - substitutions, overrides = self._InstallProvisioningProfile( - provisioning, self._GetCFBundleIdentifier()) - entitlements_path = self._InstallEntitlements( - entitlements, substitutions, overrides) - subprocess.check_call([ - 'codesign', '--force', '--sign', key, '--resource-rules', - resource_rules_path, '--entitlements', entitlements_path, - os.path.join( - os.environ['TARGET_BUILD_DIR'], - os.environ['FULL_PRODUCT_NAME'])]) - - def _InstallResourceRules(self, resource_rules): - """Installs ResourceRules.plist from user or SDK into the bundle. - - Args: - resource_rules: string, optional, path to the ResourceRules.plist file - to use, default to "${SDKROOT}/ResourceRules.plist" - - Returns: - Path to the copy of ResourceRules.plist into the bundle. - """ - source_path = resource_rules - target_path = os.path.join( - os.environ['BUILT_PRODUCTS_DIR'], - os.environ['CONTENTS_FOLDER_PATH'], - 'ResourceRules.plist') - if not source_path: - source_path = os.path.join( - os.environ['SDKROOT'], 'ResourceRules.plist') - shutil.copy2(source_path, target_path) - return target_path - - def _InstallProvisioningProfile(self, profile, bundle_identifier): - """Installs embedded.mobileprovision into the bundle. 
- - Args: - profile: string, optional, short name of the .mobileprovision file - to use, if empty or the file is missing, the best file installed - will be used - bundle_identifier: string, value of CFBundleIdentifier from Info.plist - - Returns: - A tuple containing two dictionary: variables substitutions and values - to overrides when generating the entitlements file. - """ - source_path, provisioning_data, team_id = self._FindProvisioningProfile( - profile, bundle_identifier) - target_path = os.path.join( - os.environ['BUILT_PRODUCTS_DIR'], - os.environ['CONTENTS_FOLDER_PATH'], - 'embedded.mobileprovision') - shutil.copy2(source_path, target_path) - substitutions = self._GetSubstitutions(bundle_identifier, team_id + '.') - return substitutions, provisioning_data['Entitlements'] - - def _FindProvisioningProfile(self, profile, bundle_identifier): - """Finds the .mobileprovision file to use for signing the bundle. - - Checks all the installed provisioning profiles (or if the user specified - the PROVISIONING_PROFILE variable, only consult it) and select the most - specific that correspond to the bundle identifier. - - Args: - profile: string, optional, short name of the .mobileprovision file - to use, if empty or the file is missing, the best file installed - will be used - bundle_identifier: string, value of CFBundleIdentifier from Info.plist - - Returns: - A tuple of the path to the selected provisioning profile, the data of - the embedded plist in the provisioning profile and the team identifier - to use for code signing. - - Raises: - SystemExit: if no .mobileprovision can be used to sign the bundle. - """ - profiles_dir = os.path.join( - os.environ['HOME'], 'Library', 'MobileDevice', 'Provisioning Profiles') - if not os.path.isdir(profiles_dir): - print >>sys.stderr, ( - 'cannot find mobile provisioning for %s' % bundle_identifier) - sys.exit(1) - provisioning_profiles = None - if profile: - profile_path = os.path.join(profiles_dir, profile + '.mobileprovision') - if os.path.exists(profile_path): - provisioning_profiles = [profile_path] - if not provisioning_profiles: - provisioning_profiles = glob.glob( - os.path.join(profiles_dir, '*.mobileprovision')) - valid_provisioning_profiles = {} - for profile_path in provisioning_profiles: - profile_data = self._LoadProvisioningProfile(profile_path) - app_id_pattern = profile_data.get( - 'Entitlements', {}).get('application-identifier', '') - for team_identifier in profile_data.get('TeamIdentifier', []): - app_id = '%s.%s' % (team_identifier, bundle_identifier) - if fnmatch.fnmatch(app_id, app_id_pattern): - valid_provisioning_profiles[app_id_pattern] = ( - profile_path, profile_data, team_identifier) - if not valid_provisioning_profiles: - print >>sys.stderr, ( - 'cannot find mobile provisioning for %s' % bundle_identifier) - sys.exit(1) - # If the user has multiple provisioning profiles installed that can be - # used for ${bundle_identifier}, pick the most specific one (ie. the - # provisioning profile whose pattern is the longest). - selected_key = max(valid_provisioning_profiles, key=lambda v: len(v)) - return valid_provisioning_profiles[selected_key] - - def _LoadProvisioningProfile(self, profile_path): - """Extracts the plist embedded in a provisioning profile. - - Args: - profile_path: string, path to the .mobileprovision file - - Returns: - Content of the plist embedded in the provisioning profile as a dictionary. 
- """ - with tempfile.NamedTemporaryFile() as temp: - subprocess.check_call([ - 'security', 'cms', '-D', '-i', profile_path, '-o', temp.name]) - return self._LoadPlistMaybeBinary(temp.name) - - def _MergePlist(self, merged_plist, plist): - """Merge |plist| into |merged_plist|.""" - for key, value in plist.iteritems(): - if isinstance(value, dict): - merged_value = merged_plist.get(key, {}) - if isinstance(merged_value, dict): - self._MergePlist(merged_value, value) - merged_plist[key] = merged_value - else: - merged_plist[key] = value - else: - merged_plist[key] = value - - def _LoadPlistMaybeBinary(self, plist_path): - """Loads into a memory a plist possibly encoded in binary format. - - This is a wrapper around plistlib.readPlist that tries to convert the - plist to the XML format if it can't be parsed (assuming that it is in - the binary format). - - Args: - plist_path: string, path to a plist file, in XML or binary format - - Returns: - Content of the plist as a dictionary. - """ - try: - # First, try to read the file using plistlib that only supports XML, - # and if an exception is raised, convert a temporary copy to XML and - # load that copy. - return plistlib.readPlist(plist_path) - except: - pass - with tempfile.NamedTemporaryFile() as temp: - shutil.copy2(plist_path, temp.name) - subprocess.check_call(['plutil', '-convert', 'xml1', temp.name]) - return plistlib.readPlist(temp.name) - - def _GetSubstitutions(self, bundle_identifier, app_identifier_prefix): - """Constructs a dictionary of variable substitutions for Entitlements.plist. - - Args: - bundle_identifier: string, value of CFBundleIdentifier from Info.plist - app_identifier_prefix: string, value for AppIdentifierPrefix - - Returns: - Dictionary of substitutions to apply when generating Entitlements.plist. - """ - return { - 'CFBundleIdentifier': bundle_identifier, - 'AppIdentifierPrefix': app_identifier_prefix, - } - - def _GetCFBundleIdentifier(self): - """Extracts CFBundleIdentifier value from Info.plist in the bundle. - - Returns: - Value of CFBundleIdentifier in the Info.plist located in the bundle. - """ - info_plist_path = os.path.join( - os.environ['TARGET_BUILD_DIR'], - os.environ['INFOPLIST_PATH']) - info_plist_data = self._LoadPlistMaybeBinary(info_plist_path) - return info_plist_data['CFBundleIdentifier'] - - def _InstallEntitlements(self, entitlements, substitutions, overrides): - """Generates and install the ${BundleName}.xcent entitlements file. - - Expands variables "$(variable)" pattern in the source entitlements file, - add extra entitlements defined in the .mobileprovision file and the copy - the generated plist to "${BundlePath}.xcent". - - Args: - entitlements: string, optional, path to the Entitlements.plist template - to use, defaults to "${SDKROOT}/Entitlements.plist" - substitutions: dictionary, variable substitutions - overrides: dictionary, values to add to the entitlements - - Returns: - Path to the generated entitlements file. 
- """ - source_path = entitlements - target_path = os.path.join( - os.environ['BUILT_PRODUCTS_DIR'], - os.environ['PRODUCT_NAME'] + '.xcent') - if not source_path: - source_path = os.path.join( - os.environ['SDKROOT'], - 'Entitlements.plist') - shutil.copy2(source_path, target_path) - data = self._LoadPlistMaybeBinary(target_path) - data = self._ExpandVariables(data, substitutions) - if overrides: - for key in overrides: - if key not in data: - data[key] = overrides[key] - plistlib.writePlist(data, target_path) - return target_path - - def _ExpandVariables(self, data, substitutions): - """Expands variables "$(variable)" in data. - - Args: - data: object, can be either string, list or dictionary - substitutions: dictionary, variable substitutions to perform - - Returns: - Copy of data where each references to "$(variable)" has been replaced - by the corresponding value found in substitutions, or left intact if - the key was not found. - """ - if isinstance(data, str): - for key, value in substitutions.iteritems(): - data = data.replace('$(%s)' % key, value) - return data - if isinstance(data, list): - return [self._ExpandVariables(v, substitutions) for v in data] - if isinstance(data, dict): - return {k: self._ExpandVariables(data[k], substitutions) for k in data} - return data - -if __name__ == '__main__': - sys.exit(main(sys.argv[1:])) diff --git a/test/addons-napi/test_promise/build/test_promise.target.mk b/test/addons-napi/test_promise/build/test_promise.target.mk deleted file mode 100644 index ba1dcc23a34873..00000000000000 --- a/test/addons-napi/test_promise/build/test_promise.target.mk +++ /dev/null @@ -1,178 +0,0 @@ -# This file is generated by gyp; do not edit. - -TOOLSET := target -TARGET := test_promise -DEFS_Debug := \ - '-DNODE_GYP_MODULE_NAME=test_promise' \ - '-DUSING_UV_SHARED=1' \ - '-DUSING_V8_SHARED=1' \ - '-DV8_DEPRECATION_WARNINGS=1' \ - '-D_DARWIN_USE_64_BIT_INODE=1' \ - '-D_LARGEFILE_SOURCE' \ - '-D_FILE_OFFSET_BITS=64' \ - '-DBUILDING_NODE_EXTENSION' \ - '-DDEBUG' \ - '-D_DEBUG' \ - '-DV8_ENABLE_CHECKS' - -# Flags passed to all source files. -CFLAGS_Debug := \ - -O0 \ - -gdwarf-2 \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -Wall \ - -Wendif-labels \ - -W \ - -Wno-unused-parameter - -# Flags passed to only C files. -CFLAGS_C_Debug := \ - -fno-strict-aliasing - -# Flags passed to only C++ files. -CFLAGS_CC_Debug := \ - -std=gnu++0x \ - -stdlib=libc++ \ - -fno-rtti \ - -fno-exceptions \ - -fno-threadsafe-statics \ - -fno-strict-aliasing - -# Flags passed to only ObjC files. -CFLAGS_OBJC_Debug := - -# Flags passed to only ObjC++ files. -CFLAGS_OBJCC_Debug := - -INCS_Debug := \ - -I/Users/trott/io.js/include/node \ - -I/Users/trott/io.js/src \ - -I/Users/trott/io.js/deps/uv/include \ - -I/Users/trott/io.js/deps/v8/include - -DEFS_Release := \ - '-DNODE_GYP_MODULE_NAME=test_promise' \ - '-DUSING_UV_SHARED=1' \ - '-DUSING_V8_SHARED=1' \ - '-DV8_DEPRECATION_WARNINGS=1' \ - '-D_DARWIN_USE_64_BIT_INODE=1' \ - '-D_LARGEFILE_SOURCE' \ - '-D_FILE_OFFSET_BITS=64' \ - '-DBUILDING_NODE_EXTENSION' - -# Flags passed to all source files. -CFLAGS_Release := \ - -Os \ - -gdwarf-2 \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -Wall \ - -Wendif-labels \ - -W \ - -Wno-unused-parameter - -# Flags passed to only C files. -CFLAGS_C_Release := \ - -fno-strict-aliasing - -# Flags passed to only C++ files. 
-CFLAGS_CC_Release := \ - -std=gnu++0x \ - -stdlib=libc++ \ - -fno-rtti \ - -fno-exceptions \ - -fno-threadsafe-statics \ - -fno-strict-aliasing - -# Flags passed to only ObjC files. -CFLAGS_OBJC_Release := - -# Flags passed to only ObjC++ files. -CFLAGS_OBJCC_Release := - -INCS_Release := \ - -I/Users/trott/io.js/include/node \ - -I/Users/trott/io.js/src \ - -I/Users/trott/io.js/deps/uv/include \ - -I/Users/trott/io.js/deps/v8/include - -OBJS := \ - $(obj).target/$(TARGET)/test_promise.o - -# Add to the list of files we specially track dependencies for. -all_deps += $(OBJS) - -# CFLAGS et al overrides must be target-local. -# See "Target-specific Variable Values" in the GNU Make manual. -$(OBJS): TOOLSET := $(TOOLSET) -$(OBJS): GYP_CFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) -$(OBJS): GYP_CXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) -$(OBJS): GYP_OBJCFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) $(CFLAGS_OBJC_$(BUILDTYPE)) -$(OBJS): GYP_OBJCXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) $(CFLAGS_OBJCC_$(BUILDTYPE)) - -# Suffix rules, putting all outputs into $(obj). - -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) - -# Try building from generated source, too. - -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) - -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) - -# End of this set of suffix rules -### Rules for final target. -LDFLAGS_Debug := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -L$(builddir) \ - -stdlib=libc++ - -LIBTOOLFLAGS_Debug := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first - -LDFLAGS_Release := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -L$(builddir) \ - -stdlib=libc++ - -LIBTOOLFLAGS_Release := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first - -LIBS := - -$(builddir)/test_promise.node: GYP_LDFLAGS := $(LDFLAGS_$(BUILDTYPE)) -$(builddir)/test_promise.node: LIBS := $(LIBS) -$(builddir)/test_promise.node: GYP_LIBTOOLFLAGS := $(LIBTOOLFLAGS_$(BUILDTYPE)) -$(builddir)/test_promise.node: TOOLSET := $(TOOLSET) -$(builddir)/test_promise.node: $(OBJS) FORCE_DO_CMD - $(call do_cmd,solink_module) - -all_deps += $(builddir)/test_promise.node -# Add target alias -.PHONY: test_promise -test_promise: $(builddir)/test_promise.node - -# Short alias for building this executable. -.PHONY: test_promise.node -test_promise.node: $(builddir)/test_promise.node - -# Add executable to "all" target. -.PHONY: all -all: $(builddir)/test_promise.node - diff --git a/test/addons-napi/test_properties/build/Makefile b/test/addons-napi/test_properties/build/Makefile deleted file mode 100644 index ff0ada5980ca4f..00000000000000 --- a/test/addons-napi/test_properties/build/Makefile +++ /dev/null @@ -1,342 +0,0 @@ -# We borrow heavily from the kernel build setup, though we are simpler since -# we don't have Kconfig tweaking settings on us. - -# The implicit make rules have it looking for RCS files, among other things. -# We instead explicitly write all the rules we care about. -# It's even quicker (saves ~200ms) to pass -r on the command line. 
-MAKEFLAGS=-r - -# The source directory tree. -srcdir := .. -abs_srcdir := $(abspath $(srcdir)) - -# The name of the builddir. -builddir_name ?= . - -# The V=1 flag on command line makes us verbosely print command lines. -ifdef V - quiet= -else - quiet=quiet_ -endif - -# Specify BUILDTYPE=Release on the command line for a release build. -BUILDTYPE ?= Release - -# Directory all our build output goes into. -# Note that this must be two directories beneath src/ for unit tests to pass, -# as they reach into the src/ directory for data with relative paths. -builddir ?= $(builddir_name)/$(BUILDTYPE) -abs_builddir := $(abspath $(builddir)) -depsdir := $(builddir)/.deps - -# Object output directory. -obj := $(builddir)/obj -abs_obj := $(abspath $(obj)) - -# We build up a list of every single one of the targets so we can slurp in the -# generated dependency rule Makefiles in one pass. -all_deps := - - - -CC.target ?= $(CC) -CFLAGS.target ?= $(CPPFLAGS) $(CFLAGS) -CXX.target ?= $(CXX) -CXXFLAGS.target ?= $(CPPFLAGS) $(CXXFLAGS) -LINK.target ?= $(LINK) -LDFLAGS.target ?= $(LDFLAGS) -AR.target ?= $(AR) - -# C++ apps need to be linked with g++. -LINK ?= $(CXX.target) - -# TODO(evan): move all cross-compilation logic to gyp-time so we don't need -# to replicate this environment fallback in make as well. -CC.host ?= gcc -CFLAGS.host ?= $(CPPFLAGS_host) $(CFLAGS_host) -CXX.host ?= g++ -CXXFLAGS.host ?= $(CPPFLAGS_host) $(CXXFLAGS_host) -LINK.host ?= $(CXX.host) -LDFLAGS.host ?= -AR.host ?= ar - -# Define a dir function that can handle spaces. -# http://www.gnu.org/software/make/manual/make.html#Syntax-of-Functions -# "leading spaces cannot appear in the text of the first argument as written. -# These characters can be put into the argument value by variable substitution." -empty := -space := $(empty) $(empty) - -# http://stackoverflow.com/questions/1189781/using-make-dir-or-notdir-on-a-path-with-spaces -replace_spaces = $(subst $(space),?,$1) -unreplace_spaces = $(subst ?,$(space),$1) -dirx = $(call unreplace_spaces,$(dir $(call replace_spaces,$1))) - -# Flags to make gcc output dependency info. Note that you need to be -# careful here to use the flags that ccache and distcc can understand. -# We write to a dep file on the side first and then rename at the end -# so we can't end up with a broken dep file. -depfile = $(depsdir)/$(call replace_spaces,$@).d -DEPFLAGS = -MMD -MF $(depfile).raw - -# We have to fixup the deps output in a few ways. -# (1) the file output should mention the proper .o file. -# ccache or distcc lose the path to the target, so we convert a rule of -# the form: -# foobar.o: DEP1 DEP2 -# into -# path/to/foobar.o: DEP1 DEP2 -# (2) we want missing files not to cause us to fail to build. -# We want to rewrite -# foobar.o: DEP1 DEP2 \ -# DEP3 -# to -# DEP1: -# DEP2: -# DEP3: -# so if the files are missing, they're just considered phony rules. -# We have to do some pretty insane escaping to get those backslashes -# and dollar signs past make, the shell, and sed at the same time. -# Doesn't work with spaces, but that's fine: .d files have spaces in -# their names replaced with other characters. -define fixup_dep -# The depfile may not exist if the input file didn't have any #includes. -touch $(depfile).raw -# Fixup path as in (1). -sed -e "s|^$(notdir $@)|$@|" $(depfile).raw >> $(depfile) -# Add extra rules as in (2). -# We remove slashes and replace spaces with new lines; -# remove blank lines; -# delete the first line and append a colon to the remaining lines. 
-sed -e 's|\\||' -e 'y| |\n|' $(depfile).raw |\ - grep -v '^$$' |\ - sed -e 1d -e 's|$$|:|' \ - >> $(depfile) -rm $(depfile).raw -endef - -# Command definitions: -# - cmd_foo is the actual command to run; -# - quiet_cmd_foo is the brief-output summary of the command. - -quiet_cmd_cc = CC($(TOOLSET)) $@ -cmd_cc = $(CC.$(TOOLSET)) $(GYP_CFLAGS) $(DEPFLAGS) $(CFLAGS.$(TOOLSET)) -c -o $@ $< - -quiet_cmd_cxx = CXX($(TOOLSET)) $@ -cmd_cxx = $(CXX.$(TOOLSET)) $(GYP_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< - -quiet_cmd_objc = CXX($(TOOLSET)) $@ -cmd_objc = $(CC.$(TOOLSET)) $(GYP_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $< - -quiet_cmd_objcxx = CXX($(TOOLSET)) $@ -cmd_objcxx = $(CXX.$(TOOLSET)) $(GYP_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $< - -# Commands for precompiled header files. -quiet_cmd_pch_c = CXX($(TOOLSET)) $@ -cmd_pch_c = $(CC.$(TOOLSET)) $(GYP_PCH_CFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< -quiet_cmd_pch_cc = CXX($(TOOLSET)) $@ -cmd_pch_cc = $(CC.$(TOOLSET)) $(GYP_PCH_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< -quiet_cmd_pch_m = CXX($(TOOLSET)) $@ -cmd_pch_m = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $< -quiet_cmd_pch_mm = CXX($(TOOLSET)) $@ -cmd_pch_mm = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $< - -# gyp-mac-tool is written next to the root Makefile by gyp. -# Use $(4) for the command, since $(2) and $(3) are used as flag by do_cmd -# already. -quiet_cmd_mac_tool = MACTOOL $(4) $< -cmd_mac_tool = ./gyp-mac-tool $(4) $< "$@" - -quiet_cmd_mac_package_framework = PACKAGE FRAMEWORK $@ -cmd_mac_package_framework = ./gyp-mac-tool package-framework "$@" $(4) - -quiet_cmd_infoplist = INFOPLIST $@ -cmd_infoplist = $(CC.$(TOOLSET)) -E -P -Wno-trigraphs -x c $(INFOPLIST_DEFINES) "$<" -o "$@" - -quiet_cmd_touch = TOUCH $@ -cmd_touch = touch $@ - -quiet_cmd_copy = COPY $@ -# send stderr to /dev/null to ignore messages when linking directories. -cmd_copy = rm -rf "$@" && cp -af "$<" "$@" - -quiet_cmd_alink = LIBTOOL-STATIC $@ -cmd_alink = rm -f $@ && ./gyp-mac-tool filter-libtool libtool $(GYP_LIBTOOLFLAGS) -static -o $@ $(filter %.o,$^) - -quiet_cmd_link = LINK($(TOOLSET)) $@ -cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS) - -quiet_cmd_solink = SOLINK($(TOOLSET)) $@ -cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS) - -quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@ -cmd_solink_module = $(LINK.$(TOOLSET)) -bundle $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS) - - -# Define an escape_quotes function to escape single quotes. -# This allows us to handle quotes properly as long as we always use -# use single quotes and escape_quotes. -escape_quotes = $(subst ','\'',$(1)) -# This comment is here just to include a ' to unconfuse syntax highlighting. -# Define an escape_vars function to escape '$' variable syntax. -# This allows us to read/write command lines with shell variables (e.g. -# $LD_LIBRARY_PATH), without triggering make substitution. -escape_vars = $(subst $$,$$$$,$(1)) -# Helper that expands to a shell command to echo a string exactly as it is in -# make. This uses printf instead of echo because printf's behaviour with respect -# to escape sequences is more portable than echo's across different shells -# (e.g., dash, bash). 
-exact_echo = printf '%s\n' '$(call escape_quotes,$(1))' - -# Helper to compare the command we're about to run against the command -# we logged the last time we ran the command. Produces an empty -# string (false) when the commands match. -# Tricky point: Make has no string-equality test function. -# The kernel uses the following, but it seems like it would have false -# positives, where one string reordered its arguments. -# arg_check = $(strip $(filter-out $(cmd_$(1)), $(cmd_$@)) \ -# $(filter-out $(cmd_$@), $(cmd_$(1)))) -# We instead substitute each for the empty string into the other, and -# say they're equal if both substitutions produce the empty string. -# .d files contain ? instead of spaces, take that into account. -command_changed = $(or $(subst $(cmd_$(1)),,$(cmd_$(call replace_spaces,$@))),\ - $(subst $(cmd_$(call replace_spaces,$@)),,$(cmd_$(1)))) - -# Helper that is non-empty when a prerequisite changes. -# Normally make does this implicitly, but we force rules to always run -# so we can check their command lines. -# $? -- new prerequisites -# $| -- order-only dependencies -prereq_changed = $(filter-out FORCE_DO_CMD,$(filter-out $|,$?)) - -# Helper that executes all postbuilds until one fails. -define do_postbuilds - @E=0;\ - for p in $(POSTBUILDS); do\ - eval $$p;\ - E=$$?;\ - if [ $$E -ne 0 ]; then\ - break;\ - fi;\ - done;\ - if [ $$E -ne 0 ]; then\ - rm -rf "$@";\ - exit $$E;\ - fi -endef - -# do_cmd: run a command via the above cmd_foo names, if necessary. -# Should always run for a given target to handle command-line changes. -# Second argument, if non-zero, makes it do asm/C/C++ dependency munging. -# Third argument, if non-zero, makes it do POSTBUILDS processing. -# Note: We intentionally do NOT call dirx for depfile, since it contains ? for -# spaces already and dirx strips the ? characters. -define do_cmd -$(if $(or $(command_changed),$(prereq_changed)), - @$(call exact_echo, $($(quiet)cmd_$(1))) - @mkdir -p "$(call dirx,$@)" "$(dir $(depfile))" - $(if $(findstring flock,$(word 2,$(cmd_$1))), - @$(cmd_$(1)) - @echo " $(quiet_cmd_$(1)): Finished", - @$(cmd_$(1)) - ) - @$(call exact_echo,$(call escape_vars,cmd_$(call replace_spaces,$@) := $(cmd_$(1)))) > $(depfile) - @$(if $(2),$(fixup_dep)) - $(if $(and $(3), $(POSTBUILDS)), - $(call do_postbuilds) - ) -) -endef - -# Declare the "all" target first so it is the default, -# even though we don't have the deps yet. -.PHONY: all -all: - -# make looks for ways to re-generate included makefiles, but in our case, we -# don't have a direct way. Explicitly telling make that it has nothing to do -# for them makes it go faster. -%.d: ; - -# Use FORCE_DO_CMD to force a target to run. Should be coupled with -# do_cmd. -.PHONY: FORCE_DO_CMD -FORCE_DO_CMD: - -TOOLSET := target -# Suffix rules, putting all outputs into $(obj). -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.m FORCE_DO_CMD - @$(call do_cmd,objc,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.mm FORCE_DO_CMD - @$(call do_cmd,objcxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.S FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.s FORCE_DO_CMD - @$(call do_cmd,cc,1) - -# Try building from generated source, too. 
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.m FORCE_DO_CMD - @$(call do_cmd,objc,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.mm FORCE_DO_CMD - @$(call do_cmd,objcxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.S FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.s FORCE_DO_CMD - @$(call do_cmd,cc,1) - -$(obj).$(TOOLSET)/%.o: $(obj)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.m FORCE_DO_CMD - @$(call do_cmd,objc,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.mm FORCE_DO_CMD - @$(call do_cmd,objcxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.S FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.s FORCE_DO_CMD - @$(call do_cmd,cc,1) - - -ifeq ($(strip $(foreach prefix,$(NO_LOAD),\ - $(findstring $(join ^,$(prefix)),\ - $(join ^,test_properties.target.mk)))),) - include test_properties.target.mk -endif - -quiet_cmd_regen_makefile = ACTION Regenerating $@ -cmd_regen_makefile = cd $(srcdir); /Users/trott/io.js/deps/npm/node_modules/node-gyp/gyp/gyp_main.py -fmake --ignore-environment "--toplevel-dir=." -I/Users/trott/io.js/test/addons-napi/test_properties/build/config.gypi -I/Users/trott/io.js/deps/npm/node_modules/node-gyp/addon.gypi -I/Users/trott/io.js/common.gypi "--depth=." "-Goutput_dir=." "--generator-output=build" "-Dlibrary=shared_library" "-Dvisibility=default" "-Dnode_root_dir=/Users/trott/io.js" "-Dnode_gyp_dir=/Users/trott/io.js/deps/npm/node_modules/node-gyp" "-Dnode_lib_file=/Users/trott/io.js/$(Configuration)/node.lib" "-Dmodule_root_dir=/Users/trott/io.js/test/addons-napi/test_properties" "-Dnode_engine=v8" binding.gyp -Makefile: $(srcdir)/../../../deps/npm/node_modules/node-gyp/addon.gypi $(srcdir)/build/config.gypi $(srcdir)/binding.gyp $(srcdir)/../../../common.gypi - $(call do_cmd,regen_makefile) - -# "all" is a concatenation of the "all" targets from all the included -# sub-makefiles. This is just here to clarify. -all: - -# Add in dependency-tracking rules. $(all_deps) is the list of every single -# target in our tree. Only consider the ones with .d (dependency) info: -d_files := $(wildcard $(foreach f,$(all_deps),$(depsdir)/$(f).d)) -ifneq ($(d_files),) - include $(d_files) -endif diff --git a/test/addons-napi/test_properties/build/binding.Makefile b/test/addons-napi/test_properties/build/binding.Makefile deleted file mode 100644 index 7e506c1667b9e4..00000000000000 --- a/test/addons-napi/test_properties/build/binding.Makefile +++ /dev/null @@ -1,6 +0,0 @@ -# This file is generated by gyp; do not edit. - -export builddir_name ?= ./build/. -.PHONY: all -all: - $(MAKE) test_properties diff --git a/test/addons-napi/test_properties/build/config.gypi b/test/addons-napi/test_properties/build/config.gypi deleted file mode 100644 index 6a35cfccbb772b..00000000000000 --- a/test/addons-napi/test_properties/build/config.gypi +++ /dev/null @@ -1,70 +0,0 @@ -# Do not edit. 
File was generated by node-gyp's "configure" step -{ - "target_defaults": { - "cflags": [], - "default_configuration": "Release", - "defines": [], - "include_dirs": [], - "libraries": [] - }, - "variables": { - "asan": 0, - "coverage": "false", - "debug_devtools": "node", - "debug_http2": "false", - "debug_nghttp2": "false", - "force_dynamic_crt": 0, - "host_arch": "x64", - "icu_data_file": "icudt59l.dat", - "icu_data_in": "../../deps/icu-small/source/data/in/icudt59l.dat", - "icu_endianness": "l", - "icu_gyp_path": "tools/icu/icu-generic.gyp", - "icu_locales": "en,root", - "icu_path": "deps/icu-small", - "icu_small": "true", - "icu_ver_major": "59", - "llvm_version": 0, - "node_byteorder": "little", - "node_enable_d8": "false", - "node_enable_v8_vtunejit": "false", - "node_install_npm": "true", - "node_module_version": 58, - "node_no_browser_globals": "false", - "node_prefix": "/usr/local", - "node_release_urlbase": "", - "node_shared": "false", - "node_shared_cares": "false", - "node_shared_http_parser": "false", - "node_shared_libuv": "false", - "node_shared_openssl": "false", - "node_shared_zlib": "false", - "node_tag": "", - "node_use_bundled_v8": "true", - "node_use_dtrace": "true", - "node_use_etw": "false", - "node_use_lttng": "false", - "node_use_openssl": "true", - "node_use_perfctr": "false", - "node_use_v8_platform": "true", - "node_without_node_options": "false", - "openssl_fips": "", - "openssl_no_asm": 0, - "shlib_suffix": "58.dylib", - "target_arch": "x64", - "uv_parent_path": "/deps/uv/", - "uv_use_dtrace": "true", - "v8_enable_gdbjit": 0, - "v8_enable_i18n_support": 1, - "v8_enable_inspector": 1, - "v8_no_strict_aliasing": 1, - "v8_optimized_debug": 0, - "v8_promise_internal_field_count": 1, - "v8_random_seed": 0, - "v8_trace_maps": 0, - "v8_use_snapshot": "true", - "want_separate_host_toolset": 0, - "xcode_version": "8.0", - "nodedir": "/Users/trott/io.js", - "standalone_static_library": 1 - } -} diff --git a/test/addons-napi/test_properties/build/gyp-mac-tool b/test/addons-napi/test_properties/build/gyp-mac-tool deleted file mode 100755 index 8ef02b0493a003..00000000000000 --- a/test/addons-napi/test_properties/build/gyp-mac-tool +++ /dev/null @@ -1,611 +0,0 @@ -#!/usr/bin/env python -# Generated by gyp. Do not edit. -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Utility functions to perform Xcode-style build steps. - -These functions are executed via gyp-mac-tool when using the Makefile generator. -""" - -import fcntl -import fnmatch -import glob -import json -import os -import plistlib -import re -import shutil -import string -import subprocess -import sys -import tempfile - - -def main(args): - executor = MacTool() - exit_code = executor.Dispatch(args) - if exit_code is not None: - sys.exit(exit_code) - - -class MacTool(object): - """This class performs all the Mac tooling steps. 
The methods can either be - executed directly, or dispatched from an argument list.""" - - def Dispatch(self, args): - """Dispatches a string command to a method.""" - if len(args) < 1: - raise Exception("Not enough arguments") - - method = "Exec%s" % self._CommandifyName(args[0]) - return getattr(self, method)(*args[1:]) - - def _CommandifyName(self, name_string): - """Transforms a tool name like copy-info-plist to CopyInfoPlist""" - return name_string.title().replace('-', '') - - def ExecCopyBundleResource(self, source, dest, convert_to_binary): - """Copies a resource file to the bundle/Resources directory, performing any - necessary compilation on each resource.""" - extension = os.path.splitext(source)[1].lower() - if os.path.isdir(source): - # Copy tree. - # TODO(thakis): This copies file attributes like mtime, while the - # single-file branch below doesn't. This should probably be changed to - # be consistent with the single-file branch. - if os.path.exists(dest): - shutil.rmtree(dest) - shutil.copytree(source, dest) - elif extension == '.xib': - return self._CopyXIBFile(source, dest) - elif extension == '.storyboard': - return self._CopyXIBFile(source, dest) - elif extension == '.strings': - self._CopyStringsFile(source, dest, convert_to_binary) - else: - shutil.copy(source, dest) - - def _CopyXIBFile(self, source, dest): - """Compiles a XIB file with ibtool into a binary plist in the bundle.""" - - # ibtool sometimes crashes with relative paths. See crbug.com/314728. - base = os.path.dirname(os.path.realpath(__file__)) - if os.path.relpath(source): - source = os.path.join(base, source) - if os.path.relpath(dest): - dest = os.path.join(base, dest) - - args = ['xcrun', 'ibtool', '--errors', '--warnings', '--notices', - '--output-format', 'human-readable-text', '--compile', dest, source] - ibtool_section_re = re.compile(r'/\*.*\*/') - ibtool_re = re.compile(r'.*note:.*is clipping its content') - ibtoolout = subprocess.Popen(args, stdout=subprocess.PIPE) - current_section_header = None - for line in ibtoolout.stdout: - if ibtool_section_re.match(line): - current_section_header = line - elif not ibtool_re.match(line): - if current_section_header: - sys.stdout.write(current_section_header) - current_section_header = None - sys.stdout.write(line) - return ibtoolout.returncode - - def _ConvertToBinary(self, dest): - subprocess.check_call([ - 'xcrun', 'plutil', '-convert', 'binary1', '-o', dest, dest]) - - def _CopyStringsFile(self, source, dest, convert_to_binary): - """Copies a .strings file using iconv to reconvert the input into UTF-16.""" - input_code = self._DetectInputEncoding(source) or "UTF-8" - - # Xcode's CpyCopyStringsFile / builtin-copyStrings seems to call - # CFPropertyListCreateFromXMLData() behind the scenes; at least it prints - # CFPropertyListCreateFromXMLData(): Old-style plist parser: missing - # semicolon in dictionary. - # on invalid files. Do the same kind of validation. - import CoreFoundation - s = open(source, 'rb').read() - d = CoreFoundation.CFDataCreate(None, s, len(s)) - _, error = CoreFoundation.CFPropertyListCreateFromXMLData(None, d, 0, None) - if error: - return - - fp = open(dest, 'wb') - fp.write(s.decode(input_code).encode('UTF-16')) - fp.close() - - if convert_to_binary == 'True': - self._ConvertToBinary(dest) - - def _DetectInputEncoding(self, file_name): - """Reads the first few bytes from file_name and tries to guess the text - encoding. 
Returns None as a guess if it can't detect it.""" - fp = open(file_name, 'rb') - try: - header = fp.read(3) - except e: - fp.close() - return None - fp.close() - if header.startswith("\xFE\xFF"): - return "UTF-16" - elif header.startswith("\xFF\xFE"): - return "UTF-16" - elif header.startswith("\xEF\xBB\xBF"): - return "UTF-8" - else: - return None - - def ExecCopyInfoPlist(self, source, dest, convert_to_binary, *keys): - """Copies the |source| Info.plist to the destination directory |dest|.""" - # Read the source Info.plist into memory. - fd = open(source, 'r') - lines = fd.read() - fd.close() - - # Insert synthesized key/value pairs (e.g. BuildMachineOSBuild). - plist = plistlib.readPlistFromString(lines) - if keys: - plist = dict(plist.items() + json.loads(keys[0]).items()) - lines = plistlib.writePlistToString(plist) - - # Go through all the environment variables and replace them as variables in - # the file. - IDENT_RE = re.compile(r'[/\s]') - for key in os.environ: - if key.startswith('_'): - continue - evar = '${%s}' % key - evalue = os.environ[key] - lines = string.replace(lines, evar, evalue) - - # Xcode supports various suffices on environment variables, which are - # all undocumented. :rfc1034identifier is used in the standard project - # template these days, and :identifier was used earlier. They are used to - # convert non-url characters into things that look like valid urls -- - # except that the replacement character for :identifier, '_' isn't valid - # in a URL either -- oops, hence :rfc1034identifier was born. - evar = '${%s:identifier}' % key - evalue = IDENT_RE.sub('_', os.environ[key]) - lines = string.replace(lines, evar, evalue) - - evar = '${%s:rfc1034identifier}' % key - evalue = IDENT_RE.sub('-', os.environ[key]) - lines = string.replace(lines, evar, evalue) - - # Remove any keys with values that haven't been replaced. - lines = lines.split('\n') - for i in range(len(lines)): - if lines[i].strip().startswith("${"): - lines[i] = None - lines[i - 1] = None - lines = '\n'.join(filter(lambda x: x is not None, lines)) - - # Write out the file with variables replaced. - fd = open(dest, 'w') - fd.write(lines) - fd.close() - - # Now write out PkgInfo file now that the Info.plist file has been - # "compiled". - self._WritePkgInfo(dest) - - if convert_to_binary == 'True': - self._ConvertToBinary(dest) - - def _WritePkgInfo(self, info_plist): - """This writes the PkgInfo file from the data stored in Info.plist.""" - plist = plistlib.readPlist(info_plist) - if not plist: - return - - # Only create PkgInfo for executable types. - package_type = plist['CFBundlePackageType'] - if package_type != 'APPL': - return - - # The format of PkgInfo is eight characters, representing the bundle type - # and bundle signature, each four characters. If that is missing, four - # '?' characters are used instead. - signature_code = plist.get('CFBundleSignature', '????') - if len(signature_code) != 4: # Wrong length resets everything, too. - signature_code = '?' * 4 - - dest = os.path.join(os.path.dirname(info_plist), 'PkgInfo') - fp = open(dest, 'w') - fp.write('%s%s' % (package_type, signature_code)) - fp.close() - - def ExecFlock(self, lockfile, *cmd_list): - """Emulates the most basic behavior of Linux's flock(1).""" - # Rely on exception handling to report errors. 
- fd = os.open(lockfile, os.O_RDONLY|os.O_NOCTTY|os.O_CREAT, 0o666) - fcntl.flock(fd, fcntl.LOCK_EX) - return subprocess.call(cmd_list) - - def ExecFilterLibtool(self, *cmd_list): - """Calls libtool and filters out '/path/to/libtool: file: foo.o has no - symbols'.""" - libtool_re = re.compile(r'^.*libtool: file: .* has no symbols$') - libtool_re5 = re.compile( - r'^.*libtool: warning for library: ' + - r'.* the table of contents is empty ' + - r'\(no object file members in the library define global symbols\)$') - env = os.environ.copy() - # Ref: - # http://www.opensource.apple.com/source/cctools/cctools-809/misc/libtool.c - # The problem with this flag is that it resets the file mtime on the file to - # epoch=0, e.g. 1970-1-1 or 1969-12-31 depending on timezone. - env['ZERO_AR_DATE'] = '1' - libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env) - _, err = libtoolout.communicate() - for line in err.splitlines(): - if not libtool_re.match(line) and not libtool_re5.match(line): - print >>sys.stderr, line - # Unconditionally touch the output .a file on the command line if present - # and the command succeeded. A bit hacky. - if not libtoolout.returncode: - for i in range(len(cmd_list) - 1): - if cmd_list[i] == "-o" and cmd_list[i+1].endswith('.a'): - os.utime(cmd_list[i+1], None) - break - return libtoolout.returncode - - def ExecPackageFramework(self, framework, version): - """Takes a path to Something.framework and the Current version of that and - sets up all the symlinks.""" - # Find the name of the binary based on the part before the ".framework". - binary = os.path.basename(framework).split('.')[0] - - CURRENT = 'Current' - RESOURCES = 'Resources' - VERSIONS = 'Versions' - - if not os.path.exists(os.path.join(framework, VERSIONS, version, binary)): - # Binary-less frameworks don't seem to contain symlinks (see e.g. - # chromium's out/Debug/org.chromium.Chromium.manifest/ bundle). - return - - # Move into the framework directory to set the symlinks correctly. - pwd = os.getcwd() - os.chdir(framework) - - # Set up the Current version. - self._Relink(version, os.path.join(VERSIONS, CURRENT)) - - # Set up the root symlinks. - self._Relink(os.path.join(VERSIONS, CURRENT, binary), binary) - self._Relink(os.path.join(VERSIONS, CURRENT, RESOURCES), RESOURCES) - - # Back to where we were before! - os.chdir(pwd) - - def _Relink(self, dest, link): - """Creates a symlink to |dest| named |link|. If |link| already exists, - it is overwritten.""" - if os.path.lexists(link): - os.remove(link) - os.symlink(dest, link) - - def ExecCompileXcassets(self, keys, *inputs): - """Compiles multiple .xcassets files into a single .car file. - - This invokes 'actool' to compile all the inputs .xcassets files. The - |keys| arguments is a json-encoded dictionary of extra arguments to - pass to 'actool' when the asset catalogs contains an application icon - or a launch image. - - Note that 'actool' does not create the Assets.car file if the asset - catalogs does not contains imageset. 
- """ - command_line = [ - 'xcrun', 'actool', '--output-format', 'human-readable-text', - '--compress-pngs', '--notices', '--warnings', '--errors', - ] - is_iphone_target = 'IPHONEOS_DEPLOYMENT_TARGET' in os.environ - if is_iphone_target: - platform = os.environ['CONFIGURATION'].split('-')[-1] - if platform not in ('iphoneos', 'iphonesimulator'): - platform = 'iphonesimulator' - command_line.extend([ - '--platform', platform, '--target-device', 'iphone', - '--target-device', 'ipad', '--minimum-deployment-target', - os.environ['IPHONEOS_DEPLOYMENT_TARGET'], '--compile', - os.path.abspath(os.environ['CONTENTS_FOLDER_PATH']), - ]) - else: - command_line.extend([ - '--platform', 'macosx', '--target-device', 'mac', - '--minimum-deployment-target', os.environ['MACOSX_DEPLOYMENT_TARGET'], - '--compile', - os.path.abspath(os.environ['UNLOCALIZED_RESOURCES_FOLDER_PATH']), - ]) - if keys: - keys = json.loads(keys) - for key, value in keys.iteritems(): - arg_name = '--' + key - if isinstance(value, bool): - if value: - command_line.append(arg_name) - elif isinstance(value, list): - for v in value: - command_line.append(arg_name) - command_line.append(str(v)) - else: - command_line.append(arg_name) - command_line.append(str(value)) - # Note: actool crashes if inputs path are relative, so use os.path.abspath - # to get absolute path name for inputs. - command_line.extend(map(os.path.abspath, inputs)) - subprocess.check_call(command_line) - - def ExecMergeInfoPlist(self, output, *inputs): - """Merge multiple .plist files into a single .plist file.""" - merged_plist = {} - for path in inputs: - plist = self._LoadPlistMaybeBinary(path) - self._MergePlist(merged_plist, plist) - plistlib.writePlist(merged_plist, output) - - def ExecCodeSignBundle(self, key, resource_rules, entitlements, provisioning): - """Code sign a bundle. - - This function tries to code sign an iOS bundle, following the same - algorithm as Xcode: - 1. copy ResourceRules.plist from the user or the SDK into the bundle, - 2. pick the provisioning profile that best match the bundle identifier, - and copy it into the bundle as embedded.mobileprovision, - 3. copy Entitlements.plist from user or SDK next to the bundle, - 4. code sign the bundle. - """ - resource_rules_path = self._InstallResourceRules(resource_rules) - substitutions, overrides = self._InstallProvisioningProfile( - provisioning, self._GetCFBundleIdentifier()) - entitlements_path = self._InstallEntitlements( - entitlements, substitutions, overrides) - subprocess.check_call([ - 'codesign', '--force', '--sign', key, '--resource-rules', - resource_rules_path, '--entitlements', entitlements_path, - os.path.join( - os.environ['TARGET_BUILD_DIR'], - os.environ['FULL_PRODUCT_NAME'])]) - - def _InstallResourceRules(self, resource_rules): - """Installs ResourceRules.plist from user or SDK into the bundle. - - Args: - resource_rules: string, optional, path to the ResourceRules.plist file - to use, default to "${SDKROOT}/ResourceRules.plist" - - Returns: - Path to the copy of ResourceRules.plist into the bundle. - """ - source_path = resource_rules - target_path = os.path.join( - os.environ['BUILT_PRODUCTS_DIR'], - os.environ['CONTENTS_FOLDER_PATH'], - 'ResourceRules.plist') - if not source_path: - source_path = os.path.join( - os.environ['SDKROOT'], 'ResourceRules.plist') - shutil.copy2(source_path, target_path) - return target_path - - def _InstallProvisioningProfile(self, profile, bundle_identifier): - """Installs embedded.mobileprovision into the bundle. 
- - Args: - profile: string, optional, short name of the .mobileprovision file - to use, if empty or the file is missing, the best file installed - will be used - bundle_identifier: string, value of CFBundleIdentifier from Info.plist - - Returns: - A tuple containing two dictionary: variables substitutions and values - to overrides when generating the entitlements file. - """ - source_path, provisioning_data, team_id = self._FindProvisioningProfile( - profile, bundle_identifier) - target_path = os.path.join( - os.environ['BUILT_PRODUCTS_DIR'], - os.environ['CONTENTS_FOLDER_PATH'], - 'embedded.mobileprovision') - shutil.copy2(source_path, target_path) - substitutions = self._GetSubstitutions(bundle_identifier, team_id + '.') - return substitutions, provisioning_data['Entitlements'] - - def _FindProvisioningProfile(self, profile, bundle_identifier): - """Finds the .mobileprovision file to use for signing the bundle. - - Checks all the installed provisioning profiles (or if the user specified - the PROVISIONING_PROFILE variable, only consult it) and select the most - specific that correspond to the bundle identifier. - - Args: - profile: string, optional, short name of the .mobileprovision file - to use, if empty or the file is missing, the best file installed - will be used - bundle_identifier: string, value of CFBundleIdentifier from Info.plist - - Returns: - A tuple of the path to the selected provisioning profile, the data of - the embedded plist in the provisioning profile and the team identifier - to use for code signing. - - Raises: - SystemExit: if no .mobileprovision can be used to sign the bundle. - """ - profiles_dir = os.path.join( - os.environ['HOME'], 'Library', 'MobileDevice', 'Provisioning Profiles') - if not os.path.isdir(profiles_dir): - print >>sys.stderr, ( - 'cannot find mobile provisioning for %s' % bundle_identifier) - sys.exit(1) - provisioning_profiles = None - if profile: - profile_path = os.path.join(profiles_dir, profile + '.mobileprovision') - if os.path.exists(profile_path): - provisioning_profiles = [profile_path] - if not provisioning_profiles: - provisioning_profiles = glob.glob( - os.path.join(profiles_dir, '*.mobileprovision')) - valid_provisioning_profiles = {} - for profile_path in provisioning_profiles: - profile_data = self._LoadProvisioningProfile(profile_path) - app_id_pattern = profile_data.get( - 'Entitlements', {}).get('application-identifier', '') - for team_identifier in profile_data.get('TeamIdentifier', []): - app_id = '%s.%s' % (team_identifier, bundle_identifier) - if fnmatch.fnmatch(app_id, app_id_pattern): - valid_provisioning_profiles[app_id_pattern] = ( - profile_path, profile_data, team_identifier) - if not valid_provisioning_profiles: - print >>sys.stderr, ( - 'cannot find mobile provisioning for %s' % bundle_identifier) - sys.exit(1) - # If the user has multiple provisioning profiles installed that can be - # used for ${bundle_identifier}, pick the most specific one (ie. the - # provisioning profile whose pattern is the longest). - selected_key = max(valid_provisioning_profiles, key=lambda v: len(v)) - return valid_provisioning_profiles[selected_key] - - def _LoadProvisioningProfile(self, profile_path): - """Extracts the plist embedded in a provisioning profile. - - Args: - profile_path: string, path to the .mobileprovision file - - Returns: - Content of the plist embedded in the provisioning profile as a dictionary. 
- """ - with tempfile.NamedTemporaryFile() as temp: - subprocess.check_call([ - 'security', 'cms', '-D', '-i', profile_path, '-o', temp.name]) - return self._LoadPlistMaybeBinary(temp.name) - - def _MergePlist(self, merged_plist, plist): - """Merge |plist| into |merged_plist|.""" - for key, value in plist.iteritems(): - if isinstance(value, dict): - merged_value = merged_plist.get(key, {}) - if isinstance(merged_value, dict): - self._MergePlist(merged_value, value) - merged_plist[key] = merged_value - else: - merged_plist[key] = value - else: - merged_plist[key] = value - - def _LoadPlistMaybeBinary(self, plist_path): - """Loads into a memory a plist possibly encoded in binary format. - - This is a wrapper around plistlib.readPlist that tries to convert the - plist to the XML format if it can't be parsed (assuming that it is in - the binary format). - - Args: - plist_path: string, path to a plist file, in XML or binary format - - Returns: - Content of the plist as a dictionary. - """ - try: - # First, try to read the file using plistlib that only supports XML, - # and if an exception is raised, convert a temporary copy to XML and - # load that copy. - return plistlib.readPlist(plist_path) - except: - pass - with tempfile.NamedTemporaryFile() as temp: - shutil.copy2(plist_path, temp.name) - subprocess.check_call(['plutil', '-convert', 'xml1', temp.name]) - return plistlib.readPlist(temp.name) - - def _GetSubstitutions(self, bundle_identifier, app_identifier_prefix): - """Constructs a dictionary of variable substitutions for Entitlements.plist. - - Args: - bundle_identifier: string, value of CFBundleIdentifier from Info.plist - app_identifier_prefix: string, value for AppIdentifierPrefix - - Returns: - Dictionary of substitutions to apply when generating Entitlements.plist. - """ - return { - 'CFBundleIdentifier': bundle_identifier, - 'AppIdentifierPrefix': app_identifier_prefix, - } - - def _GetCFBundleIdentifier(self): - """Extracts CFBundleIdentifier value from Info.plist in the bundle. - - Returns: - Value of CFBundleIdentifier in the Info.plist located in the bundle. - """ - info_plist_path = os.path.join( - os.environ['TARGET_BUILD_DIR'], - os.environ['INFOPLIST_PATH']) - info_plist_data = self._LoadPlistMaybeBinary(info_plist_path) - return info_plist_data['CFBundleIdentifier'] - - def _InstallEntitlements(self, entitlements, substitutions, overrides): - """Generates and install the ${BundleName}.xcent entitlements file. - - Expands variables "$(variable)" pattern in the source entitlements file, - add extra entitlements defined in the .mobileprovision file and the copy - the generated plist to "${BundlePath}.xcent". - - Args: - entitlements: string, optional, path to the Entitlements.plist template - to use, defaults to "${SDKROOT}/Entitlements.plist" - substitutions: dictionary, variable substitutions - overrides: dictionary, values to add to the entitlements - - Returns: - Path to the generated entitlements file. 
- """ - source_path = entitlements - target_path = os.path.join( - os.environ['BUILT_PRODUCTS_DIR'], - os.environ['PRODUCT_NAME'] + '.xcent') - if not source_path: - source_path = os.path.join( - os.environ['SDKROOT'], - 'Entitlements.plist') - shutil.copy2(source_path, target_path) - data = self._LoadPlistMaybeBinary(target_path) - data = self._ExpandVariables(data, substitutions) - if overrides: - for key in overrides: - if key not in data: - data[key] = overrides[key] - plistlib.writePlist(data, target_path) - return target_path - - def _ExpandVariables(self, data, substitutions): - """Expands variables "$(variable)" in data. - - Args: - data: object, can be either string, list or dictionary - substitutions: dictionary, variable substitutions to perform - - Returns: - Copy of data where each references to "$(variable)" has been replaced - by the corresponding value found in substitutions, or left intact if - the key was not found. - """ - if isinstance(data, str): - for key, value in substitutions.iteritems(): - data = data.replace('$(%s)' % key, value) - return data - if isinstance(data, list): - return [self._ExpandVariables(v, substitutions) for v in data] - if isinstance(data, dict): - return {k: self._ExpandVariables(data[k], substitutions) for k in data} - return data - -if __name__ == '__main__': - sys.exit(main(sys.argv[1:])) diff --git a/test/addons-napi/test_properties/build/test_properties.target.mk b/test/addons-napi/test_properties/build/test_properties.target.mk deleted file mode 100644 index da754422d29287..00000000000000 --- a/test/addons-napi/test_properties/build/test_properties.target.mk +++ /dev/null @@ -1,178 +0,0 @@ -# This file is generated by gyp; do not edit. - -TOOLSET := target -TARGET := test_properties -DEFS_Debug := \ - '-DNODE_GYP_MODULE_NAME=test_properties' \ - '-DUSING_UV_SHARED=1' \ - '-DUSING_V8_SHARED=1' \ - '-DV8_DEPRECATION_WARNINGS=1' \ - '-D_DARWIN_USE_64_BIT_INODE=1' \ - '-D_LARGEFILE_SOURCE' \ - '-D_FILE_OFFSET_BITS=64' \ - '-DBUILDING_NODE_EXTENSION' \ - '-DDEBUG' \ - '-D_DEBUG' \ - '-DV8_ENABLE_CHECKS' - -# Flags passed to all source files. -CFLAGS_Debug := \ - -O0 \ - -gdwarf-2 \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -Wall \ - -Wendif-labels \ - -W \ - -Wno-unused-parameter - -# Flags passed to only C files. -CFLAGS_C_Debug := \ - -fno-strict-aliasing - -# Flags passed to only C++ files. -CFLAGS_CC_Debug := \ - -std=gnu++0x \ - -stdlib=libc++ \ - -fno-rtti \ - -fno-exceptions \ - -fno-threadsafe-statics \ - -fno-strict-aliasing - -# Flags passed to only ObjC files. -CFLAGS_OBJC_Debug := - -# Flags passed to only ObjC++ files. -CFLAGS_OBJCC_Debug := - -INCS_Debug := \ - -I/Users/trott/io.js/include/node \ - -I/Users/trott/io.js/src \ - -I/Users/trott/io.js/deps/uv/include \ - -I/Users/trott/io.js/deps/v8/include - -DEFS_Release := \ - '-DNODE_GYP_MODULE_NAME=test_properties' \ - '-DUSING_UV_SHARED=1' \ - '-DUSING_V8_SHARED=1' \ - '-DV8_DEPRECATION_WARNINGS=1' \ - '-D_DARWIN_USE_64_BIT_INODE=1' \ - '-D_LARGEFILE_SOURCE' \ - '-D_FILE_OFFSET_BITS=64' \ - '-DBUILDING_NODE_EXTENSION' - -# Flags passed to all source files. -CFLAGS_Release := \ - -Os \ - -gdwarf-2 \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -Wall \ - -Wendif-labels \ - -W \ - -Wno-unused-parameter - -# Flags passed to only C files. -CFLAGS_C_Release := \ - -fno-strict-aliasing - -# Flags passed to only C++ files. 
-CFLAGS_CC_Release := \ - -std=gnu++0x \ - -stdlib=libc++ \ - -fno-rtti \ - -fno-exceptions \ - -fno-threadsafe-statics \ - -fno-strict-aliasing - -# Flags passed to only ObjC files. -CFLAGS_OBJC_Release := - -# Flags passed to only ObjC++ files. -CFLAGS_OBJCC_Release := - -INCS_Release := \ - -I/Users/trott/io.js/include/node \ - -I/Users/trott/io.js/src \ - -I/Users/trott/io.js/deps/uv/include \ - -I/Users/trott/io.js/deps/v8/include - -OBJS := \ - $(obj).target/$(TARGET)/test_properties.o - -# Add to the list of files we specially track dependencies for. -all_deps += $(OBJS) - -# CFLAGS et al overrides must be target-local. -# See "Target-specific Variable Values" in the GNU Make manual. -$(OBJS): TOOLSET := $(TOOLSET) -$(OBJS): GYP_CFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) -$(OBJS): GYP_CXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) -$(OBJS): GYP_OBJCFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) $(CFLAGS_OBJC_$(BUILDTYPE)) -$(OBJS): GYP_OBJCXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) $(CFLAGS_OBJCC_$(BUILDTYPE)) - -# Suffix rules, putting all outputs into $(obj). - -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) - -# Try building from generated source, too. - -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) - -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) - -# End of this set of suffix rules -### Rules for final target. -LDFLAGS_Debug := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -L$(builddir) \ - -stdlib=libc++ - -LIBTOOLFLAGS_Debug := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first - -LDFLAGS_Release := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -L$(builddir) \ - -stdlib=libc++ - -LIBTOOLFLAGS_Release := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first - -LIBS := - -$(builddir)/test_properties.node: GYP_LDFLAGS := $(LDFLAGS_$(BUILDTYPE)) -$(builddir)/test_properties.node: LIBS := $(LIBS) -$(builddir)/test_properties.node: GYP_LIBTOOLFLAGS := $(LIBTOOLFLAGS_$(BUILDTYPE)) -$(builddir)/test_properties.node: TOOLSET := $(TOOLSET) -$(builddir)/test_properties.node: $(OBJS) FORCE_DO_CMD - $(call do_cmd,solink_module) - -all_deps += $(builddir)/test_properties.node -# Add target alias -.PHONY: test_properties -test_properties: $(builddir)/test_properties.node - -# Short alias for building this executable. -.PHONY: test_properties.node -test_properties.node: $(builddir)/test_properties.node - -# Add executable to "all" target. -.PHONY: all -all: $(builddir)/test_properties.node - diff --git a/test/addons-napi/test_reference/build/Makefile b/test/addons-napi/test_reference/build/Makefile deleted file mode 100644 index 820fb750ac1f48..00000000000000 --- a/test/addons-napi/test_reference/build/Makefile +++ /dev/null @@ -1,342 +0,0 @@ -# We borrow heavily from the kernel build setup, though we are simpler since -# we don't have Kconfig tweaking settings on us. - -# The implicit make rules have it looking for RCS files, among other things. -# We instead explicitly write all the rules we care about. 
-# It's even quicker (saves ~200ms) to pass -r on the command line. -MAKEFLAGS=-r - -# The source directory tree. -srcdir := .. -abs_srcdir := $(abspath $(srcdir)) - -# The name of the builddir. -builddir_name ?= . - -# The V=1 flag on command line makes us verbosely print command lines. -ifdef V - quiet= -else - quiet=quiet_ -endif - -# Specify BUILDTYPE=Release on the command line for a release build. -BUILDTYPE ?= Release - -# Directory all our build output goes into. -# Note that this must be two directories beneath src/ for unit tests to pass, -# as they reach into the src/ directory for data with relative paths. -builddir ?= $(builddir_name)/$(BUILDTYPE) -abs_builddir := $(abspath $(builddir)) -depsdir := $(builddir)/.deps - -# Object output directory. -obj := $(builddir)/obj -abs_obj := $(abspath $(obj)) - -# We build up a list of every single one of the targets so we can slurp in the -# generated dependency rule Makefiles in one pass. -all_deps := - - - -CC.target ?= $(CC) -CFLAGS.target ?= $(CPPFLAGS) $(CFLAGS) -CXX.target ?= $(CXX) -CXXFLAGS.target ?= $(CPPFLAGS) $(CXXFLAGS) -LINK.target ?= $(LINK) -LDFLAGS.target ?= $(LDFLAGS) -AR.target ?= $(AR) - -# C++ apps need to be linked with g++. -LINK ?= $(CXX.target) - -# TODO(evan): move all cross-compilation logic to gyp-time so we don't need -# to replicate this environment fallback in make as well. -CC.host ?= gcc -CFLAGS.host ?= $(CPPFLAGS_host) $(CFLAGS_host) -CXX.host ?= g++ -CXXFLAGS.host ?= $(CPPFLAGS_host) $(CXXFLAGS_host) -LINK.host ?= $(CXX.host) -LDFLAGS.host ?= -AR.host ?= ar - -# Define a dir function that can handle spaces. -# http://www.gnu.org/software/make/manual/make.html#Syntax-of-Functions -# "leading spaces cannot appear in the text of the first argument as written. -# These characters can be put into the argument value by variable substitution." -empty := -space := $(empty) $(empty) - -# http://stackoverflow.com/questions/1189781/using-make-dir-or-notdir-on-a-path-with-spaces -replace_spaces = $(subst $(space),?,$1) -unreplace_spaces = $(subst ?,$(space),$1) -dirx = $(call unreplace_spaces,$(dir $(call replace_spaces,$1))) - -# Flags to make gcc output dependency info. Note that you need to be -# careful here to use the flags that ccache and distcc can understand. -# We write to a dep file on the side first and then rename at the end -# so we can't end up with a broken dep file. -depfile = $(depsdir)/$(call replace_spaces,$@).d -DEPFLAGS = -MMD -MF $(depfile).raw - -# We have to fixup the deps output in a few ways. -# (1) the file output should mention the proper .o file. -# ccache or distcc lose the path to the target, so we convert a rule of -# the form: -# foobar.o: DEP1 DEP2 -# into -# path/to/foobar.o: DEP1 DEP2 -# (2) we want missing files not to cause us to fail to build. -# We want to rewrite -# foobar.o: DEP1 DEP2 \ -# DEP3 -# to -# DEP1: -# DEP2: -# DEP3: -# so if the files are missing, they're just considered phony rules. -# We have to do some pretty insane escaping to get those backslashes -# and dollar signs past make, the shell, and sed at the same time. -# Doesn't work with spaces, but that's fine: .d files have spaces in -# their names replaced with other characters. -define fixup_dep -# The depfile may not exist if the input file didn't have any #includes. -touch $(depfile).raw -# Fixup path as in (1). -sed -e "s|^$(notdir $@)|$@|" $(depfile).raw >> $(depfile) -# Add extra rules as in (2). 
-# We remove slashes and replace spaces with new lines; -# remove blank lines; -# delete the first line and append a colon to the remaining lines. -sed -e 's|\\||' -e 'y| |\n|' $(depfile).raw |\ - grep -v '^$$' |\ - sed -e 1d -e 's|$$|:|' \ - >> $(depfile) -rm $(depfile).raw -endef - -# Command definitions: -# - cmd_foo is the actual command to run; -# - quiet_cmd_foo is the brief-output summary of the command. - -quiet_cmd_cc = CC($(TOOLSET)) $@ -cmd_cc = $(CC.$(TOOLSET)) $(GYP_CFLAGS) $(DEPFLAGS) $(CFLAGS.$(TOOLSET)) -c -o $@ $< - -quiet_cmd_cxx = CXX($(TOOLSET)) $@ -cmd_cxx = $(CXX.$(TOOLSET)) $(GYP_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< - -quiet_cmd_objc = CXX($(TOOLSET)) $@ -cmd_objc = $(CC.$(TOOLSET)) $(GYP_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $< - -quiet_cmd_objcxx = CXX($(TOOLSET)) $@ -cmd_objcxx = $(CXX.$(TOOLSET)) $(GYP_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $< - -# Commands for precompiled header files. -quiet_cmd_pch_c = CXX($(TOOLSET)) $@ -cmd_pch_c = $(CC.$(TOOLSET)) $(GYP_PCH_CFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< -quiet_cmd_pch_cc = CXX($(TOOLSET)) $@ -cmd_pch_cc = $(CC.$(TOOLSET)) $(GYP_PCH_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< -quiet_cmd_pch_m = CXX($(TOOLSET)) $@ -cmd_pch_m = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $< -quiet_cmd_pch_mm = CXX($(TOOLSET)) $@ -cmd_pch_mm = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $< - -# gyp-mac-tool is written next to the root Makefile by gyp. -# Use $(4) for the command, since $(2) and $(3) are used as flag by do_cmd -# already. -quiet_cmd_mac_tool = MACTOOL $(4) $< -cmd_mac_tool = ./gyp-mac-tool $(4) $< "$@" - -quiet_cmd_mac_package_framework = PACKAGE FRAMEWORK $@ -cmd_mac_package_framework = ./gyp-mac-tool package-framework "$@" $(4) - -quiet_cmd_infoplist = INFOPLIST $@ -cmd_infoplist = $(CC.$(TOOLSET)) -E -P -Wno-trigraphs -x c $(INFOPLIST_DEFINES) "$<" -o "$@" - -quiet_cmd_touch = TOUCH $@ -cmd_touch = touch $@ - -quiet_cmd_copy = COPY $@ -# send stderr to /dev/null to ignore messages when linking directories. -cmd_copy = rm -rf "$@" && cp -af "$<" "$@" - -quiet_cmd_alink = LIBTOOL-STATIC $@ -cmd_alink = rm -f $@ && ./gyp-mac-tool filter-libtool libtool $(GYP_LIBTOOLFLAGS) -static -o $@ $(filter %.o,$^) - -quiet_cmd_link = LINK($(TOOLSET)) $@ -cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS) - -quiet_cmd_solink = SOLINK($(TOOLSET)) $@ -cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS) - -quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@ -cmd_solink_module = $(LINK.$(TOOLSET)) -bundle $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS) - - -# Define an escape_quotes function to escape single quotes. -# This allows us to handle quotes properly as long as we always use -# use single quotes and escape_quotes. -escape_quotes = $(subst ','\'',$(1)) -# This comment is here just to include a ' to unconfuse syntax highlighting. -# Define an escape_vars function to escape '$' variable syntax. -# This allows us to read/write command lines with shell variables (e.g. -# $LD_LIBRARY_PATH), without triggering make substitution. -escape_vars = $(subst $$,$$$$,$(1)) -# Helper that expands to a shell command to echo a string exactly as it is in -# make. This uses printf instead of echo because printf's behaviour with respect -# to escape sequences is more portable than echo's across different shells -# (e.g., dash, bash). 
-exact_echo = printf '%s\n' '$(call escape_quotes,$(1))' - -# Helper to compare the command we're about to run against the command -# we logged the last time we ran the command. Produces an empty -# string (false) when the commands match. -# Tricky point: Make has no string-equality test function. -# The kernel uses the following, but it seems like it would have false -# positives, where one string reordered its arguments. -# arg_check = $(strip $(filter-out $(cmd_$(1)), $(cmd_$@)) \ -# $(filter-out $(cmd_$@), $(cmd_$(1)))) -# We instead substitute each for the empty string into the other, and -# say they're equal if both substitutions produce the empty string. -# .d files contain ? instead of spaces, take that into account. -command_changed = $(or $(subst $(cmd_$(1)),,$(cmd_$(call replace_spaces,$@))),\ - $(subst $(cmd_$(call replace_spaces,$@)),,$(cmd_$(1)))) - -# Helper that is non-empty when a prerequisite changes. -# Normally make does this implicitly, but we force rules to always run -# so we can check their command lines. -# $? -- new prerequisites -# $| -- order-only dependencies -prereq_changed = $(filter-out FORCE_DO_CMD,$(filter-out $|,$?)) - -# Helper that executes all postbuilds until one fails. -define do_postbuilds - @E=0;\ - for p in $(POSTBUILDS); do\ - eval $$p;\ - E=$$?;\ - if [ $$E -ne 0 ]; then\ - break;\ - fi;\ - done;\ - if [ $$E -ne 0 ]; then\ - rm -rf "$@";\ - exit $$E;\ - fi -endef - -# do_cmd: run a command via the above cmd_foo names, if necessary. -# Should always run for a given target to handle command-line changes. -# Second argument, if non-zero, makes it do asm/C/C++ dependency munging. -# Third argument, if non-zero, makes it do POSTBUILDS processing. -# Note: We intentionally do NOT call dirx for depfile, since it contains ? for -# spaces already and dirx strips the ? characters. -define do_cmd -$(if $(or $(command_changed),$(prereq_changed)), - @$(call exact_echo, $($(quiet)cmd_$(1))) - @mkdir -p "$(call dirx,$@)" "$(dir $(depfile))" - $(if $(findstring flock,$(word 2,$(cmd_$1))), - @$(cmd_$(1)) - @echo " $(quiet_cmd_$(1)): Finished", - @$(cmd_$(1)) - ) - @$(call exact_echo,$(call escape_vars,cmd_$(call replace_spaces,$@) := $(cmd_$(1)))) > $(depfile) - @$(if $(2),$(fixup_dep)) - $(if $(and $(3), $(POSTBUILDS)), - $(call do_postbuilds) - ) -) -endef - -# Declare the "all" target first so it is the default, -# even though we don't have the deps yet. -.PHONY: all -all: - -# make looks for ways to re-generate included makefiles, but in our case, we -# don't have a direct way. Explicitly telling make that it has nothing to do -# for them makes it go faster. -%.d: ; - -# Use FORCE_DO_CMD to force a target to run. Should be coupled with -# do_cmd. -.PHONY: FORCE_DO_CMD -FORCE_DO_CMD: - -TOOLSET := target -# Suffix rules, putting all outputs into $(obj). -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.m FORCE_DO_CMD - @$(call do_cmd,objc,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.mm FORCE_DO_CMD - @$(call do_cmd,objcxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.S FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.s FORCE_DO_CMD - @$(call do_cmd,cc,1) - -# Try building from generated source, too. 
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.m FORCE_DO_CMD - @$(call do_cmd,objc,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.mm FORCE_DO_CMD - @$(call do_cmd,objcxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.S FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.s FORCE_DO_CMD - @$(call do_cmd,cc,1) - -$(obj).$(TOOLSET)/%.o: $(obj)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.m FORCE_DO_CMD - @$(call do_cmd,objc,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.mm FORCE_DO_CMD - @$(call do_cmd,objcxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.S FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.s FORCE_DO_CMD - @$(call do_cmd,cc,1) - - -ifeq ($(strip $(foreach prefix,$(NO_LOAD),\ - $(findstring $(join ^,$(prefix)),\ - $(join ^,test_reference.target.mk)))),) - include test_reference.target.mk -endif - -quiet_cmd_regen_makefile = ACTION Regenerating $@ -cmd_regen_makefile = cd $(srcdir); /Users/trott/io.js/deps/npm/node_modules/node-gyp/gyp/gyp_main.py -fmake --ignore-environment "--toplevel-dir=." -I/Users/trott/io.js/test/addons-napi/test_reference/build/config.gypi -I/Users/trott/io.js/deps/npm/node_modules/node-gyp/addon.gypi -I/Users/trott/io.js/common.gypi "--depth=." "-Goutput_dir=." "--generator-output=build" "-Dlibrary=shared_library" "-Dvisibility=default" "-Dnode_root_dir=/Users/trott/io.js" "-Dnode_gyp_dir=/Users/trott/io.js/deps/npm/node_modules/node-gyp" "-Dnode_lib_file=/Users/trott/io.js/$(Configuration)/node.lib" "-Dmodule_root_dir=/Users/trott/io.js/test/addons-napi/test_reference" "-Dnode_engine=v8" binding.gyp -Makefile: $(srcdir)/../../../deps/npm/node_modules/node-gyp/addon.gypi $(srcdir)/build/config.gypi $(srcdir)/binding.gyp $(srcdir)/../../../common.gypi - $(call do_cmd,regen_makefile) - -# "all" is a concatenation of the "all" targets from all the included -# sub-makefiles. This is just here to clarify. -all: - -# Add in dependency-tracking rules. $(all_deps) is the list of every single -# target in our tree. Only consider the ones with .d (dependency) info: -d_files := $(wildcard $(foreach f,$(all_deps),$(depsdir)/$(f).d)) -ifneq ($(d_files),) - include $(d_files) -endif diff --git a/test/addons-napi/test_reference/build/binding.Makefile b/test/addons-napi/test_reference/build/binding.Makefile deleted file mode 100644 index f022b877d237a8..00000000000000 --- a/test/addons-napi/test_reference/build/binding.Makefile +++ /dev/null @@ -1,6 +0,0 @@ -# This file is generated by gyp; do not edit. - -export builddir_name ?= ./build/. -.PHONY: all -all: - $(MAKE) test_reference diff --git a/test/addons-napi/test_reference/build/config.gypi b/test/addons-napi/test_reference/build/config.gypi deleted file mode 100644 index 6a35cfccbb772b..00000000000000 --- a/test/addons-napi/test_reference/build/config.gypi +++ /dev/null @@ -1,70 +0,0 @@ -# Do not edit. 
File was generated by node-gyp's "configure" step -{ - "target_defaults": { - "cflags": [], - "default_configuration": "Release", - "defines": [], - "include_dirs": [], - "libraries": [] - }, - "variables": { - "asan": 0, - "coverage": "false", - "debug_devtools": "node", - "debug_http2": "false", - "debug_nghttp2": "false", - "force_dynamic_crt": 0, - "host_arch": "x64", - "icu_data_file": "icudt59l.dat", - "icu_data_in": "../../deps/icu-small/source/data/in/icudt59l.dat", - "icu_endianness": "l", - "icu_gyp_path": "tools/icu/icu-generic.gyp", - "icu_locales": "en,root", - "icu_path": "deps/icu-small", - "icu_small": "true", - "icu_ver_major": "59", - "llvm_version": 0, - "node_byteorder": "little", - "node_enable_d8": "false", - "node_enable_v8_vtunejit": "false", - "node_install_npm": "true", - "node_module_version": 58, - "node_no_browser_globals": "false", - "node_prefix": "/usr/local", - "node_release_urlbase": "", - "node_shared": "false", - "node_shared_cares": "false", - "node_shared_http_parser": "false", - "node_shared_libuv": "false", - "node_shared_openssl": "false", - "node_shared_zlib": "false", - "node_tag": "", - "node_use_bundled_v8": "true", - "node_use_dtrace": "true", - "node_use_etw": "false", - "node_use_lttng": "false", - "node_use_openssl": "true", - "node_use_perfctr": "false", - "node_use_v8_platform": "true", - "node_without_node_options": "false", - "openssl_fips": "", - "openssl_no_asm": 0, - "shlib_suffix": "58.dylib", - "target_arch": "x64", - "uv_parent_path": "/deps/uv/", - "uv_use_dtrace": "true", - "v8_enable_gdbjit": 0, - "v8_enable_i18n_support": 1, - "v8_enable_inspector": 1, - "v8_no_strict_aliasing": 1, - "v8_optimized_debug": 0, - "v8_promise_internal_field_count": 1, - "v8_random_seed": 0, - "v8_trace_maps": 0, - "v8_use_snapshot": "true", - "want_separate_host_toolset": 0, - "xcode_version": "8.0", - "nodedir": "/Users/trott/io.js", - "standalone_static_library": 1 - } -} diff --git a/test/addons-napi/test_reference/build/gyp-mac-tool b/test/addons-napi/test_reference/build/gyp-mac-tool deleted file mode 100755 index 8ef02b0493a003..00000000000000 --- a/test/addons-napi/test_reference/build/gyp-mac-tool +++ /dev/null @@ -1,611 +0,0 @@ -#!/usr/bin/env python -# Generated by gyp. Do not edit. -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Utility functions to perform Xcode-style build steps. - -These functions are executed via gyp-mac-tool when using the Makefile generator. -""" - -import fcntl -import fnmatch -import glob -import json -import os -import plistlib -import re -import shutil -import string -import subprocess -import sys -import tempfile - - -def main(args): - executor = MacTool() - exit_code = executor.Dispatch(args) - if exit_code is not None: - sys.exit(exit_code) - - -class MacTool(object): - """This class performs all the Mac tooling steps. 
The methods can either be - executed directly, or dispatched from an argument list.""" - - def Dispatch(self, args): - """Dispatches a string command to a method.""" - if len(args) < 1: - raise Exception("Not enough arguments") - - method = "Exec%s" % self._CommandifyName(args[0]) - return getattr(self, method)(*args[1:]) - - def _CommandifyName(self, name_string): - """Transforms a tool name like copy-info-plist to CopyInfoPlist""" - return name_string.title().replace('-', '') - - def ExecCopyBundleResource(self, source, dest, convert_to_binary): - """Copies a resource file to the bundle/Resources directory, performing any - necessary compilation on each resource.""" - extension = os.path.splitext(source)[1].lower() - if os.path.isdir(source): - # Copy tree. - # TODO(thakis): This copies file attributes like mtime, while the - # single-file branch below doesn't. This should probably be changed to - # be consistent with the single-file branch. - if os.path.exists(dest): - shutil.rmtree(dest) - shutil.copytree(source, dest) - elif extension == '.xib': - return self._CopyXIBFile(source, dest) - elif extension == '.storyboard': - return self._CopyXIBFile(source, dest) - elif extension == '.strings': - self._CopyStringsFile(source, dest, convert_to_binary) - else: - shutil.copy(source, dest) - - def _CopyXIBFile(self, source, dest): - """Compiles a XIB file with ibtool into a binary plist in the bundle.""" - - # ibtool sometimes crashes with relative paths. See crbug.com/314728. - base = os.path.dirname(os.path.realpath(__file__)) - if os.path.relpath(source): - source = os.path.join(base, source) - if os.path.relpath(dest): - dest = os.path.join(base, dest) - - args = ['xcrun', 'ibtool', '--errors', '--warnings', '--notices', - '--output-format', 'human-readable-text', '--compile', dest, source] - ibtool_section_re = re.compile(r'/\*.*\*/') - ibtool_re = re.compile(r'.*note:.*is clipping its content') - ibtoolout = subprocess.Popen(args, stdout=subprocess.PIPE) - current_section_header = None - for line in ibtoolout.stdout: - if ibtool_section_re.match(line): - current_section_header = line - elif not ibtool_re.match(line): - if current_section_header: - sys.stdout.write(current_section_header) - current_section_header = None - sys.stdout.write(line) - return ibtoolout.returncode - - def _ConvertToBinary(self, dest): - subprocess.check_call([ - 'xcrun', 'plutil', '-convert', 'binary1', '-o', dest, dest]) - - def _CopyStringsFile(self, source, dest, convert_to_binary): - """Copies a .strings file using iconv to reconvert the input into UTF-16.""" - input_code = self._DetectInputEncoding(source) or "UTF-8" - - # Xcode's CpyCopyStringsFile / builtin-copyStrings seems to call - # CFPropertyListCreateFromXMLData() behind the scenes; at least it prints - # CFPropertyListCreateFromXMLData(): Old-style plist parser: missing - # semicolon in dictionary. - # on invalid files. Do the same kind of validation. - import CoreFoundation - s = open(source, 'rb').read() - d = CoreFoundation.CFDataCreate(None, s, len(s)) - _, error = CoreFoundation.CFPropertyListCreateFromXMLData(None, d, 0, None) - if error: - return - - fp = open(dest, 'wb') - fp.write(s.decode(input_code).encode('UTF-16')) - fp.close() - - if convert_to_binary == 'True': - self._ConvertToBinary(dest) - - def _DetectInputEncoding(self, file_name): - """Reads the first few bytes from file_name and tries to guess the text - encoding. 
Returns None as a guess if it can't detect it.""" - fp = open(file_name, 'rb') - try: - header = fp.read(3) - except e: - fp.close() - return None - fp.close() - if header.startswith("\xFE\xFF"): - return "UTF-16" - elif header.startswith("\xFF\xFE"): - return "UTF-16" - elif header.startswith("\xEF\xBB\xBF"): - return "UTF-8" - else: - return None - - def ExecCopyInfoPlist(self, source, dest, convert_to_binary, *keys): - """Copies the |source| Info.plist to the destination directory |dest|.""" - # Read the source Info.plist into memory. - fd = open(source, 'r') - lines = fd.read() - fd.close() - - # Insert synthesized key/value pairs (e.g. BuildMachineOSBuild). - plist = plistlib.readPlistFromString(lines) - if keys: - plist = dict(plist.items() + json.loads(keys[0]).items()) - lines = plistlib.writePlistToString(plist) - - # Go through all the environment variables and replace them as variables in - # the file. - IDENT_RE = re.compile(r'[/\s]') - for key in os.environ: - if key.startswith('_'): - continue - evar = '${%s}' % key - evalue = os.environ[key] - lines = string.replace(lines, evar, evalue) - - # Xcode supports various suffices on environment variables, which are - # all undocumented. :rfc1034identifier is used in the standard project - # template these days, and :identifier was used earlier. They are used to - # convert non-url characters into things that look like valid urls -- - # except that the replacement character for :identifier, '_' isn't valid - # in a URL either -- oops, hence :rfc1034identifier was born. - evar = '${%s:identifier}' % key - evalue = IDENT_RE.sub('_', os.environ[key]) - lines = string.replace(lines, evar, evalue) - - evar = '${%s:rfc1034identifier}' % key - evalue = IDENT_RE.sub('-', os.environ[key]) - lines = string.replace(lines, evar, evalue) - - # Remove any keys with values that haven't been replaced. - lines = lines.split('\n') - for i in range(len(lines)): - if lines[i].strip().startswith("${"): - lines[i] = None - lines[i - 1] = None - lines = '\n'.join(filter(lambda x: x is not None, lines)) - - # Write out the file with variables replaced. - fd = open(dest, 'w') - fd.write(lines) - fd.close() - - # Now write out PkgInfo file now that the Info.plist file has been - # "compiled". - self._WritePkgInfo(dest) - - if convert_to_binary == 'True': - self._ConvertToBinary(dest) - - def _WritePkgInfo(self, info_plist): - """This writes the PkgInfo file from the data stored in Info.plist.""" - plist = plistlib.readPlist(info_plist) - if not plist: - return - - # Only create PkgInfo for executable types. - package_type = plist['CFBundlePackageType'] - if package_type != 'APPL': - return - - # The format of PkgInfo is eight characters, representing the bundle type - # and bundle signature, each four characters. If that is missing, four - # '?' characters are used instead. - signature_code = plist.get('CFBundleSignature', '????') - if len(signature_code) != 4: # Wrong length resets everything, too. - signature_code = '?' * 4 - - dest = os.path.join(os.path.dirname(info_plist), 'PkgInfo') - fp = open(dest, 'w') - fp.write('%s%s' % (package_type, signature_code)) - fp.close() - - def ExecFlock(self, lockfile, *cmd_list): - """Emulates the most basic behavior of Linux's flock(1).""" - # Rely on exception handling to report errors. 
- fd = os.open(lockfile, os.O_RDONLY|os.O_NOCTTY|os.O_CREAT, 0o666) - fcntl.flock(fd, fcntl.LOCK_EX) - return subprocess.call(cmd_list) - - def ExecFilterLibtool(self, *cmd_list): - """Calls libtool and filters out '/path/to/libtool: file: foo.o has no - symbols'.""" - libtool_re = re.compile(r'^.*libtool: file: .* has no symbols$') - libtool_re5 = re.compile( - r'^.*libtool: warning for library: ' + - r'.* the table of contents is empty ' + - r'\(no object file members in the library define global symbols\)$') - env = os.environ.copy() - # Ref: - # http://www.opensource.apple.com/source/cctools/cctools-809/misc/libtool.c - # The problem with this flag is that it resets the file mtime on the file to - # epoch=0, e.g. 1970-1-1 or 1969-12-31 depending on timezone. - env['ZERO_AR_DATE'] = '1' - libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env) - _, err = libtoolout.communicate() - for line in err.splitlines(): - if not libtool_re.match(line) and not libtool_re5.match(line): - print >>sys.stderr, line - # Unconditionally touch the output .a file on the command line if present - # and the command succeeded. A bit hacky. - if not libtoolout.returncode: - for i in range(len(cmd_list) - 1): - if cmd_list[i] == "-o" and cmd_list[i+1].endswith('.a'): - os.utime(cmd_list[i+1], None) - break - return libtoolout.returncode - - def ExecPackageFramework(self, framework, version): - """Takes a path to Something.framework and the Current version of that and - sets up all the symlinks.""" - # Find the name of the binary based on the part before the ".framework". - binary = os.path.basename(framework).split('.')[0] - - CURRENT = 'Current' - RESOURCES = 'Resources' - VERSIONS = 'Versions' - - if not os.path.exists(os.path.join(framework, VERSIONS, version, binary)): - # Binary-less frameworks don't seem to contain symlinks (see e.g. - # chromium's out/Debug/org.chromium.Chromium.manifest/ bundle). - return - - # Move into the framework directory to set the symlinks correctly. - pwd = os.getcwd() - os.chdir(framework) - - # Set up the Current version. - self._Relink(version, os.path.join(VERSIONS, CURRENT)) - - # Set up the root symlinks. - self._Relink(os.path.join(VERSIONS, CURRENT, binary), binary) - self._Relink(os.path.join(VERSIONS, CURRENT, RESOURCES), RESOURCES) - - # Back to where we were before! - os.chdir(pwd) - - def _Relink(self, dest, link): - """Creates a symlink to |dest| named |link|. If |link| already exists, - it is overwritten.""" - if os.path.lexists(link): - os.remove(link) - os.symlink(dest, link) - - def ExecCompileXcassets(self, keys, *inputs): - """Compiles multiple .xcassets files into a single .car file. - - This invokes 'actool' to compile all the inputs .xcassets files. The - |keys| arguments is a json-encoded dictionary of extra arguments to - pass to 'actool' when the asset catalogs contains an application icon - or a launch image. - - Note that 'actool' does not create the Assets.car file if the asset - catalogs does not contains imageset. 
- """ - command_line = [ - 'xcrun', 'actool', '--output-format', 'human-readable-text', - '--compress-pngs', '--notices', '--warnings', '--errors', - ] - is_iphone_target = 'IPHONEOS_DEPLOYMENT_TARGET' in os.environ - if is_iphone_target: - platform = os.environ['CONFIGURATION'].split('-')[-1] - if platform not in ('iphoneos', 'iphonesimulator'): - platform = 'iphonesimulator' - command_line.extend([ - '--platform', platform, '--target-device', 'iphone', - '--target-device', 'ipad', '--minimum-deployment-target', - os.environ['IPHONEOS_DEPLOYMENT_TARGET'], '--compile', - os.path.abspath(os.environ['CONTENTS_FOLDER_PATH']), - ]) - else: - command_line.extend([ - '--platform', 'macosx', '--target-device', 'mac', - '--minimum-deployment-target', os.environ['MACOSX_DEPLOYMENT_TARGET'], - '--compile', - os.path.abspath(os.environ['UNLOCALIZED_RESOURCES_FOLDER_PATH']), - ]) - if keys: - keys = json.loads(keys) - for key, value in keys.iteritems(): - arg_name = '--' + key - if isinstance(value, bool): - if value: - command_line.append(arg_name) - elif isinstance(value, list): - for v in value: - command_line.append(arg_name) - command_line.append(str(v)) - else: - command_line.append(arg_name) - command_line.append(str(value)) - # Note: actool crashes if inputs path are relative, so use os.path.abspath - # to get absolute path name for inputs. - command_line.extend(map(os.path.abspath, inputs)) - subprocess.check_call(command_line) - - def ExecMergeInfoPlist(self, output, *inputs): - """Merge multiple .plist files into a single .plist file.""" - merged_plist = {} - for path in inputs: - plist = self._LoadPlistMaybeBinary(path) - self._MergePlist(merged_plist, plist) - plistlib.writePlist(merged_plist, output) - - def ExecCodeSignBundle(self, key, resource_rules, entitlements, provisioning): - """Code sign a bundle. - - This function tries to code sign an iOS bundle, following the same - algorithm as Xcode: - 1. copy ResourceRules.plist from the user or the SDK into the bundle, - 2. pick the provisioning profile that best match the bundle identifier, - and copy it into the bundle as embedded.mobileprovision, - 3. copy Entitlements.plist from user or SDK next to the bundle, - 4. code sign the bundle. - """ - resource_rules_path = self._InstallResourceRules(resource_rules) - substitutions, overrides = self._InstallProvisioningProfile( - provisioning, self._GetCFBundleIdentifier()) - entitlements_path = self._InstallEntitlements( - entitlements, substitutions, overrides) - subprocess.check_call([ - 'codesign', '--force', '--sign', key, '--resource-rules', - resource_rules_path, '--entitlements', entitlements_path, - os.path.join( - os.environ['TARGET_BUILD_DIR'], - os.environ['FULL_PRODUCT_NAME'])]) - - def _InstallResourceRules(self, resource_rules): - """Installs ResourceRules.plist from user or SDK into the bundle. - - Args: - resource_rules: string, optional, path to the ResourceRules.plist file - to use, default to "${SDKROOT}/ResourceRules.plist" - - Returns: - Path to the copy of ResourceRules.plist into the bundle. - """ - source_path = resource_rules - target_path = os.path.join( - os.environ['BUILT_PRODUCTS_DIR'], - os.environ['CONTENTS_FOLDER_PATH'], - 'ResourceRules.plist') - if not source_path: - source_path = os.path.join( - os.environ['SDKROOT'], 'ResourceRules.plist') - shutil.copy2(source_path, target_path) - return target_path - - def _InstallProvisioningProfile(self, profile, bundle_identifier): - """Installs embedded.mobileprovision into the bundle. 
- - Args: - profile: string, optional, short name of the .mobileprovision file - to use, if empty or the file is missing, the best file installed - will be used - bundle_identifier: string, value of CFBundleIdentifier from Info.plist - - Returns: - A tuple containing two dictionary: variables substitutions and values - to overrides when generating the entitlements file. - """ - source_path, provisioning_data, team_id = self._FindProvisioningProfile( - profile, bundle_identifier) - target_path = os.path.join( - os.environ['BUILT_PRODUCTS_DIR'], - os.environ['CONTENTS_FOLDER_PATH'], - 'embedded.mobileprovision') - shutil.copy2(source_path, target_path) - substitutions = self._GetSubstitutions(bundle_identifier, team_id + '.') - return substitutions, provisioning_data['Entitlements'] - - def _FindProvisioningProfile(self, profile, bundle_identifier): - """Finds the .mobileprovision file to use for signing the bundle. - - Checks all the installed provisioning profiles (or if the user specified - the PROVISIONING_PROFILE variable, only consult it) and select the most - specific that correspond to the bundle identifier. - - Args: - profile: string, optional, short name of the .mobileprovision file - to use, if empty or the file is missing, the best file installed - will be used - bundle_identifier: string, value of CFBundleIdentifier from Info.plist - - Returns: - A tuple of the path to the selected provisioning profile, the data of - the embedded plist in the provisioning profile and the team identifier - to use for code signing. - - Raises: - SystemExit: if no .mobileprovision can be used to sign the bundle. - """ - profiles_dir = os.path.join( - os.environ['HOME'], 'Library', 'MobileDevice', 'Provisioning Profiles') - if not os.path.isdir(profiles_dir): - print >>sys.stderr, ( - 'cannot find mobile provisioning for %s' % bundle_identifier) - sys.exit(1) - provisioning_profiles = None - if profile: - profile_path = os.path.join(profiles_dir, profile + '.mobileprovision') - if os.path.exists(profile_path): - provisioning_profiles = [profile_path] - if not provisioning_profiles: - provisioning_profiles = glob.glob( - os.path.join(profiles_dir, '*.mobileprovision')) - valid_provisioning_profiles = {} - for profile_path in provisioning_profiles: - profile_data = self._LoadProvisioningProfile(profile_path) - app_id_pattern = profile_data.get( - 'Entitlements', {}).get('application-identifier', '') - for team_identifier in profile_data.get('TeamIdentifier', []): - app_id = '%s.%s' % (team_identifier, bundle_identifier) - if fnmatch.fnmatch(app_id, app_id_pattern): - valid_provisioning_profiles[app_id_pattern] = ( - profile_path, profile_data, team_identifier) - if not valid_provisioning_profiles: - print >>sys.stderr, ( - 'cannot find mobile provisioning for %s' % bundle_identifier) - sys.exit(1) - # If the user has multiple provisioning profiles installed that can be - # used for ${bundle_identifier}, pick the most specific one (ie. the - # provisioning profile whose pattern is the longest). - selected_key = max(valid_provisioning_profiles, key=lambda v: len(v)) - return valid_provisioning_profiles[selected_key] - - def _LoadProvisioningProfile(self, profile_path): - """Extracts the plist embedded in a provisioning profile. - - Args: - profile_path: string, path to the .mobileprovision file - - Returns: - Content of the plist embedded in the provisioning profile as a dictionary. 
- """ - with tempfile.NamedTemporaryFile() as temp: - subprocess.check_call([ - 'security', 'cms', '-D', '-i', profile_path, '-o', temp.name]) - return self._LoadPlistMaybeBinary(temp.name) - - def _MergePlist(self, merged_plist, plist): - """Merge |plist| into |merged_plist|.""" - for key, value in plist.iteritems(): - if isinstance(value, dict): - merged_value = merged_plist.get(key, {}) - if isinstance(merged_value, dict): - self._MergePlist(merged_value, value) - merged_plist[key] = merged_value - else: - merged_plist[key] = value - else: - merged_plist[key] = value - - def _LoadPlistMaybeBinary(self, plist_path): - """Loads into a memory a plist possibly encoded in binary format. - - This is a wrapper around plistlib.readPlist that tries to convert the - plist to the XML format if it can't be parsed (assuming that it is in - the binary format). - - Args: - plist_path: string, path to a plist file, in XML or binary format - - Returns: - Content of the plist as a dictionary. - """ - try: - # First, try to read the file using plistlib that only supports XML, - # and if an exception is raised, convert a temporary copy to XML and - # load that copy. - return plistlib.readPlist(plist_path) - except: - pass - with tempfile.NamedTemporaryFile() as temp: - shutil.copy2(plist_path, temp.name) - subprocess.check_call(['plutil', '-convert', 'xml1', temp.name]) - return plistlib.readPlist(temp.name) - - def _GetSubstitutions(self, bundle_identifier, app_identifier_prefix): - """Constructs a dictionary of variable substitutions for Entitlements.plist. - - Args: - bundle_identifier: string, value of CFBundleIdentifier from Info.plist - app_identifier_prefix: string, value for AppIdentifierPrefix - - Returns: - Dictionary of substitutions to apply when generating Entitlements.plist. - """ - return { - 'CFBundleIdentifier': bundle_identifier, - 'AppIdentifierPrefix': app_identifier_prefix, - } - - def _GetCFBundleIdentifier(self): - """Extracts CFBundleIdentifier value from Info.plist in the bundle. - - Returns: - Value of CFBundleIdentifier in the Info.plist located in the bundle. - """ - info_plist_path = os.path.join( - os.environ['TARGET_BUILD_DIR'], - os.environ['INFOPLIST_PATH']) - info_plist_data = self._LoadPlistMaybeBinary(info_plist_path) - return info_plist_data['CFBundleIdentifier'] - - def _InstallEntitlements(self, entitlements, substitutions, overrides): - """Generates and install the ${BundleName}.xcent entitlements file. - - Expands variables "$(variable)" pattern in the source entitlements file, - add extra entitlements defined in the .mobileprovision file and the copy - the generated plist to "${BundlePath}.xcent". - - Args: - entitlements: string, optional, path to the Entitlements.plist template - to use, defaults to "${SDKROOT}/Entitlements.plist" - substitutions: dictionary, variable substitutions - overrides: dictionary, values to add to the entitlements - - Returns: - Path to the generated entitlements file. 
- """ - source_path = entitlements - target_path = os.path.join( - os.environ['BUILT_PRODUCTS_DIR'], - os.environ['PRODUCT_NAME'] + '.xcent') - if not source_path: - source_path = os.path.join( - os.environ['SDKROOT'], - 'Entitlements.plist') - shutil.copy2(source_path, target_path) - data = self._LoadPlistMaybeBinary(target_path) - data = self._ExpandVariables(data, substitutions) - if overrides: - for key in overrides: - if key not in data: - data[key] = overrides[key] - plistlib.writePlist(data, target_path) - return target_path - - def _ExpandVariables(self, data, substitutions): - """Expands variables "$(variable)" in data. - - Args: - data: object, can be either string, list or dictionary - substitutions: dictionary, variable substitutions to perform - - Returns: - Copy of data where each references to "$(variable)" has been replaced - by the corresponding value found in substitutions, or left intact if - the key was not found. - """ - if isinstance(data, str): - for key, value in substitutions.iteritems(): - data = data.replace('$(%s)' % key, value) - return data - if isinstance(data, list): - return [self._ExpandVariables(v, substitutions) for v in data] - if isinstance(data, dict): - return {k: self._ExpandVariables(data[k], substitutions) for k in data} - return data - -if __name__ == '__main__': - sys.exit(main(sys.argv[1:])) diff --git a/test/addons-napi/test_reference/build/test_reference.target.mk b/test/addons-napi/test_reference/build/test_reference.target.mk deleted file mode 100644 index 03ba8d4d99b2ec..00000000000000 --- a/test/addons-napi/test_reference/build/test_reference.target.mk +++ /dev/null @@ -1,178 +0,0 @@ -# This file is generated by gyp; do not edit. - -TOOLSET := target -TARGET := test_reference -DEFS_Debug := \ - '-DNODE_GYP_MODULE_NAME=test_reference' \ - '-DUSING_UV_SHARED=1' \ - '-DUSING_V8_SHARED=1' \ - '-DV8_DEPRECATION_WARNINGS=1' \ - '-D_DARWIN_USE_64_BIT_INODE=1' \ - '-D_LARGEFILE_SOURCE' \ - '-D_FILE_OFFSET_BITS=64' \ - '-DBUILDING_NODE_EXTENSION' \ - '-DDEBUG' \ - '-D_DEBUG' \ - '-DV8_ENABLE_CHECKS' - -# Flags passed to all source files. -CFLAGS_Debug := \ - -O0 \ - -gdwarf-2 \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -Wall \ - -Wendif-labels \ - -W \ - -Wno-unused-parameter - -# Flags passed to only C files. -CFLAGS_C_Debug := \ - -fno-strict-aliasing - -# Flags passed to only C++ files. -CFLAGS_CC_Debug := \ - -std=gnu++0x \ - -stdlib=libc++ \ - -fno-rtti \ - -fno-exceptions \ - -fno-threadsafe-statics \ - -fno-strict-aliasing - -# Flags passed to only ObjC files. -CFLAGS_OBJC_Debug := - -# Flags passed to only ObjC++ files. -CFLAGS_OBJCC_Debug := - -INCS_Debug := \ - -I/Users/trott/io.js/include/node \ - -I/Users/trott/io.js/src \ - -I/Users/trott/io.js/deps/uv/include \ - -I/Users/trott/io.js/deps/v8/include - -DEFS_Release := \ - '-DNODE_GYP_MODULE_NAME=test_reference' \ - '-DUSING_UV_SHARED=1' \ - '-DUSING_V8_SHARED=1' \ - '-DV8_DEPRECATION_WARNINGS=1' \ - '-D_DARWIN_USE_64_BIT_INODE=1' \ - '-D_LARGEFILE_SOURCE' \ - '-D_FILE_OFFSET_BITS=64' \ - '-DBUILDING_NODE_EXTENSION' - -# Flags passed to all source files. -CFLAGS_Release := \ - -Os \ - -gdwarf-2 \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -Wall \ - -Wendif-labels \ - -W \ - -Wno-unused-parameter - -# Flags passed to only C files. -CFLAGS_C_Release := \ - -fno-strict-aliasing - -# Flags passed to only C++ files. 
-CFLAGS_CC_Release := \ - -std=gnu++0x \ - -stdlib=libc++ \ - -fno-rtti \ - -fno-exceptions \ - -fno-threadsafe-statics \ - -fno-strict-aliasing - -# Flags passed to only ObjC files. -CFLAGS_OBJC_Release := - -# Flags passed to only ObjC++ files. -CFLAGS_OBJCC_Release := - -INCS_Release := \ - -I/Users/trott/io.js/include/node \ - -I/Users/trott/io.js/src \ - -I/Users/trott/io.js/deps/uv/include \ - -I/Users/trott/io.js/deps/v8/include - -OBJS := \ - $(obj).target/$(TARGET)/test_reference.o - -# Add to the list of files we specially track dependencies for. -all_deps += $(OBJS) - -# CFLAGS et al overrides must be target-local. -# See "Target-specific Variable Values" in the GNU Make manual. -$(OBJS): TOOLSET := $(TOOLSET) -$(OBJS): GYP_CFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) -$(OBJS): GYP_CXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) -$(OBJS): GYP_OBJCFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) $(CFLAGS_OBJC_$(BUILDTYPE)) -$(OBJS): GYP_OBJCXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) $(CFLAGS_OBJCC_$(BUILDTYPE)) - -# Suffix rules, putting all outputs into $(obj). - -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) - -# Try building from generated source, too. - -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) - -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) - -# End of this set of suffix rules -### Rules for final target. -LDFLAGS_Debug := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -L$(builddir) \ - -stdlib=libc++ - -LIBTOOLFLAGS_Debug := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first - -LDFLAGS_Release := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -L$(builddir) \ - -stdlib=libc++ - -LIBTOOLFLAGS_Release := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first - -LIBS := - -$(builddir)/test_reference.node: GYP_LDFLAGS := $(LDFLAGS_$(BUILDTYPE)) -$(builddir)/test_reference.node: LIBS := $(LIBS) -$(builddir)/test_reference.node: GYP_LIBTOOLFLAGS := $(LIBTOOLFLAGS_$(BUILDTYPE)) -$(builddir)/test_reference.node: TOOLSET := $(TOOLSET) -$(builddir)/test_reference.node: $(OBJS) FORCE_DO_CMD - $(call do_cmd,solink_module) - -all_deps += $(builddir)/test_reference.node -# Add target alias -.PHONY: test_reference -test_reference: $(builddir)/test_reference.node - -# Short alias for building this executable. -.PHONY: test_reference.node -test_reference.node: $(builddir)/test_reference.node - -# Add executable to "all" target. -.PHONY: all -all: $(builddir)/test_reference.node - diff --git a/test/addons-napi/test_string/build/Makefile b/test/addons-napi/test_string/build/Makefile deleted file mode 100644 index 63e02566c436f9..00000000000000 --- a/test/addons-napi/test_string/build/Makefile +++ /dev/null @@ -1,342 +0,0 @@ -# We borrow heavily from the kernel build setup, though we are simpler since -# we don't have Kconfig tweaking settings on us. - -# The implicit make rules have it looking for RCS files, among other things. -# We instead explicitly write all the rules we care about. -# It's even quicker (saves ~200ms) to pass -r on the command line. 
-MAKEFLAGS=-r - -# The source directory tree. -srcdir := .. -abs_srcdir := $(abspath $(srcdir)) - -# The name of the builddir. -builddir_name ?= . - -# The V=1 flag on command line makes us verbosely print command lines. -ifdef V - quiet= -else - quiet=quiet_ -endif - -# Specify BUILDTYPE=Release on the command line for a release build. -BUILDTYPE ?= Release - -# Directory all our build output goes into. -# Note that this must be two directories beneath src/ for unit tests to pass, -# as they reach into the src/ directory for data with relative paths. -builddir ?= $(builddir_name)/$(BUILDTYPE) -abs_builddir := $(abspath $(builddir)) -depsdir := $(builddir)/.deps - -# Object output directory. -obj := $(builddir)/obj -abs_obj := $(abspath $(obj)) - -# We build up a list of every single one of the targets so we can slurp in the -# generated dependency rule Makefiles in one pass. -all_deps := - - - -CC.target ?= $(CC) -CFLAGS.target ?= $(CPPFLAGS) $(CFLAGS) -CXX.target ?= $(CXX) -CXXFLAGS.target ?= $(CPPFLAGS) $(CXXFLAGS) -LINK.target ?= $(LINK) -LDFLAGS.target ?= $(LDFLAGS) -AR.target ?= $(AR) - -# C++ apps need to be linked with g++. -LINK ?= $(CXX.target) - -# TODO(evan): move all cross-compilation logic to gyp-time so we don't need -# to replicate this environment fallback in make as well. -CC.host ?= gcc -CFLAGS.host ?= $(CPPFLAGS_host) $(CFLAGS_host) -CXX.host ?= g++ -CXXFLAGS.host ?= $(CPPFLAGS_host) $(CXXFLAGS_host) -LINK.host ?= $(CXX.host) -LDFLAGS.host ?= -AR.host ?= ar - -# Define a dir function that can handle spaces. -# http://www.gnu.org/software/make/manual/make.html#Syntax-of-Functions -# "leading spaces cannot appear in the text of the first argument as written. -# These characters can be put into the argument value by variable substitution." -empty := -space := $(empty) $(empty) - -# http://stackoverflow.com/questions/1189781/using-make-dir-or-notdir-on-a-path-with-spaces -replace_spaces = $(subst $(space),?,$1) -unreplace_spaces = $(subst ?,$(space),$1) -dirx = $(call unreplace_spaces,$(dir $(call replace_spaces,$1))) - -# Flags to make gcc output dependency info. Note that you need to be -# careful here to use the flags that ccache and distcc can understand. -# We write to a dep file on the side first and then rename at the end -# so we can't end up with a broken dep file. -depfile = $(depsdir)/$(call replace_spaces,$@).d -DEPFLAGS = -MMD -MF $(depfile).raw - -# We have to fixup the deps output in a few ways. -# (1) the file output should mention the proper .o file. -# ccache or distcc lose the path to the target, so we convert a rule of -# the form: -# foobar.o: DEP1 DEP2 -# into -# path/to/foobar.o: DEP1 DEP2 -# (2) we want missing files not to cause us to fail to build. -# We want to rewrite -# foobar.o: DEP1 DEP2 \ -# DEP3 -# to -# DEP1: -# DEP2: -# DEP3: -# so if the files are missing, they're just considered phony rules. -# We have to do some pretty insane escaping to get those backslashes -# and dollar signs past make, the shell, and sed at the same time. -# Doesn't work with spaces, but that's fine: .d files have spaces in -# their names replaced with other characters. -define fixup_dep -# The depfile may not exist if the input file didn't have any #includes. -touch $(depfile).raw -# Fixup path as in (1). -sed -e "s|^$(notdir $@)|$@|" $(depfile).raw >> $(depfile) -# Add extra rules as in (2). -# We remove slashes and replace spaces with new lines; -# remove blank lines; -# delete the first line and append a colon to the remaining lines. 
-sed -e 's|\\||' -e 'y| |\n|' $(depfile).raw |\ - grep -v '^$$' |\ - sed -e 1d -e 's|$$|:|' \ - >> $(depfile) -rm $(depfile).raw -endef - -# Command definitions: -# - cmd_foo is the actual command to run; -# - quiet_cmd_foo is the brief-output summary of the command. - -quiet_cmd_cc = CC($(TOOLSET)) $@ -cmd_cc = $(CC.$(TOOLSET)) $(GYP_CFLAGS) $(DEPFLAGS) $(CFLAGS.$(TOOLSET)) -c -o $@ $< - -quiet_cmd_cxx = CXX($(TOOLSET)) $@ -cmd_cxx = $(CXX.$(TOOLSET)) $(GYP_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< - -quiet_cmd_objc = CXX($(TOOLSET)) $@ -cmd_objc = $(CC.$(TOOLSET)) $(GYP_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $< - -quiet_cmd_objcxx = CXX($(TOOLSET)) $@ -cmd_objcxx = $(CXX.$(TOOLSET)) $(GYP_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $< - -# Commands for precompiled header files. -quiet_cmd_pch_c = CXX($(TOOLSET)) $@ -cmd_pch_c = $(CC.$(TOOLSET)) $(GYP_PCH_CFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< -quiet_cmd_pch_cc = CXX($(TOOLSET)) $@ -cmd_pch_cc = $(CC.$(TOOLSET)) $(GYP_PCH_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< -quiet_cmd_pch_m = CXX($(TOOLSET)) $@ -cmd_pch_m = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $< -quiet_cmd_pch_mm = CXX($(TOOLSET)) $@ -cmd_pch_mm = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $< - -# gyp-mac-tool is written next to the root Makefile by gyp. -# Use $(4) for the command, since $(2) and $(3) are used as flag by do_cmd -# already. -quiet_cmd_mac_tool = MACTOOL $(4) $< -cmd_mac_tool = ./gyp-mac-tool $(4) $< "$@" - -quiet_cmd_mac_package_framework = PACKAGE FRAMEWORK $@ -cmd_mac_package_framework = ./gyp-mac-tool package-framework "$@" $(4) - -quiet_cmd_infoplist = INFOPLIST $@ -cmd_infoplist = $(CC.$(TOOLSET)) -E -P -Wno-trigraphs -x c $(INFOPLIST_DEFINES) "$<" -o "$@" - -quiet_cmd_touch = TOUCH $@ -cmd_touch = touch $@ - -quiet_cmd_copy = COPY $@ -# send stderr to /dev/null to ignore messages when linking directories. -cmd_copy = rm -rf "$@" && cp -af "$<" "$@" - -quiet_cmd_alink = LIBTOOL-STATIC $@ -cmd_alink = rm -f $@ && ./gyp-mac-tool filter-libtool libtool $(GYP_LIBTOOLFLAGS) -static -o $@ $(filter %.o,$^) - -quiet_cmd_link = LINK($(TOOLSET)) $@ -cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS) - -quiet_cmd_solink = SOLINK($(TOOLSET)) $@ -cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS) - -quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@ -cmd_solink_module = $(LINK.$(TOOLSET)) -bundle $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS) - - -# Define an escape_quotes function to escape single quotes. -# This allows us to handle quotes properly as long as we always use -# use single quotes and escape_quotes. -escape_quotes = $(subst ','\'',$(1)) -# This comment is here just to include a ' to unconfuse syntax highlighting. -# Define an escape_vars function to escape '$' variable syntax. -# This allows us to read/write command lines with shell variables (e.g. -# $LD_LIBRARY_PATH), without triggering make substitution. -escape_vars = $(subst $$,$$$$,$(1)) -# Helper that expands to a shell command to echo a string exactly as it is in -# make. This uses printf instead of echo because printf's behaviour with respect -# to escape sequences is more portable than echo's across different shells -# (e.g., dash, bash). 
-exact_echo = printf '%s\n' '$(call escape_quotes,$(1))' - -# Helper to compare the command we're about to run against the command -# we logged the last time we ran the command. Produces an empty -# string (false) when the commands match. -# Tricky point: Make has no string-equality test function. -# The kernel uses the following, but it seems like it would have false -# positives, where one string reordered its arguments. -# arg_check = $(strip $(filter-out $(cmd_$(1)), $(cmd_$@)) \ -# $(filter-out $(cmd_$@), $(cmd_$(1)))) -# We instead substitute each for the empty string into the other, and -# say they're equal if both substitutions produce the empty string. -# .d files contain ? instead of spaces, take that into account. -command_changed = $(or $(subst $(cmd_$(1)),,$(cmd_$(call replace_spaces,$@))),\ - $(subst $(cmd_$(call replace_spaces,$@)),,$(cmd_$(1)))) - -# Helper that is non-empty when a prerequisite changes. -# Normally make does this implicitly, but we force rules to always run -# so we can check their command lines. -# $? -- new prerequisites -# $| -- order-only dependencies -prereq_changed = $(filter-out FORCE_DO_CMD,$(filter-out $|,$?)) - -# Helper that executes all postbuilds until one fails. -define do_postbuilds - @E=0;\ - for p in $(POSTBUILDS); do\ - eval $$p;\ - E=$$?;\ - if [ $$E -ne 0 ]; then\ - break;\ - fi;\ - done;\ - if [ $$E -ne 0 ]; then\ - rm -rf "$@";\ - exit $$E;\ - fi -endef - -# do_cmd: run a command via the above cmd_foo names, if necessary. -# Should always run for a given target to handle command-line changes. -# Second argument, if non-zero, makes it do asm/C/C++ dependency munging. -# Third argument, if non-zero, makes it do POSTBUILDS processing. -# Note: We intentionally do NOT call dirx for depfile, since it contains ? for -# spaces already and dirx strips the ? characters. -define do_cmd -$(if $(or $(command_changed),$(prereq_changed)), - @$(call exact_echo, $($(quiet)cmd_$(1))) - @mkdir -p "$(call dirx,$@)" "$(dir $(depfile))" - $(if $(findstring flock,$(word 2,$(cmd_$1))), - @$(cmd_$(1)) - @echo " $(quiet_cmd_$(1)): Finished", - @$(cmd_$(1)) - ) - @$(call exact_echo,$(call escape_vars,cmd_$(call replace_spaces,$@) := $(cmd_$(1)))) > $(depfile) - @$(if $(2),$(fixup_dep)) - $(if $(and $(3), $(POSTBUILDS)), - $(call do_postbuilds) - ) -) -endef - -# Declare the "all" target first so it is the default, -# even though we don't have the deps yet. -.PHONY: all -all: - -# make looks for ways to re-generate included makefiles, but in our case, we -# don't have a direct way. Explicitly telling make that it has nothing to do -# for them makes it go faster. -%.d: ; - -# Use FORCE_DO_CMD to force a target to run. Should be coupled with -# do_cmd. -.PHONY: FORCE_DO_CMD -FORCE_DO_CMD: - -TOOLSET := target -# Suffix rules, putting all outputs into $(obj). -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.m FORCE_DO_CMD - @$(call do_cmd,objc,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.mm FORCE_DO_CMD - @$(call do_cmd,objcxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.S FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.s FORCE_DO_CMD - @$(call do_cmd,cc,1) - -# Try building from generated source, too. 
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.m FORCE_DO_CMD - @$(call do_cmd,objc,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.mm FORCE_DO_CMD - @$(call do_cmd,objcxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.S FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.s FORCE_DO_CMD - @$(call do_cmd,cc,1) - -$(obj).$(TOOLSET)/%.o: $(obj)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.m FORCE_DO_CMD - @$(call do_cmd,objc,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.mm FORCE_DO_CMD - @$(call do_cmd,objcxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.S FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.s FORCE_DO_CMD - @$(call do_cmd,cc,1) - - -ifeq ($(strip $(foreach prefix,$(NO_LOAD),\ - $(findstring $(join ^,$(prefix)),\ - $(join ^,test_string.target.mk)))),) - include test_string.target.mk -endif - -quiet_cmd_regen_makefile = ACTION Regenerating $@ -cmd_regen_makefile = cd $(srcdir); /Users/trott/io.js/deps/npm/node_modules/node-gyp/gyp/gyp_main.py -fmake --ignore-environment "--toplevel-dir=." -I/Users/trott/io.js/test/addons-napi/test_string/build/config.gypi -I/Users/trott/io.js/deps/npm/node_modules/node-gyp/addon.gypi -I/Users/trott/io.js/common.gypi "--depth=." "-Goutput_dir=." "--generator-output=build" "-Dlibrary=shared_library" "-Dvisibility=default" "-Dnode_root_dir=/Users/trott/io.js" "-Dnode_gyp_dir=/Users/trott/io.js/deps/npm/node_modules/node-gyp" "-Dnode_lib_file=/Users/trott/io.js/$(Configuration)/node.lib" "-Dmodule_root_dir=/Users/trott/io.js/test/addons-napi/test_string" "-Dnode_engine=v8" binding.gyp -Makefile: $(srcdir)/../../../deps/npm/node_modules/node-gyp/addon.gypi $(srcdir)/build/config.gypi $(srcdir)/binding.gyp $(srcdir)/../../../common.gypi - $(call do_cmd,regen_makefile) - -# "all" is a concatenation of the "all" targets from all the included -# sub-makefiles. This is just here to clarify. -all: - -# Add in dependency-tracking rules. $(all_deps) is the list of every single -# target in our tree. Only consider the ones with .d (dependency) info: -d_files := $(wildcard $(foreach f,$(all_deps),$(depsdir)/$(f).d)) -ifneq ($(d_files),) - include $(d_files) -endif diff --git a/test/addons-napi/test_string/build/binding.Makefile b/test/addons-napi/test_string/build/binding.Makefile deleted file mode 100644 index cbc34b564e10e6..00000000000000 --- a/test/addons-napi/test_string/build/binding.Makefile +++ /dev/null @@ -1,6 +0,0 @@ -# This file is generated by gyp; do not edit. - -export builddir_name ?= ./build/. -.PHONY: all -all: - $(MAKE) test_string diff --git a/test/addons-napi/test_string/build/config.gypi b/test/addons-napi/test_string/build/config.gypi deleted file mode 100644 index 6a35cfccbb772b..00000000000000 --- a/test/addons-napi/test_string/build/config.gypi +++ /dev/null @@ -1,70 +0,0 @@ -# Do not edit. 
File was generated by node-gyp's "configure" step -{ - "target_defaults": { - "cflags": [], - "default_configuration": "Release", - "defines": [], - "include_dirs": [], - "libraries": [] - }, - "variables": { - "asan": 0, - "coverage": "false", - "debug_devtools": "node", - "debug_http2": "false", - "debug_nghttp2": "false", - "force_dynamic_crt": 0, - "host_arch": "x64", - "icu_data_file": "icudt59l.dat", - "icu_data_in": "../../deps/icu-small/source/data/in/icudt59l.dat", - "icu_endianness": "l", - "icu_gyp_path": "tools/icu/icu-generic.gyp", - "icu_locales": "en,root", - "icu_path": "deps/icu-small", - "icu_small": "true", - "icu_ver_major": "59", - "llvm_version": 0, - "node_byteorder": "little", - "node_enable_d8": "false", - "node_enable_v8_vtunejit": "false", - "node_install_npm": "true", - "node_module_version": 58, - "node_no_browser_globals": "false", - "node_prefix": "/usr/local", - "node_release_urlbase": "", - "node_shared": "false", - "node_shared_cares": "false", - "node_shared_http_parser": "false", - "node_shared_libuv": "false", - "node_shared_openssl": "false", - "node_shared_zlib": "false", - "node_tag": "", - "node_use_bundled_v8": "true", - "node_use_dtrace": "true", - "node_use_etw": "false", - "node_use_lttng": "false", - "node_use_openssl": "true", - "node_use_perfctr": "false", - "node_use_v8_platform": "true", - "node_without_node_options": "false", - "openssl_fips": "", - "openssl_no_asm": 0, - "shlib_suffix": "58.dylib", - "target_arch": "x64", - "uv_parent_path": "/deps/uv/", - "uv_use_dtrace": "true", - "v8_enable_gdbjit": 0, - "v8_enable_i18n_support": 1, - "v8_enable_inspector": 1, - "v8_no_strict_aliasing": 1, - "v8_optimized_debug": 0, - "v8_promise_internal_field_count": 1, - "v8_random_seed": 0, - "v8_trace_maps": 0, - "v8_use_snapshot": "true", - "want_separate_host_toolset": 0, - "xcode_version": "8.0", - "nodedir": "/Users/trott/io.js", - "standalone_static_library": 1 - } -} diff --git a/test/addons-napi/test_string/build/gyp-mac-tool b/test/addons-napi/test_string/build/gyp-mac-tool deleted file mode 100755 index 8ef02b0493a003..00000000000000 --- a/test/addons-napi/test_string/build/gyp-mac-tool +++ /dev/null @@ -1,611 +0,0 @@ -#!/usr/bin/env python -# Generated by gyp. Do not edit. -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Utility functions to perform Xcode-style build steps. - -These functions are executed via gyp-mac-tool when using the Makefile generator. -""" - -import fcntl -import fnmatch -import glob -import json -import os -import plistlib -import re -import shutil -import string -import subprocess -import sys -import tempfile - - -def main(args): - executor = MacTool() - exit_code = executor.Dispatch(args) - if exit_code is not None: - sys.exit(exit_code) - - -class MacTool(object): - """This class performs all the Mac tooling steps. 
The methods can either be - executed directly, or dispatched from an argument list.""" - - def Dispatch(self, args): - """Dispatches a string command to a method.""" - if len(args) < 1: - raise Exception("Not enough arguments") - - method = "Exec%s" % self._CommandifyName(args[0]) - return getattr(self, method)(*args[1:]) - - def _CommandifyName(self, name_string): - """Transforms a tool name like copy-info-plist to CopyInfoPlist""" - return name_string.title().replace('-', '') - - def ExecCopyBundleResource(self, source, dest, convert_to_binary): - """Copies a resource file to the bundle/Resources directory, performing any - necessary compilation on each resource.""" - extension = os.path.splitext(source)[1].lower() - if os.path.isdir(source): - # Copy tree. - # TODO(thakis): This copies file attributes like mtime, while the - # single-file branch below doesn't. This should probably be changed to - # be consistent with the single-file branch. - if os.path.exists(dest): - shutil.rmtree(dest) - shutil.copytree(source, dest) - elif extension == '.xib': - return self._CopyXIBFile(source, dest) - elif extension == '.storyboard': - return self._CopyXIBFile(source, dest) - elif extension == '.strings': - self._CopyStringsFile(source, dest, convert_to_binary) - else: - shutil.copy(source, dest) - - def _CopyXIBFile(self, source, dest): - """Compiles a XIB file with ibtool into a binary plist in the bundle.""" - - # ibtool sometimes crashes with relative paths. See crbug.com/314728. - base = os.path.dirname(os.path.realpath(__file__)) - if os.path.relpath(source): - source = os.path.join(base, source) - if os.path.relpath(dest): - dest = os.path.join(base, dest) - - args = ['xcrun', 'ibtool', '--errors', '--warnings', '--notices', - '--output-format', 'human-readable-text', '--compile', dest, source] - ibtool_section_re = re.compile(r'/\*.*\*/') - ibtool_re = re.compile(r'.*note:.*is clipping its content') - ibtoolout = subprocess.Popen(args, stdout=subprocess.PIPE) - current_section_header = None - for line in ibtoolout.stdout: - if ibtool_section_re.match(line): - current_section_header = line - elif not ibtool_re.match(line): - if current_section_header: - sys.stdout.write(current_section_header) - current_section_header = None - sys.stdout.write(line) - return ibtoolout.returncode - - def _ConvertToBinary(self, dest): - subprocess.check_call([ - 'xcrun', 'plutil', '-convert', 'binary1', '-o', dest, dest]) - - def _CopyStringsFile(self, source, dest, convert_to_binary): - """Copies a .strings file using iconv to reconvert the input into UTF-16.""" - input_code = self._DetectInputEncoding(source) or "UTF-8" - - # Xcode's CpyCopyStringsFile / builtin-copyStrings seems to call - # CFPropertyListCreateFromXMLData() behind the scenes; at least it prints - # CFPropertyListCreateFromXMLData(): Old-style plist parser: missing - # semicolon in dictionary. - # on invalid files. Do the same kind of validation. - import CoreFoundation - s = open(source, 'rb').read() - d = CoreFoundation.CFDataCreate(None, s, len(s)) - _, error = CoreFoundation.CFPropertyListCreateFromXMLData(None, d, 0, None) - if error: - return - - fp = open(dest, 'wb') - fp.write(s.decode(input_code).encode('UTF-16')) - fp.close() - - if convert_to_binary == 'True': - self._ConvertToBinary(dest) - - def _DetectInputEncoding(self, file_name): - """Reads the first few bytes from file_name and tries to guess the text - encoding. 
Returns None as a guess if it can't detect it.""" - fp = open(file_name, 'rb') - try: - header = fp.read(3) - except e: - fp.close() - return None - fp.close() - if header.startswith("\xFE\xFF"): - return "UTF-16" - elif header.startswith("\xFF\xFE"): - return "UTF-16" - elif header.startswith("\xEF\xBB\xBF"): - return "UTF-8" - else: - return None - - def ExecCopyInfoPlist(self, source, dest, convert_to_binary, *keys): - """Copies the |source| Info.plist to the destination directory |dest|.""" - # Read the source Info.plist into memory. - fd = open(source, 'r') - lines = fd.read() - fd.close() - - # Insert synthesized key/value pairs (e.g. BuildMachineOSBuild). - plist = plistlib.readPlistFromString(lines) - if keys: - plist = dict(plist.items() + json.loads(keys[0]).items()) - lines = plistlib.writePlistToString(plist) - - # Go through all the environment variables and replace them as variables in - # the file. - IDENT_RE = re.compile(r'[/\s]') - for key in os.environ: - if key.startswith('_'): - continue - evar = '${%s}' % key - evalue = os.environ[key] - lines = string.replace(lines, evar, evalue) - - # Xcode supports various suffices on environment variables, which are - # all undocumented. :rfc1034identifier is used in the standard project - # template these days, and :identifier was used earlier. They are used to - # convert non-url characters into things that look like valid urls -- - # except that the replacement character for :identifier, '_' isn't valid - # in a URL either -- oops, hence :rfc1034identifier was born. - evar = '${%s:identifier}' % key - evalue = IDENT_RE.sub('_', os.environ[key]) - lines = string.replace(lines, evar, evalue) - - evar = '${%s:rfc1034identifier}' % key - evalue = IDENT_RE.sub('-', os.environ[key]) - lines = string.replace(lines, evar, evalue) - - # Remove any keys with values that haven't been replaced. - lines = lines.split('\n') - for i in range(len(lines)): - if lines[i].strip().startswith("${"): - lines[i] = None - lines[i - 1] = None - lines = '\n'.join(filter(lambda x: x is not None, lines)) - - # Write out the file with variables replaced. - fd = open(dest, 'w') - fd.write(lines) - fd.close() - - # Now write out PkgInfo file now that the Info.plist file has been - # "compiled". - self._WritePkgInfo(dest) - - if convert_to_binary == 'True': - self._ConvertToBinary(dest) - - def _WritePkgInfo(self, info_plist): - """This writes the PkgInfo file from the data stored in Info.plist.""" - plist = plistlib.readPlist(info_plist) - if not plist: - return - - # Only create PkgInfo for executable types. - package_type = plist['CFBundlePackageType'] - if package_type != 'APPL': - return - - # The format of PkgInfo is eight characters, representing the bundle type - # and bundle signature, each four characters. If that is missing, four - # '?' characters are used instead. - signature_code = plist.get('CFBundleSignature', '????') - if len(signature_code) != 4: # Wrong length resets everything, too. - signature_code = '?' * 4 - - dest = os.path.join(os.path.dirname(info_plist), 'PkgInfo') - fp = open(dest, 'w') - fp.write('%s%s' % (package_type, signature_code)) - fp.close() - - def ExecFlock(self, lockfile, *cmd_list): - """Emulates the most basic behavior of Linux's flock(1).""" - # Rely on exception handling to report errors. 
- fd = os.open(lockfile, os.O_RDONLY|os.O_NOCTTY|os.O_CREAT, 0o666) - fcntl.flock(fd, fcntl.LOCK_EX) - return subprocess.call(cmd_list) - - def ExecFilterLibtool(self, *cmd_list): - """Calls libtool and filters out '/path/to/libtool: file: foo.o has no - symbols'.""" - libtool_re = re.compile(r'^.*libtool: file: .* has no symbols$') - libtool_re5 = re.compile( - r'^.*libtool: warning for library: ' + - r'.* the table of contents is empty ' + - r'\(no object file members in the library define global symbols\)$') - env = os.environ.copy() - # Ref: - # http://www.opensource.apple.com/source/cctools/cctools-809/misc/libtool.c - # The problem with this flag is that it resets the file mtime on the file to - # epoch=0, e.g. 1970-1-1 or 1969-12-31 depending on timezone. - env['ZERO_AR_DATE'] = '1' - libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env) - _, err = libtoolout.communicate() - for line in err.splitlines(): - if not libtool_re.match(line) and not libtool_re5.match(line): - print >>sys.stderr, line - # Unconditionally touch the output .a file on the command line if present - # and the command succeeded. A bit hacky. - if not libtoolout.returncode: - for i in range(len(cmd_list) - 1): - if cmd_list[i] == "-o" and cmd_list[i+1].endswith('.a'): - os.utime(cmd_list[i+1], None) - break - return libtoolout.returncode - - def ExecPackageFramework(self, framework, version): - """Takes a path to Something.framework and the Current version of that and - sets up all the symlinks.""" - # Find the name of the binary based on the part before the ".framework". - binary = os.path.basename(framework).split('.')[0] - - CURRENT = 'Current' - RESOURCES = 'Resources' - VERSIONS = 'Versions' - - if not os.path.exists(os.path.join(framework, VERSIONS, version, binary)): - # Binary-less frameworks don't seem to contain symlinks (see e.g. - # chromium's out/Debug/org.chromium.Chromium.manifest/ bundle). - return - - # Move into the framework directory to set the symlinks correctly. - pwd = os.getcwd() - os.chdir(framework) - - # Set up the Current version. - self._Relink(version, os.path.join(VERSIONS, CURRENT)) - - # Set up the root symlinks. - self._Relink(os.path.join(VERSIONS, CURRENT, binary), binary) - self._Relink(os.path.join(VERSIONS, CURRENT, RESOURCES), RESOURCES) - - # Back to where we were before! - os.chdir(pwd) - - def _Relink(self, dest, link): - """Creates a symlink to |dest| named |link|. If |link| already exists, - it is overwritten.""" - if os.path.lexists(link): - os.remove(link) - os.symlink(dest, link) - - def ExecCompileXcassets(self, keys, *inputs): - """Compiles multiple .xcassets files into a single .car file. - - This invokes 'actool' to compile all the inputs .xcassets files. The - |keys| arguments is a json-encoded dictionary of extra arguments to - pass to 'actool' when the asset catalogs contains an application icon - or a launch image. - - Note that 'actool' does not create the Assets.car file if the asset - catalogs does not contains imageset. 
- """ - command_line = [ - 'xcrun', 'actool', '--output-format', 'human-readable-text', - '--compress-pngs', '--notices', '--warnings', '--errors', - ] - is_iphone_target = 'IPHONEOS_DEPLOYMENT_TARGET' in os.environ - if is_iphone_target: - platform = os.environ['CONFIGURATION'].split('-')[-1] - if platform not in ('iphoneos', 'iphonesimulator'): - platform = 'iphonesimulator' - command_line.extend([ - '--platform', platform, '--target-device', 'iphone', - '--target-device', 'ipad', '--minimum-deployment-target', - os.environ['IPHONEOS_DEPLOYMENT_TARGET'], '--compile', - os.path.abspath(os.environ['CONTENTS_FOLDER_PATH']), - ]) - else: - command_line.extend([ - '--platform', 'macosx', '--target-device', 'mac', - '--minimum-deployment-target', os.environ['MACOSX_DEPLOYMENT_TARGET'], - '--compile', - os.path.abspath(os.environ['UNLOCALIZED_RESOURCES_FOLDER_PATH']), - ]) - if keys: - keys = json.loads(keys) - for key, value in keys.iteritems(): - arg_name = '--' + key - if isinstance(value, bool): - if value: - command_line.append(arg_name) - elif isinstance(value, list): - for v in value: - command_line.append(arg_name) - command_line.append(str(v)) - else: - command_line.append(arg_name) - command_line.append(str(value)) - # Note: actool crashes if inputs path are relative, so use os.path.abspath - # to get absolute path name for inputs. - command_line.extend(map(os.path.abspath, inputs)) - subprocess.check_call(command_line) - - def ExecMergeInfoPlist(self, output, *inputs): - """Merge multiple .plist files into a single .plist file.""" - merged_plist = {} - for path in inputs: - plist = self._LoadPlistMaybeBinary(path) - self._MergePlist(merged_plist, plist) - plistlib.writePlist(merged_plist, output) - - def ExecCodeSignBundle(self, key, resource_rules, entitlements, provisioning): - """Code sign a bundle. - - This function tries to code sign an iOS bundle, following the same - algorithm as Xcode: - 1. copy ResourceRules.plist from the user or the SDK into the bundle, - 2. pick the provisioning profile that best match the bundle identifier, - and copy it into the bundle as embedded.mobileprovision, - 3. copy Entitlements.plist from user or SDK next to the bundle, - 4. code sign the bundle. - """ - resource_rules_path = self._InstallResourceRules(resource_rules) - substitutions, overrides = self._InstallProvisioningProfile( - provisioning, self._GetCFBundleIdentifier()) - entitlements_path = self._InstallEntitlements( - entitlements, substitutions, overrides) - subprocess.check_call([ - 'codesign', '--force', '--sign', key, '--resource-rules', - resource_rules_path, '--entitlements', entitlements_path, - os.path.join( - os.environ['TARGET_BUILD_DIR'], - os.environ['FULL_PRODUCT_NAME'])]) - - def _InstallResourceRules(self, resource_rules): - """Installs ResourceRules.plist from user or SDK into the bundle. - - Args: - resource_rules: string, optional, path to the ResourceRules.plist file - to use, default to "${SDKROOT}/ResourceRules.plist" - - Returns: - Path to the copy of ResourceRules.plist into the bundle. - """ - source_path = resource_rules - target_path = os.path.join( - os.environ['BUILT_PRODUCTS_DIR'], - os.environ['CONTENTS_FOLDER_PATH'], - 'ResourceRules.plist') - if not source_path: - source_path = os.path.join( - os.environ['SDKROOT'], 'ResourceRules.plist') - shutil.copy2(source_path, target_path) - return target_path - - def _InstallProvisioningProfile(self, profile, bundle_identifier): - """Installs embedded.mobileprovision into the bundle. 
- - Args: - profile: string, optional, short name of the .mobileprovision file - to use, if empty or the file is missing, the best file installed - will be used - bundle_identifier: string, value of CFBundleIdentifier from Info.plist - - Returns: - A tuple containing two dictionary: variables substitutions and values - to overrides when generating the entitlements file. - """ - source_path, provisioning_data, team_id = self._FindProvisioningProfile( - profile, bundle_identifier) - target_path = os.path.join( - os.environ['BUILT_PRODUCTS_DIR'], - os.environ['CONTENTS_FOLDER_PATH'], - 'embedded.mobileprovision') - shutil.copy2(source_path, target_path) - substitutions = self._GetSubstitutions(bundle_identifier, team_id + '.') - return substitutions, provisioning_data['Entitlements'] - - def _FindProvisioningProfile(self, profile, bundle_identifier): - """Finds the .mobileprovision file to use for signing the bundle. - - Checks all the installed provisioning profiles (or if the user specified - the PROVISIONING_PROFILE variable, only consult it) and select the most - specific that correspond to the bundle identifier. - - Args: - profile: string, optional, short name of the .mobileprovision file - to use, if empty or the file is missing, the best file installed - will be used - bundle_identifier: string, value of CFBundleIdentifier from Info.plist - - Returns: - A tuple of the path to the selected provisioning profile, the data of - the embedded plist in the provisioning profile and the team identifier - to use for code signing. - - Raises: - SystemExit: if no .mobileprovision can be used to sign the bundle. - """ - profiles_dir = os.path.join( - os.environ['HOME'], 'Library', 'MobileDevice', 'Provisioning Profiles') - if not os.path.isdir(profiles_dir): - print >>sys.stderr, ( - 'cannot find mobile provisioning for %s' % bundle_identifier) - sys.exit(1) - provisioning_profiles = None - if profile: - profile_path = os.path.join(profiles_dir, profile + '.mobileprovision') - if os.path.exists(profile_path): - provisioning_profiles = [profile_path] - if not provisioning_profiles: - provisioning_profiles = glob.glob( - os.path.join(profiles_dir, '*.mobileprovision')) - valid_provisioning_profiles = {} - for profile_path in provisioning_profiles: - profile_data = self._LoadProvisioningProfile(profile_path) - app_id_pattern = profile_data.get( - 'Entitlements', {}).get('application-identifier', '') - for team_identifier in profile_data.get('TeamIdentifier', []): - app_id = '%s.%s' % (team_identifier, bundle_identifier) - if fnmatch.fnmatch(app_id, app_id_pattern): - valid_provisioning_profiles[app_id_pattern] = ( - profile_path, profile_data, team_identifier) - if not valid_provisioning_profiles: - print >>sys.stderr, ( - 'cannot find mobile provisioning for %s' % bundle_identifier) - sys.exit(1) - # If the user has multiple provisioning profiles installed that can be - # used for ${bundle_identifier}, pick the most specific one (ie. the - # provisioning profile whose pattern is the longest). - selected_key = max(valid_provisioning_profiles, key=lambda v: len(v)) - return valid_provisioning_profiles[selected_key] - - def _LoadProvisioningProfile(self, profile_path): - """Extracts the plist embedded in a provisioning profile. - - Args: - profile_path: string, path to the .mobileprovision file - - Returns: - Content of the plist embedded in the provisioning profile as a dictionary. 
- """ - with tempfile.NamedTemporaryFile() as temp: - subprocess.check_call([ - 'security', 'cms', '-D', '-i', profile_path, '-o', temp.name]) - return self._LoadPlistMaybeBinary(temp.name) - - def _MergePlist(self, merged_plist, plist): - """Merge |plist| into |merged_plist|.""" - for key, value in plist.iteritems(): - if isinstance(value, dict): - merged_value = merged_plist.get(key, {}) - if isinstance(merged_value, dict): - self._MergePlist(merged_value, value) - merged_plist[key] = merged_value - else: - merged_plist[key] = value - else: - merged_plist[key] = value - - def _LoadPlistMaybeBinary(self, plist_path): - """Loads into a memory a plist possibly encoded in binary format. - - This is a wrapper around plistlib.readPlist that tries to convert the - plist to the XML format if it can't be parsed (assuming that it is in - the binary format). - - Args: - plist_path: string, path to a plist file, in XML or binary format - - Returns: - Content of the plist as a dictionary. - """ - try: - # First, try to read the file using plistlib that only supports XML, - # and if an exception is raised, convert a temporary copy to XML and - # load that copy. - return plistlib.readPlist(plist_path) - except: - pass - with tempfile.NamedTemporaryFile() as temp: - shutil.copy2(plist_path, temp.name) - subprocess.check_call(['plutil', '-convert', 'xml1', temp.name]) - return plistlib.readPlist(temp.name) - - def _GetSubstitutions(self, bundle_identifier, app_identifier_prefix): - """Constructs a dictionary of variable substitutions for Entitlements.plist. - - Args: - bundle_identifier: string, value of CFBundleIdentifier from Info.plist - app_identifier_prefix: string, value for AppIdentifierPrefix - - Returns: - Dictionary of substitutions to apply when generating Entitlements.plist. - """ - return { - 'CFBundleIdentifier': bundle_identifier, - 'AppIdentifierPrefix': app_identifier_prefix, - } - - def _GetCFBundleIdentifier(self): - """Extracts CFBundleIdentifier value from Info.plist in the bundle. - - Returns: - Value of CFBundleIdentifier in the Info.plist located in the bundle. - """ - info_plist_path = os.path.join( - os.environ['TARGET_BUILD_DIR'], - os.environ['INFOPLIST_PATH']) - info_plist_data = self._LoadPlistMaybeBinary(info_plist_path) - return info_plist_data['CFBundleIdentifier'] - - def _InstallEntitlements(self, entitlements, substitutions, overrides): - """Generates and install the ${BundleName}.xcent entitlements file. - - Expands variables "$(variable)" pattern in the source entitlements file, - add extra entitlements defined in the .mobileprovision file and the copy - the generated plist to "${BundlePath}.xcent". - - Args: - entitlements: string, optional, path to the Entitlements.plist template - to use, defaults to "${SDKROOT}/Entitlements.plist" - substitutions: dictionary, variable substitutions - overrides: dictionary, values to add to the entitlements - - Returns: - Path to the generated entitlements file. 
- """ - source_path = entitlements - target_path = os.path.join( - os.environ['BUILT_PRODUCTS_DIR'], - os.environ['PRODUCT_NAME'] + '.xcent') - if not source_path: - source_path = os.path.join( - os.environ['SDKROOT'], - 'Entitlements.plist') - shutil.copy2(source_path, target_path) - data = self._LoadPlistMaybeBinary(target_path) - data = self._ExpandVariables(data, substitutions) - if overrides: - for key in overrides: - if key not in data: - data[key] = overrides[key] - plistlib.writePlist(data, target_path) - return target_path - - def _ExpandVariables(self, data, substitutions): - """Expands variables "$(variable)" in data. - - Args: - data: object, can be either string, list or dictionary - substitutions: dictionary, variable substitutions to perform - - Returns: - Copy of data where each references to "$(variable)" has been replaced - by the corresponding value found in substitutions, or left intact if - the key was not found. - """ - if isinstance(data, str): - for key, value in substitutions.iteritems(): - data = data.replace('$(%s)' % key, value) - return data - if isinstance(data, list): - return [self._ExpandVariables(v, substitutions) for v in data] - if isinstance(data, dict): - return {k: self._ExpandVariables(data[k], substitutions) for k in data} - return data - -if __name__ == '__main__': - sys.exit(main(sys.argv[1:])) diff --git a/test/addons-napi/test_string/build/test_string.target.mk b/test/addons-napi/test_string/build/test_string.target.mk deleted file mode 100644 index 2338cc90a4d1ff..00000000000000 --- a/test/addons-napi/test_string/build/test_string.target.mk +++ /dev/null @@ -1,178 +0,0 @@ -# This file is generated by gyp; do not edit. - -TOOLSET := target -TARGET := test_string -DEFS_Debug := \ - '-DNODE_GYP_MODULE_NAME=test_string' \ - '-DUSING_UV_SHARED=1' \ - '-DUSING_V8_SHARED=1' \ - '-DV8_DEPRECATION_WARNINGS=1' \ - '-D_DARWIN_USE_64_BIT_INODE=1' \ - '-D_LARGEFILE_SOURCE' \ - '-D_FILE_OFFSET_BITS=64' \ - '-DBUILDING_NODE_EXTENSION' \ - '-DDEBUG' \ - '-D_DEBUG' \ - '-DV8_ENABLE_CHECKS' - -# Flags passed to all source files. -CFLAGS_Debug := \ - -O0 \ - -gdwarf-2 \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -Wall \ - -Wendif-labels \ - -W \ - -Wno-unused-parameter - -# Flags passed to only C files. -CFLAGS_C_Debug := \ - -fno-strict-aliasing - -# Flags passed to only C++ files. -CFLAGS_CC_Debug := \ - -std=gnu++0x \ - -stdlib=libc++ \ - -fno-rtti \ - -fno-exceptions \ - -fno-threadsafe-statics \ - -fno-strict-aliasing - -# Flags passed to only ObjC files. -CFLAGS_OBJC_Debug := - -# Flags passed to only ObjC++ files. -CFLAGS_OBJCC_Debug := - -INCS_Debug := \ - -I/Users/trott/io.js/include/node \ - -I/Users/trott/io.js/src \ - -I/Users/trott/io.js/deps/uv/include \ - -I/Users/trott/io.js/deps/v8/include - -DEFS_Release := \ - '-DNODE_GYP_MODULE_NAME=test_string' \ - '-DUSING_UV_SHARED=1' \ - '-DUSING_V8_SHARED=1' \ - '-DV8_DEPRECATION_WARNINGS=1' \ - '-D_DARWIN_USE_64_BIT_INODE=1' \ - '-D_LARGEFILE_SOURCE' \ - '-D_FILE_OFFSET_BITS=64' \ - '-DBUILDING_NODE_EXTENSION' - -# Flags passed to all source files. -CFLAGS_Release := \ - -Os \ - -gdwarf-2 \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -Wall \ - -Wendif-labels \ - -W \ - -Wno-unused-parameter - -# Flags passed to only C files. -CFLAGS_C_Release := \ - -fno-strict-aliasing - -# Flags passed to only C++ files. -CFLAGS_CC_Release := \ - -std=gnu++0x \ - -stdlib=libc++ \ - -fno-rtti \ - -fno-exceptions \ - -fno-threadsafe-statics \ - -fno-strict-aliasing - -# Flags passed to only ObjC files. 
-CFLAGS_OBJC_Release := - -# Flags passed to only ObjC++ files. -CFLAGS_OBJCC_Release := - -INCS_Release := \ - -I/Users/trott/io.js/include/node \ - -I/Users/trott/io.js/src \ - -I/Users/trott/io.js/deps/uv/include \ - -I/Users/trott/io.js/deps/v8/include - -OBJS := \ - $(obj).target/$(TARGET)/test_string.o - -# Add to the list of files we specially track dependencies for. -all_deps += $(OBJS) - -# CFLAGS et al overrides must be target-local. -# See "Target-specific Variable Values" in the GNU Make manual. -$(OBJS): TOOLSET := $(TOOLSET) -$(OBJS): GYP_CFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) -$(OBJS): GYP_CXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) -$(OBJS): GYP_OBJCFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) $(CFLAGS_OBJC_$(BUILDTYPE)) -$(OBJS): GYP_OBJCXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) $(CFLAGS_OBJCC_$(BUILDTYPE)) - -# Suffix rules, putting all outputs into $(obj). - -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) - -# Try building from generated source, too. - -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) - -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) - -# End of this set of suffix rules -### Rules for final target. -LDFLAGS_Debug := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -L$(builddir) \ - -stdlib=libc++ - -LIBTOOLFLAGS_Debug := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first - -LDFLAGS_Release := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -L$(builddir) \ - -stdlib=libc++ - -LIBTOOLFLAGS_Release := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first - -LIBS := - -$(builddir)/test_string.node: GYP_LDFLAGS := $(LDFLAGS_$(BUILDTYPE)) -$(builddir)/test_string.node: LIBS := $(LIBS) -$(builddir)/test_string.node: GYP_LIBTOOLFLAGS := $(LIBTOOLFLAGS_$(BUILDTYPE)) -$(builddir)/test_string.node: TOOLSET := $(TOOLSET) -$(builddir)/test_string.node: $(OBJS) FORCE_DO_CMD - $(call do_cmd,solink_module) - -all_deps += $(builddir)/test_string.node -# Add target alias -.PHONY: test_string -test_string: $(builddir)/test_string.node - -# Short alias for building this executable. -.PHONY: test_string.node -test_string.node: $(builddir)/test_string.node - -# Add executable to "all" target. -.PHONY: all -all: $(builddir)/test_string.node - diff --git a/test/addons-napi/test_symbol/build/Makefile b/test/addons-napi/test_symbol/build/Makefile deleted file mode 100644 index 0ec2231666a7b5..00000000000000 --- a/test/addons-napi/test_symbol/build/Makefile +++ /dev/null @@ -1,342 +0,0 @@ -# We borrow heavily from the kernel build setup, though we are simpler since -# we don't have Kconfig tweaking settings on us. - -# The implicit make rules have it looking for RCS files, among other things. -# We instead explicitly write all the rules we care about. -# It's even quicker (saves ~200ms) to pass -r on the command line. -MAKEFLAGS=-r - -# The source directory tree. -srcdir := .. -abs_srcdir := $(abspath $(srcdir)) - -# The name of the builddir. -builddir_name ?= . - -# The V=1 flag on command line makes us verbosely print command lines. 
-ifdef V - quiet= -else - quiet=quiet_ -endif - -# Specify BUILDTYPE=Release on the command line for a release build. -BUILDTYPE ?= Release - -# Directory all our build output goes into. -# Note that this must be two directories beneath src/ for unit tests to pass, -# as they reach into the src/ directory for data with relative paths. -builddir ?= $(builddir_name)/$(BUILDTYPE) -abs_builddir := $(abspath $(builddir)) -depsdir := $(builddir)/.deps - -# Object output directory. -obj := $(builddir)/obj -abs_obj := $(abspath $(obj)) - -# We build up a list of every single one of the targets so we can slurp in the -# generated dependency rule Makefiles in one pass. -all_deps := - - - -CC.target ?= $(CC) -CFLAGS.target ?= $(CPPFLAGS) $(CFLAGS) -CXX.target ?= $(CXX) -CXXFLAGS.target ?= $(CPPFLAGS) $(CXXFLAGS) -LINK.target ?= $(LINK) -LDFLAGS.target ?= $(LDFLAGS) -AR.target ?= $(AR) - -# C++ apps need to be linked with g++. -LINK ?= $(CXX.target) - -# TODO(evan): move all cross-compilation logic to gyp-time so we don't need -# to replicate this environment fallback in make as well. -CC.host ?= gcc -CFLAGS.host ?= $(CPPFLAGS_host) $(CFLAGS_host) -CXX.host ?= g++ -CXXFLAGS.host ?= $(CPPFLAGS_host) $(CXXFLAGS_host) -LINK.host ?= $(CXX.host) -LDFLAGS.host ?= -AR.host ?= ar - -# Define a dir function that can handle spaces. -# http://www.gnu.org/software/make/manual/make.html#Syntax-of-Functions -# "leading spaces cannot appear in the text of the first argument as written. -# These characters can be put into the argument value by variable substitution." -empty := -space := $(empty) $(empty) - -# http://stackoverflow.com/questions/1189781/using-make-dir-or-notdir-on-a-path-with-spaces -replace_spaces = $(subst $(space),?,$1) -unreplace_spaces = $(subst ?,$(space),$1) -dirx = $(call unreplace_spaces,$(dir $(call replace_spaces,$1))) - -# Flags to make gcc output dependency info. Note that you need to be -# careful here to use the flags that ccache and distcc can understand. -# We write to a dep file on the side first and then rename at the end -# so we can't end up with a broken dep file. -depfile = $(depsdir)/$(call replace_spaces,$@).d -DEPFLAGS = -MMD -MF $(depfile).raw - -# We have to fixup the deps output in a few ways. -# (1) the file output should mention the proper .o file. -# ccache or distcc lose the path to the target, so we convert a rule of -# the form: -# foobar.o: DEP1 DEP2 -# into -# path/to/foobar.o: DEP1 DEP2 -# (2) we want missing files not to cause us to fail to build. -# We want to rewrite -# foobar.o: DEP1 DEP2 \ -# DEP3 -# to -# DEP1: -# DEP2: -# DEP3: -# so if the files are missing, they're just considered phony rules. -# We have to do some pretty insane escaping to get those backslashes -# and dollar signs past make, the shell, and sed at the same time. -# Doesn't work with spaces, but that's fine: .d files have spaces in -# their names replaced with other characters. -define fixup_dep -# The depfile may not exist if the input file didn't have any #includes. -touch $(depfile).raw -# Fixup path as in (1). -sed -e "s|^$(notdir $@)|$@|" $(depfile).raw >> $(depfile) -# Add extra rules as in (2). -# We remove slashes and replace spaces with new lines; -# remove blank lines; -# delete the first line and append a colon to the remaining lines. 
-sed -e 's|\\||' -e 'y| |\n|' $(depfile).raw |\ - grep -v '^$$' |\ - sed -e 1d -e 's|$$|:|' \ - >> $(depfile) -rm $(depfile).raw -endef - -# Command definitions: -# - cmd_foo is the actual command to run; -# - quiet_cmd_foo is the brief-output summary of the command. - -quiet_cmd_cc = CC($(TOOLSET)) $@ -cmd_cc = $(CC.$(TOOLSET)) $(GYP_CFLAGS) $(DEPFLAGS) $(CFLAGS.$(TOOLSET)) -c -o $@ $< - -quiet_cmd_cxx = CXX($(TOOLSET)) $@ -cmd_cxx = $(CXX.$(TOOLSET)) $(GYP_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< - -quiet_cmd_objc = CXX($(TOOLSET)) $@ -cmd_objc = $(CC.$(TOOLSET)) $(GYP_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $< - -quiet_cmd_objcxx = CXX($(TOOLSET)) $@ -cmd_objcxx = $(CXX.$(TOOLSET)) $(GYP_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $< - -# Commands for precompiled header files. -quiet_cmd_pch_c = CXX($(TOOLSET)) $@ -cmd_pch_c = $(CC.$(TOOLSET)) $(GYP_PCH_CFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< -quiet_cmd_pch_cc = CXX($(TOOLSET)) $@ -cmd_pch_cc = $(CC.$(TOOLSET)) $(GYP_PCH_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< -quiet_cmd_pch_m = CXX($(TOOLSET)) $@ -cmd_pch_m = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $< -quiet_cmd_pch_mm = CXX($(TOOLSET)) $@ -cmd_pch_mm = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $< - -# gyp-mac-tool is written next to the root Makefile by gyp. -# Use $(4) for the command, since $(2) and $(3) are used as flag by do_cmd -# already. -quiet_cmd_mac_tool = MACTOOL $(4) $< -cmd_mac_tool = ./gyp-mac-tool $(4) $< "$@" - -quiet_cmd_mac_package_framework = PACKAGE FRAMEWORK $@ -cmd_mac_package_framework = ./gyp-mac-tool package-framework "$@" $(4) - -quiet_cmd_infoplist = INFOPLIST $@ -cmd_infoplist = $(CC.$(TOOLSET)) -E -P -Wno-trigraphs -x c $(INFOPLIST_DEFINES) "$<" -o "$@" - -quiet_cmd_touch = TOUCH $@ -cmd_touch = touch $@ - -quiet_cmd_copy = COPY $@ -# send stderr to /dev/null to ignore messages when linking directories. -cmd_copy = rm -rf "$@" && cp -af "$<" "$@" - -quiet_cmd_alink = LIBTOOL-STATIC $@ -cmd_alink = rm -f $@ && ./gyp-mac-tool filter-libtool libtool $(GYP_LIBTOOLFLAGS) -static -o $@ $(filter %.o,$^) - -quiet_cmd_link = LINK($(TOOLSET)) $@ -cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS) - -quiet_cmd_solink = SOLINK($(TOOLSET)) $@ -cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS) - -quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@ -cmd_solink_module = $(LINK.$(TOOLSET)) -bundle $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS) - - -# Define an escape_quotes function to escape single quotes. -# This allows us to handle quotes properly as long as we always use -# use single quotes and escape_quotes. -escape_quotes = $(subst ','\'',$(1)) -# This comment is here just to include a ' to unconfuse syntax highlighting. -# Define an escape_vars function to escape '$' variable syntax. -# This allows us to read/write command lines with shell variables (e.g. -# $LD_LIBRARY_PATH), without triggering make substitution. -escape_vars = $(subst $$,$$$$,$(1)) -# Helper that expands to a shell command to echo a string exactly as it is in -# make. This uses printf instead of echo because printf's behaviour with respect -# to escape sequences is more portable than echo's across different shells -# (e.g., dash, bash). 
-exact_echo = printf '%s\n' '$(call escape_quotes,$(1))' - -# Helper to compare the command we're about to run against the command -# we logged the last time we ran the command. Produces an empty -# string (false) when the commands match. -# Tricky point: Make has no string-equality test function. -# The kernel uses the following, but it seems like it would have false -# positives, where one string reordered its arguments. -# arg_check = $(strip $(filter-out $(cmd_$(1)), $(cmd_$@)) \ -# $(filter-out $(cmd_$@), $(cmd_$(1)))) -# We instead substitute each for the empty string into the other, and -# say they're equal if both substitutions produce the empty string. -# .d files contain ? instead of spaces, take that into account. -command_changed = $(or $(subst $(cmd_$(1)),,$(cmd_$(call replace_spaces,$@))),\ - $(subst $(cmd_$(call replace_spaces,$@)),,$(cmd_$(1)))) - -# Helper that is non-empty when a prerequisite changes. -# Normally make does this implicitly, but we force rules to always run -# so we can check their command lines. -# $? -- new prerequisites -# $| -- order-only dependencies -prereq_changed = $(filter-out FORCE_DO_CMD,$(filter-out $|,$?)) - -# Helper that executes all postbuilds until one fails. -define do_postbuilds - @E=0;\ - for p in $(POSTBUILDS); do\ - eval $$p;\ - E=$$?;\ - if [ $$E -ne 0 ]; then\ - break;\ - fi;\ - done;\ - if [ $$E -ne 0 ]; then\ - rm -rf "$@";\ - exit $$E;\ - fi -endef - -# do_cmd: run a command via the above cmd_foo names, if necessary. -# Should always run for a given target to handle command-line changes. -# Second argument, if non-zero, makes it do asm/C/C++ dependency munging. -# Third argument, if non-zero, makes it do POSTBUILDS processing. -# Note: We intentionally do NOT call dirx for depfile, since it contains ? for -# spaces already and dirx strips the ? characters. -define do_cmd -$(if $(or $(command_changed),$(prereq_changed)), - @$(call exact_echo, $($(quiet)cmd_$(1))) - @mkdir -p "$(call dirx,$@)" "$(dir $(depfile))" - $(if $(findstring flock,$(word 2,$(cmd_$1))), - @$(cmd_$(1)) - @echo " $(quiet_cmd_$(1)): Finished", - @$(cmd_$(1)) - ) - @$(call exact_echo,$(call escape_vars,cmd_$(call replace_spaces,$@) := $(cmd_$(1)))) > $(depfile) - @$(if $(2),$(fixup_dep)) - $(if $(and $(3), $(POSTBUILDS)), - $(call do_postbuilds) - ) -) -endef - -# Declare the "all" target first so it is the default, -# even though we don't have the deps yet. -.PHONY: all -all: - -# make looks for ways to re-generate included makefiles, but in our case, we -# don't have a direct way. Explicitly telling make that it has nothing to do -# for them makes it go faster. -%.d: ; - -# Use FORCE_DO_CMD to force a target to run. Should be coupled with -# do_cmd. -.PHONY: FORCE_DO_CMD -FORCE_DO_CMD: - -TOOLSET := target -# Suffix rules, putting all outputs into $(obj). -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.m FORCE_DO_CMD - @$(call do_cmd,objc,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.mm FORCE_DO_CMD - @$(call do_cmd,objcxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.S FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.s FORCE_DO_CMD - @$(call do_cmd,cc,1) - -# Try building from generated source, too. 
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.m FORCE_DO_CMD - @$(call do_cmd,objc,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.mm FORCE_DO_CMD - @$(call do_cmd,objcxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.S FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.s FORCE_DO_CMD - @$(call do_cmd,cc,1) - -$(obj).$(TOOLSET)/%.o: $(obj)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.m FORCE_DO_CMD - @$(call do_cmd,objc,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.mm FORCE_DO_CMD - @$(call do_cmd,objcxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.S FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.s FORCE_DO_CMD - @$(call do_cmd,cc,1) - - -ifeq ($(strip $(foreach prefix,$(NO_LOAD),\ - $(findstring $(join ^,$(prefix)),\ - $(join ^,test_symbol.target.mk)))),) - include test_symbol.target.mk -endif - -quiet_cmd_regen_makefile = ACTION Regenerating $@ -cmd_regen_makefile = cd $(srcdir); /Users/trott/io.js/deps/npm/node_modules/node-gyp/gyp/gyp_main.py -fmake --ignore-environment "--toplevel-dir=." -I/Users/trott/io.js/test/addons-napi/test_symbol/build/config.gypi -I/Users/trott/io.js/deps/npm/node_modules/node-gyp/addon.gypi -I/Users/trott/io.js/common.gypi "--depth=." "-Goutput_dir=." "--generator-output=build" "-Dlibrary=shared_library" "-Dvisibility=default" "-Dnode_root_dir=/Users/trott/io.js" "-Dnode_gyp_dir=/Users/trott/io.js/deps/npm/node_modules/node-gyp" "-Dnode_lib_file=/Users/trott/io.js/$(Configuration)/node.lib" "-Dmodule_root_dir=/Users/trott/io.js/test/addons-napi/test_symbol" "-Dnode_engine=v8" binding.gyp -Makefile: $(srcdir)/../../../deps/npm/node_modules/node-gyp/addon.gypi $(srcdir)/build/config.gypi $(srcdir)/binding.gyp $(srcdir)/../../../common.gypi - $(call do_cmd,regen_makefile) - -# "all" is a concatenation of the "all" targets from all the included -# sub-makefiles. This is just here to clarify. -all: - -# Add in dependency-tracking rules. $(all_deps) is the list of every single -# target in our tree. Only consider the ones with .d (dependency) info: -d_files := $(wildcard $(foreach f,$(all_deps),$(depsdir)/$(f).d)) -ifneq ($(d_files),) - include $(d_files) -endif diff --git a/test/addons-napi/test_symbol/build/binding.Makefile b/test/addons-napi/test_symbol/build/binding.Makefile deleted file mode 100644 index a19a6d8e4e0ee8..00000000000000 --- a/test/addons-napi/test_symbol/build/binding.Makefile +++ /dev/null @@ -1,6 +0,0 @@ -# This file is generated by gyp; do not edit. - -export builddir_name ?= ./build/. -.PHONY: all -all: - $(MAKE) test_symbol diff --git a/test/addons-napi/test_symbol/build/config.gypi b/test/addons-napi/test_symbol/build/config.gypi deleted file mode 100644 index 6a35cfccbb772b..00000000000000 --- a/test/addons-napi/test_symbol/build/config.gypi +++ /dev/null @@ -1,70 +0,0 @@ -# Do not edit. 
File was generated by node-gyp's "configure" step -{ - "target_defaults": { - "cflags": [], - "default_configuration": "Release", - "defines": [], - "include_dirs": [], - "libraries": [] - }, - "variables": { - "asan": 0, - "coverage": "false", - "debug_devtools": "node", - "debug_http2": "false", - "debug_nghttp2": "false", - "force_dynamic_crt": 0, - "host_arch": "x64", - "icu_data_file": "icudt59l.dat", - "icu_data_in": "../../deps/icu-small/source/data/in/icudt59l.dat", - "icu_endianness": "l", - "icu_gyp_path": "tools/icu/icu-generic.gyp", - "icu_locales": "en,root", - "icu_path": "deps/icu-small", - "icu_small": "true", - "icu_ver_major": "59", - "llvm_version": 0, - "node_byteorder": "little", - "node_enable_d8": "false", - "node_enable_v8_vtunejit": "false", - "node_install_npm": "true", - "node_module_version": 58, - "node_no_browser_globals": "false", - "node_prefix": "/usr/local", - "node_release_urlbase": "", - "node_shared": "false", - "node_shared_cares": "false", - "node_shared_http_parser": "false", - "node_shared_libuv": "false", - "node_shared_openssl": "false", - "node_shared_zlib": "false", - "node_tag": "", - "node_use_bundled_v8": "true", - "node_use_dtrace": "true", - "node_use_etw": "false", - "node_use_lttng": "false", - "node_use_openssl": "true", - "node_use_perfctr": "false", - "node_use_v8_platform": "true", - "node_without_node_options": "false", - "openssl_fips": "", - "openssl_no_asm": 0, - "shlib_suffix": "58.dylib", - "target_arch": "x64", - "uv_parent_path": "/deps/uv/", - "uv_use_dtrace": "true", - "v8_enable_gdbjit": 0, - "v8_enable_i18n_support": 1, - "v8_enable_inspector": 1, - "v8_no_strict_aliasing": 1, - "v8_optimized_debug": 0, - "v8_promise_internal_field_count": 1, - "v8_random_seed": 0, - "v8_trace_maps": 0, - "v8_use_snapshot": "true", - "want_separate_host_toolset": 0, - "xcode_version": "8.0", - "nodedir": "/Users/trott/io.js", - "standalone_static_library": 1 - } -} diff --git a/test/addons-napi/test_symbol/build/gyp-mac-tool b/test/addons-napi/test_symbol/build/gyp-mac-tool deleted file mode 100755 index 8ef02b0493a003..00000000000000 --- a/test/addons-napi/test_symbol/build/gyp-mac-tool +++ /dev/null @@ -1,611 +0,0 @@ -#!/usr/bin/env python -# Generated by gyp. Do not edit. -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Utility functions to perform Xcode-style build steps. - -These functions are executed via gyp-mac-tool when using the Makefile generator. -""" - -import fcntl -import fnmatch -import glob -import json -import os -import plistlib -import re -import shutil -import string -import subprocess -import sys -import tempfile - - -def main(args): - executor = MacTool() - exit_code = executor.Dispatch(args) - if exit_code is not None: - sys.exit(exit_code) - - -class MacTool(object): - """This class performs all the Mac tooling steps. 
The methods can either be - executed directly, or dispatched from an argument list.""" - - def Dispatch(self, args): - """Dispatches a string command to a method.""" - if len(args) < 1: - raise Exception("Not enough arguments") - - method = "Exec%s" % self._CommandifyName(args[0]) - return getattr(self, method)(*args[1:]) - - def _CommandifyName(self, name_string): - """Transforms a tool name like copy-info-plist to CopyInfoPlist""" - return name_string.title().replace('-', '') - - def ExecCopyBundleResource(self, source, dest, convert_to_binary): - """Copies a resource file to the bundle/Resources directory, performing any - necessary compilation on each resource.""" - extension = os.path.splitext(source)[1].lower() - if os.path.isdir(source): - # Copy tree. - # TODO(thakis): This copies file attributes like mtime, while the - # single-file branch below doesn't. This should probably be changed to - # be consistent with the single-file branch. - if os.path.exists(dest): - shutil.rmtree(dest) - shutil.copytree(source, dest) - elif extension == '.xib': - return self._CopyXIBFile(source, dest) - elif extension == '.storyboard': - return self._CopyXIBFile(source, dest) - elif extension == '.strings': - self._CopyStringsFile(source, dest, convert_to_binary) - else: - shutil.copy(source, dest) - - def _CopyXIBFile(self, source, dest): - """Compiles a XIB file with ibtool into a binary plist in the bundle.""" - - # ibtool sometimes crashes with relative paths. See crbug.com/314728. - base = os.path.dirname(os.path.realpath(__file__)) - if os.path.relpath(source): - source = os.path.join(base, source) - if os.path.relpath(dest): - dest = os.path.join(base, dest) - - args = ['xcrun', 'ibtool', '--errors', '--warnings', '--notices', - '--output-format', 'human-readable-text', '--compile', dest, source] - ibtool_section_re = re.compile(r'/\*.*\*/') - ibtool_re = re.compile(r'.*note:.*is clipping its content') - ibtoolout = subprocess.Popen(args, stdout=subprocess.PIPE) - current_section_header = None - for line in ibtoolout.stdout: - if ibtool_section_re.match(line): - current_section_header = line - elif not ibtool_re.match(line): - if current_section_header: - sys.stdout.write(current_section_header) - current_section_header = None - sys.stdout.write(line) - return ibtoolout.returncode - - def _ConvertToBinary(self, dest): - subprocess.check_call([ - 'xcrun', 'plutil', '-convert', 'binary1', '-o', dest, dest]) - - def _CopyStringsFile(self, source, dest, convert_to_binary): - """Copies a .strings file using iconv to reconvert the input into UTF-16.""" - input_code = self._DetectInputEncoding(source) or "UTF-8" - - # Xcode's CpyCopyStringsFile / builtin-copyStrings seems to call - # CFPropertyListCreateFromXMLData() behind the scenes; at least it prints - # CFPropertyListCreateFromXMLData(): Old-style plist parser: missing - # semicolon in dictionary. - # on invalid files. Do the same kind of validation. - import CoreFoundation - s = open(source, 'rb').read() - d = CoreFoundation.CFDataCreate(None, s, len(s)) - _, error = CoreFoundation.CFPropertyListCreateFromXMLData(None, d, 0, None) - if error: - return - - fp = open(dest, 'wb') - fp.write(s.decode(input_code).encode('UTF-16')) - fp.close() - - if convert_to_binary == 'True': - self._ConvertToBinary(dest) - - def _DetectInputEncoding(self, file_name): - """Reads the first few bytes from file_name and tries to guess the text - encoding. 
Returns None as a guess if it can't detect it.""" - fp = open(file_name, 'rb') - try: - header = fp.read(3) - except e: - fp.close() - return None - fp.close() - if header.startswith("\xFE\xFF"): - return "UTF-16" - elif header.startswith("\xFF\xFE"): - return "UTF-16" - elif header.startswith("\xEF\xBB\xBF"): - return "UTF-8" - else: - return None - - def ExecCopyInfoPlist(self, source, dest, convert_to_binary, *keys): - """Copies the |source| Info.plist to the destination directory |dest|.""" - # Read the source Info.plist into memory. - fd = open(source, 'r') - lines = fd.read() - fd.close() - - # Insert synthesized key/value pairs (e.g. BuildMachineOSBuild). - plist = plistlib.readPlistFromString(lines) - if keys: - plist = dict(plist.items() + json.loads(keys[0]).items()) - lines = plistlib.writePlistToString(plist) - - # Go through all the environment variables and replace them as variables in - # the file. - IDENT_RE = re.compile(r'[/\s]') - for key in os.environ: - if key.startswith('_'): - continue - evar = '${%s}' % key - evalue = os.environ[key] - lines = string.replace(lines, evar, evalue) - - # Xcode supports various suffices on environment variables, which are - # all undocumented. :rfc1034identifier is used in the standard project - # template these days, and :identifier was used earlier. They are used to - # convert non-url characters into things that look like valid urls -- - # except that the replacement character for :identifier, '_' isn't valid - # in a URL either -- oops, hence :rfc1034identifier was born. - evar = '${%s:identifier}' % key - evalue = IDENT_RE.sub('_', os.environ[key]) - lines = string.replace(lines, evar, evalue) - - evar = '${%s:rfc1034identifier}' % key - evalue = IDENT_RE.sub('-', os.environ[key]) - lines = string.replace(lines, evar, evalue) - - # Remove any keys with values that haven't been replaced. - lines = lines.split('\n') - for i in range(len(lines)): - if lines[i].strip().startswith("${"): - lines[i] = None - lines[i - 1] = None - lines = '\n'.join(filter(lambda x: x is not None, lines)) - - # Write out the file with variables replaced. - fd = open(dest, 'w') - fd.write(lines) - fd.close() - - # Now write out PkgInfo file now that the Info.plist file has been - # "compiled". - self._WritePkgInfo(dest) - - if convert_to_binary == 'True': - self._ConvertToBinary(dest) - - def _WritePkgInfo(self, info_plist): - """This writes the PkgInfo file from the data stored in Info.plist.""" - plist = plistlib.readPlist(info_plist) - if not plist: - return - - # Only create PkgInfo for executable types. - package_type = plist['CFBundlePackageType'] - if package_type != 'APPL': - return - - # The format of PkgInfo is eight characters, representing the bundle type - # and bundle signature, each four characters. If that is missing, four - # '?' characters are used instead. - signature_code = plist.get('CFBundleSignature', '????') - if len(signature_code) != 4: # Wrong length resets everything, too. - signature_code = '?' * 4 - - dest = os.path.join(os.path.dirname(info_plist), 'PkgInfo') - fp = open(dest, 'w') - fp.write('%s%s' % (package_type, signature_code)) - fp.close() - - def ExecFlock(self, lockfile, *cmd_list): - """Emulates the most basic behavior of Linux's flock(1).""" - # Rely on exception handling to report errors. 
- fd = os.open(lockfile, os.O_RDONLY|os.O_NOCTTY|os.O_CREAT, 0o666) - fcntl.flock(fd, fcntl.LOCK_EX) - return subprocess.call(cmd_list) - - def ExecFilterLibtool(self, *cmd_list): - """Calls libtool and filters out '/path/to/libtool: file: foo.o has no - symbols'.""" - libtool_re = re.compile(r'^.*libtool: file: .* has no symbols$') - libtool_re5 = re.compile( - r'^.*libtool: warning for library: ' + - r'.* the table of contents is empty ' + - r'\(no object file members in the library define global symbols\)$') - env = os.environ.copy() - # Ref: - # http://www.opensource.apple.com/source/cctools/cctools-809/misc/libtool.c - # The problem with this flag is that it resets the file mtime on the file to - # epoch=0, e.g. 1970-1-1 or 1969-12-31 depending on timezone. - env['ZERO_AR_DATE'] = '1' - libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env) - _, err = libtoolout.communicate() - for line in err.splitlines(): - if not libtool_re.match(line) and not libtool_re5.match(line): - print >>sys.stderr, line - # Unconditionally touch the output .a file on the command line if present - # and the command succeeded. A bit hacky. - if not libtoolout.returncode: - for i in range(len(cmd_list) - 1): - if cmd_list[i] == "-o" and cmd_list[i+1].endswith('.a'): - os.utime(cmd_list[i+1], None) - break - return libtoolout.returncode - - def ExecPackageFramework(self, framework, version): - """Takes a path to Something.framework and the Current version of that and - sets up all the symlinks.""" - # Find the name of the binary based on the part before the ".framework". - binary = os.path.basename(framework).split('.')[0] - - CURRENT = 'Current' - RESOURCES = 'Resources' - VERSIONS = 'Versions' - - if not os.path.exists(os.path.join(framework, VERSIONS, version, binary)): - # Binary-less frameworks don't seem to contain symlinks (see e.g. - # chromium's out/Debug/org.chromium.Chromium.manifest/ bundle). - return - - # Move into the framework directory to set the symlinks correctly. - pwd = os.getcwd() - os.chdir(framework) - - # Set up the Current version. - self._Relink(version, os.path.join(VERSIONS, CURRENT)) - - # Set up the root symlinks. - self._Relink(os.path.join(VERSIONS, CURRENT, binary), binary) - self._Relink(os.path.join(VERSIONS, CURRENT, RESOURCES), RESOURCES) - - # Back to where we were before! - os.chdir(pwd) - - def _Relink(self, dest, link): - """Creates a symlink to |dest| named |link|. If |link| already exists, - it is overwritten.""" - if os.path.lexists(link): - os.remove(link) - os.symlink(dest, link) - - def ExecCompileXcassets(self, keys, *inputs): - """Compiles multiple .xcassets files into a single .car file. - - This invokes 'actool' to compile all the inputs .xcassets files. The - |keys| arguments is a json-encoded dictionary of extra arguments to - pass to 'actool' when the asset catalogs contains an application icon - or a launch image. - - Note that 'actool' does not create the Assets.car file if the asset - catalogs does not contains imageset. 
- """ - command_line = [ - 'xcrun', 'actool', '--output-format', 'human-readable-text', - '--compress-pngs', '--notices', '--warnings', '--errors', - ] - is_iphone_target = 'IPHONEOS_DEPLOYMENT_TARGET' in os.environ - if is_iphone_target: - platform = os.environ['CONFIGURATION'].split('-')[-1] - if platform not in ('iphoneos', 'iphonesimulator'): - platform = 'iphonesimulator' - command_line.extend([ - '--platform', platform, '--target-device', 'iphone', - '--target-device', 'ipad', '--minimum-deployment-target', - os.environ['IPHONEOS_DEPLOYMENT_TARGET'], '--compile', - os.path.abspath(os.environ['CONTENTS_FOLDER_PATH']), - ]) - else: - command_line.extend([ - '--platform', 'macosx', '--target-device', 'mac', - '--minimum-deployment-target', os.environ['MACOSX_DEPLOYMENT_TARGET'], - '--compile', - os.path.abspath(os.environ['UNLOCALIZED_RESOURCES_FOLDER_PATH']), - ]) - if keys: - keys = json.loads(keys) - for key, value in keys.iteritems(): - arg_name = '--' + key - if isinstance(value, bool): - if value: - command_line.append(arg_name) - elif isinstance(value, list): - for v in value: - command_line.append(arg_name) - command_line.append(str(v)) - else: - command_line.append(arg_name) - command_line.append(str(value)) - # Note: actool crashes if inputs path are relative, so use os.path.abspath - # to get absolute path name for inputs. - command_line.extend(map(os.path.abspath, inputs)) - subprocess.check_call(command_line) - - def ExecMergeInfoPlist(self, output, *inputs): - """Merge multiple .plist files into a single .plist file.""" - merged_plist = {} - for path in inputs: - plist = self._LoadPlistMaybeBinary(path) - self._MergePlist(merged_plist, plist) - plistlib.writePlist(merged_plist, output) - - def ExecCodeSignBundle(self, key, resource_rules, entitlements, provisioning): - """Code sign a bundle. - - This function tries to code sign an iOS bundle, following the same - algorithm as Xcode: - 1. copy ResourceRules.plist from the user or the SDK into the bundle, - 2. pick the provisioning profile that best match the bundle identifier, - and copy it into the bundle as embedded.mobileprovision, - 3. copy Entitlements.plist from user or SDK next to the bundle, - 4. code sign the bundle. - """ - resource_rules_path = self._InstallResourceRules(resource_rules) - substitutions, overrides = self._InstallProvisioningProfile( - provisioning, self._GetCFBundleIdentifier()) - entitlements_path = self._InstallEntitlements( - entitlements, substitutions, overrides) - subprocess.check_call([ - 'codesign', '--force', '--sign', key, '--resource-rules', - resource_rules_path, '--entitlements', entitlements_path, - os.path.join( - os.environ['TARGET_BUILD_DIR'], - os.environ['FULL_PRODUCT_NAME'])]) - - def _InstallResourceRules(self, resource_rules): - """Installs ResourceRules.plist from user or SDK into the bundle. - - Args: - resource_rules: string, optional, path to the ResourceRules.plist file - to use, default to "${SDKROOT}/ResourceRules.plist" - - Returns: - Path to the copy of ResourceRules.plist into the bundle. - """ - source_path = resource_rules - target_path = os.path.join( - os.environ['BUILT_PRODUCTS_DIR'], - os.environ['CONTENTS_FOLDER_PATH'], - 'ResourceRules.plist') - if not source_path: - source_path = os.path.join( - os.environ['SDKROOT'], 'ResourceRules.plist') - shutil.copy2(source_path, target_path) - return target_path - - def _InstallProvisioningProfile(self, profile, bundle_identifier): - """Installs embedded.mobileprovision into the bundle. 
- - Args: - profile: string, optional, short name of the .mobileprovision file - to use, if empty or the file is missing, the best file installed - will be used - bundle_identifier: string, value of CFBundleIdentifier from Info.plist - - Returns: - A tuple containing two dictionary: variables substitutions and values - to overrides when generating the entitlements file. - """ - source_path, provisioning_data, team_id = self._FindProvisioningProfile( - profile, bundle_identifier) - target_path = os.path.join( - os.environ['BUILT_PRODUCTS_DIR'], - os.environ['CONTENTS_FOLDER_PATH'], - 'embedded.mobileprovision') - shutil.copy2(source_path, target_path) - substitutions = self._GetSubstitutions(bundle_identifier, team_id + '.') - return substitutions, provisioning_data['Entitlements'] - - def _FindProvisioningProfile(self, profile, bundle_identifier): - """Finds the .mobileprovision file to use for signing the bundle. - - Checks all the installed provisioning profiles (or if the user specified - the PROVISIONING_PROFILE variable, only consult it) and select the most - specific that correspond to the bundle identifier. - - Args: - profile: string, optional, short name of the .mobileprovision file - to use, if empty or the file is missing, the best file installed - will be used - bundle_identifier: string, value of CFBundleIdentifier from Info.plist - - Returns: - A tuple of the path to the selected provisioning profile, the data of - the embedded plist in the provisioning profile and the team identifier - to use for code signing. - - Raises: - SystemExit: if no .mobileprovision can be used to sign the bundle. - """ - profiles_dir = os.path.join( - os.environ['HOME'], 'Library', 'MobileDevice', 'Provisioning Profiles') - if not os.path.isdir(profiles_dir): - print >>sys.stderr, ( - 'cannot find mobile provisioning for %s' % bundle_identifier) - sys.exit(1) - provisioning_profiles = None - if profile: - profile_path = os.path.join(profiles_dir, profile + '.mobileprovision') - if os.path.exists(profile_path): - provisioning_profiles = [profile_path] - if not provisioning_profiles: - provisioning_profiles = glob.glob( - os.path.join(profiles_dir, '*.mobileprovision')) - valid_provisioning_profiles = {} - for profile_path in provisioning_profiles: - profile_data = self._LoadProvisioningProfile(profile_path) - app_id_pattern = profile_data.get( - 'Entitlements', {}).get('application-identifier', '') - for team_identifier in profile_data.get('TeamIdentifier', []): - app_id = '%s.%s' % (team_identifier, bundle_identifier) - if fnmatch.fnmatch(app_id, app_id_pattern): - valid_provisioning_profiles[app_id_pattern] = ( - profile_path, profile_data, team_identifier) - if not valid_provisioning_profiles: - print >>sys.stderr, ( - 'cannot find mobile provisioning for %s' % bundle_identifier) - sys.exit(1) - # If the user has multiple provisioning profiles installed that can be - # used for ${bundle_identifier}, pick the most specific one (ie. the - # provisioning profile whose pattern is the longest). - selected_key = max(valid_provisioning_profiles, key=lambda v: len(v)) - return valid_provisioning_profiles[selected_key] - - def _LoadProvisioningProfile(self, profile_path): - """Extracts the plist embedded in a provisioning profile. - - Args: - profile_path: string, path to the .mobileprovision file - - Returns: - Content of the plist embedded in the provisioning profile as a dictionary. 
- """ - with tempfile.NamedTemporaryFile() as temp: - subprocess.check_call([ - 'security', 'cms', '-D', '-i', profile_path, '-o', temp.name]) - return self._LoadPlistMaybeBinary(temp.name) - - def _MergePlist(self, merged_plist, plist): - """Merge |plist| into |merged_plist|.""" - for key, value in plist.iteritems(): - if isinstance(value, dict): - merged_value = merged_plist.get(key, {}) - if isinstance(merged_value, dict): - self._MergePlist(merged_value, value) - merged_plist[key] = merged_value - else: - merged_plist[key] = value - else: - merged_plist[key] = value - - def _LoadPlistMaybeBinary(self, plist_path): - """Loads into a memory a plist possibly encoded in binary format. - - This is a wrapper around plistlib.readPlist that tries to convert the - plist to the XML format if it can't be parsed (assuming that it is in - the binary format). - - Args: - plist_path: string, path to a plist file, in XML or binary format - - Returns: - Content of the plist as a dictionary. - """ - try: - # First, try to read the file using plistlib that only supports XML, - # and if an exception is raised, convert a temporary copy to XML and - # load that copy. - return plistlib.readPlist(plist_path) - except: - pass - with tempfile.NamedTemporaryFile() as temp: - shutil.copy2(plist_path, temp.name) - subprocess.check_call(['plutil', '-convert', 'xml1', temp.name]) - return plistlib.readPlist(temp.name) - - def _GetSubstitutions(self, bundle_identifier, app_identifier_prefix): - """Constructs a dictionary of variable substitutions for Entitlements.plist. - - Args: - bundle_identifier: string, value of CFBundleIdentifier from Info.plist - app_identifier_prefix: string, value for AppIdentifierPrefix - - Returns: - Dictionary of substitutions to apply when generating Entitlements.plist. - """ - return { - 'CFBundleIdentifier': bundle_identifier, - 'AppIdentifierPrefix': app_identifier_prefix, - } - - def _GetCFBundleIdentifier(self): - """Extracts CFBundleIdentifier value from Info.plist in the bundle. - - Returns: - Value of CFBundleIdentifier in the Info.plist located in the bundle. - """ - info_plist_path = os.path.join( - os.environ['TARGET_BUILD_DIR'], - os.environ['INFOPLIST_PATH']) - info_plist_data = self._LoadPlistMaybeBinary(info_plist_path) - return info_plist_data['CFBundleIdentifier'] - - def _InstallEntitlements(self, entitlements, substitutions, overrides): - """Generates and install the ${BundleName}.xcent entitlements file. - - Expands variables "$(variable)" pattern in the source entitlements file, - add extra entitlements defined in the .mobileprovision file and the copy - the generated plist to "${BundlePath}.xcent". - - Args: - entitlements: string, optional, path to the Entitlements.plist template - to use, defaults to "${SDKROOT}/Entitlements.plist" - substitutions: dictionary, variable substitutions - overrides: dictionary, values to add to the entitlements - - Returns: - Path to the generated entitlements file. 
- """ - source_path = entitlements - target_path = os.path.join( - os.environ['BUILT_PRODUCTS_DIR'], - os.environ['PRODUCT_NAME'] + '.xcent') - if not source_path: - source_path = os.path.join( - os.environ['SDKROOT'], - 'Entitlements.plist') - shutil.copy2(source_path, target_path) - data = self._LoadPlistMaybeBinary(target_path) - data = self._ExpandVariables(data, substitutions) - if overrides: - for key in overrides: - if key not in data: - data[key] = overrides[key] - plistlib.writePlist(data, target_path) - return target_path - - def _ExpandVariables(self, data, substitutions): - """Expands variables "$(variable)" in data. - - Args: - data: object, can be either string, list or dictionary - substitutions: dictionary, variable substitutions to perform - - Returns: - Copy of data where each references to "$(variable)" has been replaced - by the corresponding value found in substitutions, or left intact if - the key was not found. - """ - if isinstance(data, str): - for key, value in substitutions.iteritems(): - data = data.replace('$(%s)' % key, value) - return data - if isinstance(data, list): - return [self._ExpandVariables(v, substitutions) for v in data] - if isinstance(data, dict): - return {k: self._ExpandVariables(data[k], substitutions) for k in data} - return data - -if __name__ == '__main__': - sys.exit(main(sys.argv[1:])) diff --git a/test/addons-napi/test_symbol/build/test_symbol.target.mk b/test/addons-napi/test_symbol/build/test_symbol.target.mk deleted file mode 100644 index bbf53c2dac39d8..00000000000000 --- a/test/addons-napi/test_symbol/build/test_symbol.target.mk +++ /dev/null @@ -1,178 +0,0 @@ -# This file is generated by gyp; do not edit. - -TOOLSET := target -TARGET := test_symbol -DEFS_Debug := \ - '-DNODE_GYP_MODULE_NAME=test_symbol' \ - '-DUSING_UV_SHARED=1' \ - '-DUSING_V8_SHARED=1' \ - '-DV8_DEPRECATION_WARNINGS=1' \ - '-D_DARWIN_USE_64_BIT_INODE=1' \ - '-D_LARGEFILE_SOURCE' \ - '-D_FILE_OFFSET_BITS=64' \ - '-DBUILDING_NODE_EXTENSION' \ - '-DDEBUG' \ - '-D_DEBUG' \ - '-DV8_ENABLE_CHECKS' - -# Flags passed to all source files. -CFLAGS_Debug := \ - -O0 \ - -gdwarf-2 \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -Wall \ - -Wendif-labels \ - -W \ - -Wno-unused-parameter - -# Flags passed to only C files. -CFLAGS_C_Debug := \ - -fno-strict-aliasing - -# Flags passed to only C++ files. -CFLAGS_CC_Debug := \ - -std=gnu++0x \ - -stdlib=libc++ \ - -fno-rtti \ - -fno-exceptions \ - -fno-threadsafe-statics \ - -fno-strict-aliasing - -# Flags passed to only ObjC files. -CFLAGS_OBJC_Debug := - -# Flags passed to only ObjC++ files. -CFLAGS_OBJCC_Debug := - -INCS_Debug := \ - -I/Users/trott/io.js/include/node \ - -I/Users/trott/io.js/src \ - -I/Users/trott/io.js/deps/uv/include \ - -I/Users/trott/io.js/deps/v8/include - -DEFS_Release := \ - '-DNODE_GYP_MODULE_NAME=test_symbol' \ - '-DUSING_UV_SHARED=1' \ - '-DUSING_V8_SHARED=1' \ - '-DV8_DEPRECATION_WARNINGS=1' \ - '-D_DARWIN_USE_64_BIT_INODE=1' \ - '-D_LARGEFILE_SOURCE' \ - '-D_FILE_OFFSET_BITS=64' \ - '-DBUILDING_NODE_EXTENSION' - -# Flags passed to all source files. -CFLAGS_Release := \ - -Os \ - -gdwarf-2 \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -Wall \ - -Wendif-labels \ - -W \ - -Wno-unused-parameter - -# Flags passed to only C files. -CFLAGS_C_Release := \ - -fno-strict-aliasing - -# Flags passed to only C++ files. -CFLAGS_CC_Release := \ - -std=gnu++0x \ - -stdlib=libc++ \ - -fno-rtti \ - -fno-exceptions \ - -fno-threadsafe-statics \ - -fno-strict-aliasing - -# Flags passed to only ObjC files. 
-CFLAGS_OBJC_Release := - -# Flags passed to only ObjC++ files. -CFLAGS_OBJCC_Release := - -INCS_Release := \ - -I/Users/trott/io.js/include/node \ - -I/Users/trott/io.js/src \ - -I/Users/trott/io.js/deps/uv/include \ - -I/Users/trott/io.js/deps/v8/include - -OBJS := \ - $(obj).target/$(TARGET)/test_symbol.o - -# Add to the list of files we specially track dependencies for. -all_deps += $(OBJS) - -# CFLAGS et al overrides must be target-local. -# See "Target-specific Variable Values" in the GNU Make manual. -$(OBJS): TOOLSET := $(TOOLSET) -$(OBJS): GYP_CFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) -$(OBJS): GYP_CXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) -$(OBJS): GYP_OBJCFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) $(CFLAGS_OBJC_$(BUILDTYPE)) -$(OBJS): GYP_OBJCXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) $(CFLAGS_OBJCC_$(BUILDTYPE)) - -# Suffix rules, putting all outputs into $(obj). - -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) - -# Try building from generated source, too. - -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) - -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) - -# End of this set of suffix rules -### Rules for final target. -LDFLAGS_Debug := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -L$(builddir) \ - -stdlib=libc++ - -LIBTOOLFLAGS_Debug := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first - -LDFLAGS_Release := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -L$(builddir) \ - -stdlib=libc++ - -LIBTOOLFLAGS_Release := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first - -LIBS := - -$(builddir)/test_symbol.node: GYP_LDFLAGS := $(LDFLAGS_$(BUILDTYPE)) -$(builddir)/test_symbol.node: LIBS := $(LIBS) -$(builddir)/test_symbol.node: GYP_LIBTOOLFLAGS := $(LIBTOOLFLAGS_$(BUILDTYPE)) -$(builddir)/test_symbol.node: TOOLSET := $(TOOLSET) -$(builddir)/test_symbol.node: $(OBJS) FORCE_DO_CMD - $(call do_cmd,solink_module) - -all_deps += $(builddir)/test_symbol.node -# Add target alias -.PHONY: test_symbol -test_symbol: $(builddir)/test_symbol.node - -# Short alias for building this executable. -.PHONY: test_symbol.node -test_symbol.node: $(builddir)/test_symbol.node - -# Add executable to "all" target. -.PHONY: all -all: $(builddir)/test_symbol.node - diff --git a/test/addons-napi/test_typedarray/build/Makefile b/test/addons-napi/test_typedarray/build/Makefile deleted file mode 100644 index c64c416be7b325..00000000000000 --- a/test/addons-napi/test_typedarray/build/Makefile +++ /dev/null @@ -1,342 +0,0 @@ -# We borrow heavily from the kernel build setup, though we are simpler since -# we don't have Kconfig tweaking settings on us. - -# The implicit make rules have it looking for RCS files, among other things. -# We instead explicitly write all the rules we care about. -# It's even quicker (saves ~200ms) to pass -r on the command line. -MAKEFLAGS=-r - -# The source directory tree. -srcdir := .. -abs_srcdir := $(abspath $(srcdir)) - -# The name of the builddir. -builddir_name ?= . - -# The V=1 flag on command line makes us verbosely print command lines. 
-ifdef V - quiet= -else - quiet=quiet_ -endif - -# Specify BUILDTYPE=Release on the command line for a release build. -BUILDTYPE ?= Release - -# Directory all our build output goes into. -# Note that this must be two directories beneath src/ for unit tests to pass, -# as they reach into the src/ directory for data with relative paths. -builddir ?= $(builddir_name)/$(BUILDTYPE) -abs_builddir := $(abspath $(builddir)) -depsdir := $(builddir)/.deps - -# Object output directory. -obj := $(builddir)/obj -abs_obj := $(abspath $(obj)) - -# We build up a list of every single one of the targets so we can slurp in the -# generated dependency rule Makefiles in one pass. -all_deps := - - - -CC.target ?= $(CC) -CFLAGS.target ?= $(CPPFLAGS) $(CFLAGS) -CXX.target ?= $(CXX) -CXXFLAGS.target ?= $(CPPFLAGS) $(CXXFLAGS) -LINK.target ?= $(LINK) -LDFLAGS.target ?= $(LDFLAGS) -AR.target ?= $(AR) - -# C++ apps need to be linked with g++. -LINK ?= $(CXX.target) - -# TODO(evan): move all cross-compilation logic to gyp-time so we don't need -# to replicate this environment fallback in make as well. -CC.host ?= gcc -CFLAGS.host ?= $(CPPFLAGS_host) $(CFLAGS_host) -CXX.host ?= g++ -CXXFLAGS.host ?= $(CPPFLAGS_host) $(CXXFLAGS_host) -LINK.host ?= $(CXX.host) -LDFLAGS.host ?= -AR.host ?= ar - -# Define a dir function that can handle spaces. -# http://www.gnu.org/software/make/manual/make.html#Syntax-of-Functions -# "leading spaces cannot appear in the text of the first argument as written. -# These characters can be put into the argument value by variable substitution." -empty := -space := $(empty) $(empty) - -# http://stackoverflow.com/questions/1189781/using-make-dir-or-notdir-on-a-path-with-spaces -replace_spaces = $(subst $(space),?,$1) -unreplace_spaces = $(subst ?,$(space),$1) -dirx = $(call unreplace_spaces,$(dir $(call replace_spaces,$1))) - -# Flags to make gcc output dependency info. Note that you need to be -# careful here to use the flags that ccache and distcc can understand. -# We write to a dep file on the side first and then rename at the end -# so we can't end up with a broken dep file. -depfile = $(depsdir)/$(call replace_spaces,$@).d -DEPFLAGS = -MMD -MF $(depfile).raw - -# We have to fixup the deps output in a few ways. -# (1) the file output should mention the proper .o file. -# ccache or distcc lose the path to the target, so we convert a rule of -# the form: -# foobar.o: DEP1 DEP2 -# into -# path/to/foobar.o: DEP1 DEP2 -# (2) we want missing files not to cause us to fail to build. -# We want to rewrite -# foobar.o: DEP1 DEP2 \ -# DEP3 -# to -# DEP1: -# DEP2: -# DEP3: -# so if the files are missing, they're just considered phony rules. -# We have to do some pretty insane escaping to get those backslashes -# and dollar signs past make, the shell, and sed at the same time. -# Doesn't work with spaces, but that's fine: .d files have spaces in -# their names replaced with other characters. -define fixup_dep -# The depfile may not exist if the input file didn't have any #includes. -touch $(depfile).raw -# Fixup path as in (1). -sed -e "s|^$(notdir $@)|$@|" $(depfile).raw >> $(depfile) -# Add extra rules as in (2). -# We remove slashes and replace spaces with new lines; -# remove blank lines; -# delete the first line and append a colon to the remaining lines. 
-sed -e 's|\\||' -e 'y| |\n|' $(depfile).raw |\ - grep -v '^$$' |\ - sed -e 1d -e 's|$$|:|' \ - >> $(depfile) -rm $(depfile).raw -endef - -# Command definitions: -# - cmd_foo is the actual command to run; -# - quiet_cmd_foo is the brief-output summary of the command. - -quiet_cmd_cc = CC($(TOOLSET)) $@ -cmd_cc = $(CC.$(TOOLSET)) $(GYP_CFLAGS) $(DEPFLAGS) $(CFLAGS.$(TOOLSET)) -c -o $@ $< - -quiet_cmd_cxx = CXX($(TOOLSET)) $@ -cmd_cxx = $(CXX.$(TOOLSET)) $(GYP_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< - -quiet_cmd_objc = CXX($(TOOLSET)) $@ -cmd_objc = $(CC.$(TOOLSET)) $(GYP_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $< - -quiet_cmd_objcxx = CXX($(TOOLSET)) $@ -cmd_objcxx = $(CXX.$(TOOLSET)) $(GYP_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $< - -# Commands for precompiled header files. -quiet_cmd_pch_c = CXX($(TOOLSET)) $@ -cmd_pch_c = $(CC.$(TOOLSET)) $(GYP_PCH_CFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< -quiet_cmd_pch_cc = CXX($(TOOLSET)) $@ -cmd_pch_cc = $(CC.$(TOOLSET)) $(GYP_PCH_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< -quiet_cmd_pch_m = CXX($(TOOLSET)) $@ -cmd_pch_m = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $< -quiet_cmd_pch_mm = CXX($(TOOLSET)) $@ -cmd_pch_mm = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $< - -# gyp-mac-tool is written next to the root Makefile by gyp. -# Use $(4) for the command, since $(2) and $(3) are used as flag by do_cmd -# already. -quiet_cmd_mac_tool = MACTOOL $(4) $< -cmd_mac_tool = ./gyp-mac-tool $(4) $< "$@" - -quiet_cmd_mac_package_framework = PACKAGE FRAMEWORK $@ -cmd_mac_package_framework = ./gyp-mac-tool package-framework "$@" $(4) - -quiet_cmd_infoplist = INFOPLIST $@ -cmd_infoplist = $(CC.$(TOOLSET)) -E -P -Wno-trigraphs -x c $(INFOPLIST_DEFINES) "$<" -o "$@" - -quiet_cmd_touch = TOUCH $@ -cmd_touch = touch $@ - -quiet_cmd_copy = COPY $@ -# send stderr to /dev/null to ignore messages when linking directories. -cmd_copy = rm -rf "$@" && cp -af "$<" "$@" - -quiet_cmd_alink = LIBTOOL-STATIC $@ -cmd_alink = rm -f $@ && ./gyp-mac-tool filter-libtool libtool $(GYP_LIBTOOLFLAGS) -static -o $@ $(filter %.o,$^) - -quiet_cmd_link = LINK($(TOOLSET)) $@ -cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS) - -quiet_cmd_solink = SOLINK($(TOOLSET)) $@ -cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS) - -quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@ -cmd_solink_module = $(LINK.$(TOOLSET)) -bundle $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS) - - -# Define an escape_quotes function to escape single quotes. -# This allows us to handle quotes properly as long as we always use -# use single quotes and escape_quotes. -escape_quotes = $(subst ','\'',$(1)) -# This comment is here just to include a ' to unconfuse syntax highlighting. -# Define an escape_vars function to escape '$' variable syntax. -# This allows us to read/write command lines with shell variables (e.g. -# $LD_LIBRARY_PATH), without triggering make substitution. -escape_vars = $(subst $$,$$$$,$(1)) -# Helper that expands to a shell command to echo a string exactly as it is in -# make. This uses printf instead of echo because printf's behaviour with respect -# to escape sequences is more portable than echo's across different shells -# (e.g., dash, bash). 
-exact_echo = printf '%s\n' '$(call escape_quotes,$(1))' - -# Helper to compare the command we're about to run against the command -# we logged the last time we ran the command. Produces an empty -# string (false) when the commands match. -# Tricky point: Make has no string-equality test function. -# The kernel uses the following, but it seems like it would have false -# positives, where one string reordered its arguments. -# arg_check = $(strip $(filter-out $(cmd_$(1)), $(cmd_$@)) \ -# $(filter-out $(cmd_$@), $(cmd_$(1)))) -# We instead substitute each for the empty string into the other, and -# say they're equal if both substitutions produce the empty string. -# .d files contain ? instead of spaces, take that into account. -command_changed = $(or $(subst $(cmd_$(1)),,$(cmd_$(call replace_spaces,$@))),\ - $(subst $(cmd_$(call replace_spaces,$@)),,$(cmd_$(1)))) - -# Helper that is non-empty when a prerequisite changes. -# Normally make does this implicitly, but we force rules to always run -# so we can check their command lines. -# $? -- new prerequisites -# $| -- order-only dependencies -prereq_changed = $(filter-out FORCE_DO_CMD,$(filter-out $|,$?)) - -# Helper that executes all postbuilds until one fails. -define do_postbuilds - @E=0;\ - for p in $(POSTBUILDS); do\ - eval $$p;\ - E=$$?;\ - if [ $$E -ne 0 ]; then\ - break;\ - fi;\ - done;\ - if [ $$E -ne 0 ]; then\ - rm -rf "$@";\ - exit $$E;\ - fi -endef - -# do_cmd: run a command via the above cmd_foo names, if necessary. -# Should always run for a given target to handle command-line changes. -# Second argument, if non-zero, makes it do asm/C/C++ dependency munging. -# Third argument, if non-zero, makes it do POSTBUILDS processing. -# Note: We intentionally do NOT call dirx for depfile, since it contains ? for -# spaces already and dirx strips the ? characters. -define do_cmd -$(if $(or $(command_changed),$(prereq_changed)), - @$(call exact_echo, $($(quiet)cmd_$(1))) - @mkdir -p "$(call dirx,$@)" "$(dir $(depfile))" - $(if $(findstring flock,$(word 2,$(cmd_$1))), - @$(cmd_$(1)) - @echo " $(quiet_cmd_$(1)): Finished", - @$(cmd_$(1)) - ) - @$(call exact_echo,$(call escape_vars,cmd_$(call replace_spaces,$@) := $(cmd_$(1)))) > $(depfile) - @$(if $(2),$(fixup_dep)) - $(if $(and $(3), $(POSTBUILDS)), - $(call do_postbuilds) - ) -) -endef - -# Declare the "all" target first so it is the default, -# even though we don't have the deps yet. -.PHONY: all -all: - -# make looks for ways to re-generate included makefiles, but in our case, we -# don't have a direct way. Explicitly telling make that it has nothing to do -# for them makes it go faster. -%.d: ; - -# Use FORCE_DO_CMD to force a target to run. Should be coupled with -# do_cmd. -.PHONY: FORCE_DO_CMD -FORCE_DO_CMD: - -TOOLSET := target -# Suffix rules, putting all outputs into $(obj). -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.m FORCE_DO_CMD - @$(call do_cmd,objc,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.mm FORCE_DO_CMD - @$(call do_cmd,objcxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.S FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.s FORCE_DO_CMD - @$(call do_cmd,cc,1) - -# Try building from generated source, too. 
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.m FORCE_DO_CMD - @$(call do_cmd,objc,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.mm FORCE_DO_CMD - @$(call do_cmd,objcxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.S FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.s FORCE_DO_CMD - @$(call do_cmd,cc,1) - -$(obj).$(TOOLSET)/%.o: $(obj)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.m FORCE_DO_CMD - @$(call do_cmd,objc,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.mm FORCE_DO_CMD - @$(call do_cmd,objcxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.S FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.s FORCE_DO_CMD - @$(call do_cmd,cc,1) - - -ifeq ($(strip $(foreach prefix,$(NO_LOAD),\ - $(findstring $(join ^,$(prefix)),\ - $(join ^,test_typedarray.target.mk)))),) - include test_typedarray.target.mk -endif - -quiet_cmd_regen_makefile = ACTION Regenerating $@ -cmd_regen_makefile = cd $(srcdir); /Users/trott/io.js/deps/npm/node_modules/node-gyp/gyp/gyp_main.py -fmake --ignore-environment "--toplevel-dir=." -I/Users/trott/io.js/test/addons-napi/test_typedarray/build/config.gypi -I/Users/trott/io.js/deps/npm/node_modules/node-gyp/addon.gypi -I/Users/trott/io.js/common.gypi "--depth=." "-Goutput_dir=." "--generator-output=build" "-Dlibrary=shared_library" "-Dvisibility=default" "-Dnode_root_dir=/Users/trott/io.js" "-Dnode_gyp_dir=/Users/trott/io.js/deps/npm/node_modules/node-gyp" "-Dnode_lib_file=/Users/trott/io.js/$(Configuration)/node.lib" "-Dmodule_root_dir=/Users/trott/io.js/test/addons-napi/test_typedarray" "-Dnode_engine=v8" binding.gyp -Makefile: $(srcdir)/../../../deps/npm/node_modules/node-gyp/addon.gypi $(srcdir)/build/config.gypi $(srcdir)/binding.gyp $(srcdir)/../../../common.gypi - $(call do_cmd,regen_makefile) - -# "all" is a concatenation of the "all" targets from all the included -# sub-makefiles. This is just here to clarify. -all: - -# Add in dependency-tracking rules. $(all_deps) is the list of every single -# target in our tree. Only consider the ones with .d (dependency) info: -d_files := $(wildcard $(foreach f,$(all_deps),$(depsdir)/$(f).d)) -ifneq ($(d_files),) - include $(d_files) -endif diff --git a/test/addons-napi/test_typedarray/build/binding.Makefile b/test/addons-napi/test_typedarray/build/binding.Makefile deleted file mode 100644 index 940d823bd8cd5e..00000000000000 --- a/test/addons-napi/test_typedarray/build/binding.Makefile +++ /dev/null @@ -1,6 +0,0 @@ -# This file is generated by gyp; do not edit. - -export builddir_name ?= ./build/. -.PHONY: all -all: - $(MAKE) test_typedarray diff --git a/test/addons-napi/test_typedarray/build/config.gypi b/test/addons-napi/test_typedarray/build/config.gypi deleted file mode 100644 index 6a35cfccbb772b..00000000000000 --- a/test/addons-napi/test_typedarray/build/config.gypi +++ /dev/null @@ -1,70 +0,0 @@ -# Do not edit. 
File was generated by node-gyp's "configure" step -{ - "target_defaults": { - "cflags": [], - "default_configuration": "Release", - "defines": [], - "include_dirs": [], - "libraries": [] - }, - "variables": { - "asan": 0, - "coverage": "false", - "debug_devtools": "node", - "debug_http2": "false", - "debug_nghttp2": "false", - "force_dynamic_crt": 0, - "host_arch": "x64", - "icu_data_file": "icudt59l.dat", - "icu_data_in": "../../deps/icu-small/source/data/in/icudt59l.dat", - "icu_endianness": "l", - "icu_gyp_path": "tools/icu/icu-generic.gyp", - "icu_locales": "en,root", - "icu_path": "deps/icu-small", - "icu_small": "true", - "icu_ver_major": "59", - "llvm_version": 0, - "node_byteorder": "little", - "node_enable_d8": "false", - "node_enable_v8_vtunejit": "false", - "node_install_npm": "true", - "node_module_version": 58, - "node_no_browser_globals": "false", - "node_prefix": "/usr/local", - "node_release_urlbase": "", - "node_shared": "false", - "node_shared_cares": "false", - "node_shared_http_parser": "false", - "node_shared_libuv": "false", - "node_shared_openssl": "false", - "node_shared_zlib": "false", - "node_tag": "", - "node_use_bundled_v8": "true", - "node_use_dtrace": "true", - "node_use_etw": "false", - "node_use_lttng": "false", - "node_use_openssl": "true", - "node_use_perfctr": "false", - "node_use_v8_platform": "true", - "node_without_node_options": "false", - "openssl_fips": "", - "openssl_no_asm": 0, - "shlib_suffix": "58.dylib", - "target_arch": "x64", - "uv_parent_path": "/deps/uv/", - "uv_use_dtrace": "true", - "v8_enable_gdbjit": 0, - "v8_enable_i18n_support": 1, - "v8_enable_inspector": 1, - "v8_no_strict_aliasing": 1, - "v8_optimized_debug": 0, - "v8_promise_internal_field_count": 1, - "v8_random_seed": 0, - "v8_trace_maps": 0, - "v8_use_snapshot": "true", - "want_separate_host_toolset": 0, - "xcode_version": "8.0", - "nodedir": "/Users/trott/io.js", - "standalone_static_library": 1 - } -} diff --git a/test/addons-napi/test_typedarray/build/gyp-mac-tool b/test/addons-napi/test_typedarray/build/gyp-mac-tool deleted file mode 100755 index 8ef02b0493a003..00000000000000 --- a/test/addons-napi/test_typedarray/build/gyp-mac-tool +++ /dev/null @@ -1,611 +0,0 @@ -#!/usr/bin/env python -# Generated by gyp. Do not edit. -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Utility functions to perform Xcode-style build steps. - -These functions are executed via gyp-mac-tool when using the Makefile generator. -""" - -import fcntl -import fnmatch -import glob -import json -import os -import plistlib -import re -import shutil -import string -import subprocess -import sys -import tempfile - - -def main(args): - executor = MacTool() - exit_code = executor.Dispatch(args) - if exit_code is not None: - sys.exit(exit_code) - - -class MacTool(object): - """This class performs all the Mac tooling steps. 
The methods can either be - executed directly, or dispatched from an argument list.""" - - def Dispatch(self, args): - """Dispatches a string command to a method.""" - if len(args) < 1: - raise Exception("Not enough arguments") - - method = "Exec%s" % self._CommandifyName(args[0]) - return getattr(self, method)(*args[1:]) - - def _CommandifyName(self, name_string): - """Transforms a tool name like copy-info-plist to CopyInfoPlist""" - return name_string.title().replace('-', '') - - def ExecCopyBundleResource(self, source, dest, convert_to_binary): - """Copies a resource file to the bundle/Resources directory, performing any - necessary compilation on each resource.""" - extension = os.path.splitext(source)[1].lower() - if os.path.isdir(source): - # Copy tree. - # TODO(thakis): This copies file attributes like mtime, while the - # single-file branch below doesn't. This should probably be changed to - # be consistent with the single-file branch. - if os.path.exists(dest): - shutil.rmtree(dest) - shutil.copytree(source, dest) - elif extension == '.xib': - return self._CopyXIBFile(source, dest) - elif extension == '.storyboard': - return self._CopyXIBFile(source, dest) - elif extension == '.strings': - self._CopyStringsFile(source, dest, convert_to_binary) - else: - shutil.copy(source, dest) - - def _CopyXIBFile(self, source, dest): - """Compiles a XIB file with ibtool into a binary plist in the bundle.""" - - # ibtool sometimes crashes with relative paths. See crbug.com/314728. - base = os.path.dirname(os.path.realpath(__file__)) - if os.path.relpath(source): - source = os.path.join(base, source) - if os.path.relpath(dest): - dest = os.path.join(base, dest) - - args = ['xcrun', 'ibtool', '--errors', '--warnings', '--notices', - '--output-format', 'human-readable-text', '--compile', dest, source] - ibtool_section_re = re.compile(r'/\*.*\*/') - ibtool_re = re.compile(r'.*note:.*is clipping its content') - ibtoolout = subprocess.Popen(args, stdout=subprocess.PIPE) - current_section_header = None - for line in ibtoolout.stdout: - if ibtool_section_re.match(line): - current_section_header = line - elif not ibtool_re.match(line): - if current_section_header: - sys.stdout.write(current_section_header) - current_section_header = None - sys.stdout.write(line) - return ibtoolout.returncode - - def _ConvertToBinary(self, dest): - subprocess.check_call([ - 'xcrun', 'plutil', '-convert', 'binary1', '-o', dest, dest]) - - def _CopyStringsFile(self, source, dest, convert_to_binary): - """Copies a .strings file using iconv to reconvert the input into UTF-16.""" - input_code = self._DetectInputEncoding(source) or "UTF-8" - - # Xcode's CpyCopyStringsFile / builtin-copyStrings seems to call - # CFPropertyListCreateFromXMLData() behind the scenes; at least it prints - # CFPropertyListCreateFromXMLData(): Old-style plist parser: missing - # semicolon in dictionary. - # on invalid files. Do the same kind of validation. - import CoreFoundation - s = open(source, 'rb').read() - d = CoreFoundation.CFDataCreate(None, s, len(s)) - _, error = CoreFoundation.CFPropertyListCreateFromXMLData(None, d, 0, None) - if error: - return - - fp = open(dest, 'wb') - fp.write(s.decode(input_code).encode('UTF-16')) - fp.close() - - if convert_to_binary == 'True': - self._ConvertToBinary(dest) - - def _DetectInputEncoding(self, file_name): - """Reads the first few bytes from file_name and tries to guess the text - encoding. 
Returns None as a guess if it can't detect it.""" - fp = open(file_name, 'rb') - try: - header = fp.read(3) - except e: - fp.close() - return None - fp.close() - if header.startswith("\xFE\xFF"): - return "UTF-16" - elif header.startswith("\xFF\xFE"): - return "UTF-16" - elif header.startswith("\xEF\xBB\xBF"): - return "UTF-8" - else: - return None - - def ExecCopyInfoPlist(self, source, dest, convert_to_binary, *keys): - """Copies the |source| Info.plist to the destination directory |dest|.""" - # Read the source Info.plist into memory. - fd = open(source, 'r') - lines = fd.read() - fd.close() - - # Insert synthesized key/value pairs (e.g. BuildMachineOSBuild). - plist = plistlib.readPlistFromString(lines) - if keys: - plist = dict(plist.items() + json.loads(keys[0]).items()) - lines = plistlib.writePlistToString(plist) - - # Go through all the environment variables and replace them as variables in - # the file. - IDENT_RE = re.compile(r'[/\s]') - for key in os.environ: - if key.startswith('_'): - continue - evar = '${%s}' % key - evalue = os.environ[key] - lines = string.replace(lines, evar, evalue) - - # Xcode supports various suffices on environment variables, which are - # all undocumented. :rfc1034identifier is used in the standard project - # template these days, and :identifier was used earlier. They are used to - # convert non-url characters into things that look like valid urls -- - # except that the replacement character for :identifier, '_' isn't valid - # in a URL either -- oops, hence :rfc1034identifier was born. - evar = '${%s:identifier}' % key - evalue = IDENT_RE.sub('_', os.environ[key]) - lines = string.replace(lines, evar, evalue) - - evar = '${%s:rfc1034identifier}' % key - evalue = IDENT_RE.sub('-', os.environ[key]) - lines = string.replace(lines, evar, evalue) - - # Remove any keys with values that haven't been replaced. - lines = lines.split('\n') - for i in range(len(lines)): - if lines[i].strip().startswith("${"): - lines[i] = None - lines[i - 1] = None - lines = '\n'.join(filter(lambda x: x is not None, lines)) - - # Write out the file with variables replaced. - fd = open(dest, 'w') - fd.write(lines) - fd.close() - - # Now write out PkgInfo file now that the Info.plist file has been - # "compiled". - self._WritePkgInfo(dest) - - if convert_to_binary == 'True': - self._ConvertToBinary(dest) - - def _WritePkgInfo(self, info_plist): - """This writes the PkgInfo file from the data stored in Info.plist.""" - plist = plistlib.readPlist(info_plist) - if not plist: - return - - # Only create PkgInfo for executable types. - package_type = plist['CFBundlePackageType'] - if package_type != 'APPL': - return - - # The format of PkgInfo is eight characters, representing the bundle type - # and bundle signature, each four characters. If that is missing, four - # '?' characters are used instead. - signature_code = plist.get('CFBundleSignature', '????') - if len(signature_code) != 4: # Wrong length resets everything, too. - signature_code = '?' * 4 - - dest = os.path.join(os.path.dirname(info_plist), 'PkgInfo') - fp = open(dest, 'w') - fp.write('%s%s' % (package_type, signature_code)) - fp.close() - - def ExecFlock(self, lockfile, *cmd_list): - """Emulates the most basic behavior of Linux's flock(1).""" - # Rely on exception handling to report errors. 
- fd = os.open(lockfile, os.O_RDONLY|os.O_NOCTTY|os.O_CREAT, 0o666) - fcntl.flock(fd, fcntl.LOCK_EX) - return subprocess.call(cmd_list) - - def ExecFilterLibtool(self, *cmd_list): - """Calls libtool and filters out '/path/to/libtool: file: foo.o has no - symbols'.""" - libtool_re = re.compile(r'^.*libtool: file: .* has no symbols$') - libtool_re5 = re.compile( - r'^.*libtool: warning for library: ' + - r'.* the table of contents is empty ' + - r'\(no object file members in the library define global symbols\)$') - env = os.environ.copy() - # Ref: - # http://www.opensource.apple.com/source/cctools/cctools-809/misc/libtool.c - # The problem with this flag is that it resets the file mtime on the file to - # epoch=0, e.g. 1970-1-1 or 1969-12-31 depending on timezone. - env['ZERO_AR_DATE'] = '1' - libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env) - _, err = libtoolout.communicate() - for line in err.splitlines(): - if not libtool_re.match(line) and not libtool_re5.match(line): - print >>sys.stderr, line - # Unconditionally touch the output .a file on the command line if present - # and the command succeeded. A bit hacky. - if not libtoolout.returncode: - for i in range(len(cmd_list) - 1): - if cmd_list[i] == "-o" and cmd_list[i+1].endswith('.a'): - os.utime(cmd_list[i+1], None) - break - return libtoolout.returncode - - def ExecPackageFramework(self, framework, version): - """Takes a path to Something.framework and the Current version of that and - sets up all the symlinks.""" - # Find the name of the binary based on the part before the ".framework". - binary = os.path.basename(framework).split('.')[0] - - CURRENT = 'Current' - RESOURCES = 'Resources' - VERSIONS = 'Versions' - - if not os.path.exists(os.path.join(framework, VERSIONS, version, binary)): - # Binary-less frameworks don't seem to contain symlinks (see e.g. - # chromium's out/Debug/org.chromium.Chromium.manifest/ bundle). - return - - # Move into the framework directory to set the symlinks correctly. - pwd = os.getcwd() - os.chdir(framework) - - # Set up the Current version. - self._Relink(version, os.path.join(VERSIONS, CURRENT)) - - # Set up the root symlinks. - self._Relink(os.path.join(VERSIONS, CURRENT, binary), binary) - self._Relink(os.path.join(VERSIONS, CURRENT, RESOURCES), RESOURCES) - - # Back to where we were before! - os.chdir(pwd) - - def _Relink(self, dest, link): - """Creates a symlink to |dest| named |link|. If |link| already exists, - it is overwritten.""" - if os.path.lexists(link): - os.remove(link) - os.symlink(dest, link) - - def ExecCompileXcassets(self, keys, *inputs): - """Compiles multiple .xcassets files into a single .car file. - - This invokes 'actool' to compile all the inputs .xcassets files. The - |keys| arguments is a json-encoded dictionary of extra arguments to - pass to 'actool' when the asset catalogs contains an application icon - or a launch image. - - Note that 'actool' does not create the Assets.car file if the asset - catalogs does not contains imageset. 
- """ - command_line = [ - 'xcrun', 'actool', '--output-format', 'human-readable-text', - '--compress-pngs', '--notices', '--warnings', '--errors', - ] - is_iphone_target = 'IPHONEOS_DEPLOYMENT_TARGET' in os.environ - if is_iphone_target: - platform = os.environ['CONFIGURATION'].split('-')[-1] - if platform not in ('iphoneos', 'iphonesimulator'): - platform = 'iphonesimulator' - command_line.extend([ - '--platform', platform, '--target-device', 'iphone', - '--target-device', 'ipad', '--minimum-deployment-target', - os.environ['IPHONEOS_DEPLOYMENT_TARGET'], '--compile', - os.path.abspath(os.environ['CONTENTS_FOLDER_PATH']), - ]) - else: - command_line.extend([ - '--platform', 'macosx', '--target-device', 'mac', - '--minimum-deployment-target', os.environ['MACOSX_DEPLOYMENT_TARGET'], - '--compile', - os.path.abspath(os.environ['UNLOCALIZED_RESOURCES_FOLDER_PATH']), - ]) - if keys: - keys = json.loads(keys) - for key, value in keys.iteritems(): - arg_name = '--' + key - if isinstance(value, bool): - if value: - command_line.append(arg_name) - elif isinstance(value, list): - for v in value: - command_line.append(arg_name) - command_line.append(str(v)) - else: - command_line.append(arg_name) - command_line.append(str(value)) - # Note: actool crashes if inputs path are relative, so use os.path.abspath - # to get absolute path name for inputs. - command_line.extend(map(os.path.abspath, inputs)) - subprocess.check_call(command_line) - - def ExecMergeInfoPlist(self, output, *inputs): - """Merge multiple .plist files into a single .plist file.""" - merged_plist = {} - for path in inputs: - plist = self._LoadPlistMaybeBinary(path) - self._MergePlist(merged_plist, plist) - plistlib.writePlist(merged_plist, output) - - def ExecCodeSignBundle(self, key, resource_rules, entitlements, provisioning): - """Code sign a bundle. - - This function tries to code sign an iOS bundle, following the same - algorithm as Xcode: - 1. copy ResourceRules.plist from the user or the SDK into the bundle, - 2. pick the provisioning profile that best match the bundle identifier, - and copy it into the bundle as embedded.mobileprovision, - 3. copy Entitlements.plist from user or SDK next to the bundle, - 4. code sign the bundle. - """ - resource_rules_path = self._InstallResourceRules(resource_rules) - substitutions, overrides = self._InstallProvisioningProfile( - provisioning, self._GetCFBundleIdentifier()) - entitlements_path = self._InstallEntitlements( - entitlements, substitutions, overrides) - subprocess.check_call([ - 'codesign', '--force', '--sign', key, '--resource-rules', - resource_rules_path, '--entitlements', entitlements_path, - os.path.join( - os.environ['TARGET_BUILD_DIR'], - os.environ['FULL_PRODUCT_NAME'])]) - - def _InstallResourceRules(self, resource_rules): - """Installs ResourceRules.plist from user or SDK into the bundle. - - Args: - resource_rules: string, optional, path to the ResourceRules.plist file - to use, default to "${SDKROOT}/ResourceRules.plist" - - Returns: - Path to the copy of ResourceRules.plist into the bundle. - """ - source_path = resource_rules - target_path = os.path.join( - os.environ['BUILT_PRODUCTS_DIR'], - os.environ['CONTENTS_FOLDER_PATH'], - 'ResourceRules.plist') - if not source_path: - source_path = os.path.join( - os.environ['SDKROOT'], 'ResourceRules.plist') - shutil.copy2(source_path, target_path) - return target_path - - def _InstallProvisioningProfile(self, profile, bundle_identifier): - """Installs embedded.mobileprovision into the bundle. 
- - Args: - profile: string, optional, short name of the .mobileprovision file - to use, if empty or the file is missing, the best file installed - will be used - bundle_identifier: string, value of CFBundleIdentifier from Info.plist - - Returns: - A tuple containing two dictionary: variables substitutions and values - to overrides when generating the entitlements file. - """ - source_path, provisioning_data, team_id = self._FindProvisioningProfile( - profile, bundle_identifier) - target_path = os.path.join( - os.environ['BUILT_PRODUCTS_DIR'], - os.environ['CONTENTS_FOLDER_PATH'], - 'embedded.mobileprovision') - shutil.copy2(source_path, target_path) - substitutions = self._GetSubstitutions(bundle_identifier, team_id + '.') - return substitutions, provisioning_data['Entitlements'] - - def _FindProvisioningProfile(self, profile, bundle_identifier): - """Finds the .mobileprovision file to use for signing the bundle. - - Checks all the installed provisioning profiles (or if the user specified - the PROVISIONING_PROFILE variable, only consult it) and select the most - specific that correspond to the bundle identifier. - - Args: - profile: string, optional, short name of the .mobileprovision file - to use, if empty or the file is missing, the best file installed - will be used - bundle_identifier: string, value of CFBundleIdentifier from Info.plist - - Returns: - A tuple of the path to the selected provisioning profile, the data of - the embedded plist in the provisioning profile and the team identifier - to use for code signing. - - Raises: - SystemExit: if no .mobileprovision can be used to sign the bundle. - """ - profiles_dir = os.path.join( - os.environ['HOME'], 'Library', 'MobileDevice', 'Provisioning Profiles') - if not os.path.isdir(profiles_dir): - print >>sys.stderr, ( - 'cannot find mobile provisioning for %s' % bundle_identifier) - sys.exit(1) - provisioning_profiles = None - if profile: - profile_path = os.path.join(profiles_dir, profile + '.mobileprovision') - if os.path.exists(profile_path): - provisioning_profiles = [profile_path] - if not provisioning_profiles: - provisioning_profiles = glob.glob( - os.path.join(profiles_dir, '*.mobileprovision')) - valid_provisioning_profiles = {} - for profile_path in provisioning_profiles: - profile_data = self._LoadProvisioningProfile(profile_path) - app_id_pattern = profile_data.get( - 'Entitlements', {}).get('application-identifier', '') - for team_identifier in profile_data.get('TeamIdentifier', []): - app_id = '%s.%s' % (team_identifier, bundle_identifier) - if fnmatch.fnmatch(app_id, app_id_pattern): - valid_provisioning_profiles[app_id_pattern] = ( - profile_path, profile_data, team_identifier) - if not valid_provisioning_profiles: - print >>sys.stderr, ( - 'cannot find mobile provisioning for %s' % bundle_identifier) - sys.exit(1) - # If the user has multiple provisioning profiles installed that can be - # used for ${bundle_identifier}, pick the most specific one (ie. the - # provisioning profile whose pattern is the longest). - selected_key = max(valid_provisioning_profiles, key=lambda v: len(v)) - return valid_provisioning_profiles[selected_key] - - def _LoadProvisioningProfile(self, profile_path): - """Extracts the plist embedded in a provisioning profile. - - Args: - profile_path: string, path to the .mobileprovision file - - Returns: - Content of the plist embedded in the provisioning profile as a dictionary. 
- """ - with tempfile.NamedTemporaryFile() as temp: - subprocess.check_call([ - 'security', 'cms', '-D', '-i', profile_path, '-o', temp.name]) - return self._LoadPlistMaybeBinary(temp.name) - - def _MergePlist(self, merged_plist, plist): - """Merge |plist| into |merged_plist|.""" - for key, value in plist.iteritems(): - if isinstance(value, dict): - merged_value = merged_plist.get(key, {}) - if isinstance(merged_value, dict): - self._MergePlist(merged_value, value) - merged_plist[key] = merged_value - else: - merged_plist[key] = value - else: - merged_plist[key] = value - - def _LoadPlistMaybeBinary(self, plist_path): - """Loads into a memory a plist possibly encoded in binary format. - - This is a wrapper around plistlib.readPlist that tries to convert the - plist to the XML format if it can't be parsed (assuming that it is in - the binary format). - - Args: - plist_path: string, path to a plist file, in XML or binary format - - Returns: - Content of the plist as a dictionary. - """ - try: - # First, try to read the file using plistlib that only supports XML, - # and if an exception is raised, convert a temporary copy to XML and - # load that copy. - return plistlib.readPlist(plist_path) - except: - pass - with tempfile.NamedTemporaryFile() as temp: - shutil.copy2(plist_path, temp.name) - subprocess.check_call(['plutil', '-convert', 'xml1', temp.name]) - return plistlib.readPlist(temp.name) - - def _GetSubstitutions(self, bundle_identifier, app_identifier_prefix): - """Constructs a dictionary of variable substitutions for Entitlements.plist. - - Args: - bundle_identifier: string, value of CFBundleIdentifier from Info.plist - app_identifier_prefix: string, value for AppIdentifierPrefix - - Returns: - Dictionary of substitutions to apply when generating Entitlements.plist. - """ - return { - 'CFBundleIdentifier': bundle_identifier, - 'AppIdentifierPrefix': app_identifier_prefix, - } - - def _GetCFBundleIdentifier(self): - """Extracts CFBundleIdentifier value from Info.plist in the bundle. - - Returns: - Value of CFBundleIdentifier in the Info.plist located in the bundle. - """ - info_plist_path = os.path.join( - os.environ['TARGET_BUILD_DIR'], - os.environ['INFOPLIST_PATH']) - info_plist_data = self._LoadPlistMaybeBinary(info_plist_path) - return info_plist_data['CFBundleIdentifier'] - - def _InstallEntitlements(self, entitlements, substitutions, overrides): - """Generates and install the ${BundleName}.xcent entitlements file. - - Expands variables "$(variable)" pattern in the source entitlements file, - add extra entitlements defined in the .mobileprovision file and the copy - the generated plist to "${BundlePath}.xcent". - - Args: - entitlements: string, optional, path to the Entitlements.plist template - to use, defaults to "${SDKROOT}/Entitlements.plist" - substitutions: dictionary, variable substitutions - overrides: dictionary, values to add to the entitlements - - Returns: - Path to the generated entitlements file. 
- """ - source_path = entitlements - target_path = os.path.join( - os.environ['BUILT_PRODUCTS_DIR'], - os.environ['PRODUCT_NAME'] + '.xcent') - if not source_path: - source_path = os.path.join( - os.environ['SDKROOT'], - 'Entitlements.plist') - shutil.copy2(source_path, target_path) - data = self._LoadPlistMaybeBinary(target_path) - data = self._ExpandVariables(data, substitutions) - if overrides: - for key in overrides: - if key not in data: - data[key] = overrides[key] - plistlib.writePlist(data, target_path) - return target_path - - def _ExpandVariables(self, data, substitutions): - """Expands variables "$(variable)" in data. - - Args: - data: object, can be either string, list or dictionary - substitutions: dictionary, variable substitutions to perform - - Returns: - Copy of data where each references to "$(variable)" has been replaced - by the corresponding value found in substitutions, or left intact if - the key was not found. - """ - if isinstance(data, str): - for key, value in substitutions.iteritems(): - data = data.replace('$(%s)' % key, value) - return data - if isinstance(data, list): - return [self._ExpandVariables(v, substitutions) for v in data] - if isinstance(data, dict): - return {k: self._ExpandVariables(data[k], substitutions) for k in data} - return data - -if __name__ == '__main__': - sys.exit(main(sys.argv[1:])) diff --git a/test/addons-napi/test_typedarray/build/test_typedarray.target.mk b/test/addons-napi/test_typedarray/build/test_typedarray.target.mk deleted file mode 100644 index 3f0c4a421abc27..00000000000000 --- a/test/addons-napi/test_typedarray/build/test_typedarray.target.mk +++ /dev/null @@ -1,178 +0,0 @@ -# This file is generated by gyp; do not edit. - -TOOLSET := target -TARGET := test_typedarray -DEFS_Debug := \ - '-DNODE_GYP_MODULE_NAME=test_typedarray' \ - '-DUSING_UV_SHARED=1' \ - '-DUSING_V8_SHARED=1' \ - '-DV8_DEPRECATION_WARNINGS=1' \ - '-D_DARWIN_USE_64_BIT_INODE=1' \ - '-D_LARGEFILE_SOURCE' \ - '-D_FILE_OFFSET_BITS=64' \ - '-DBUILDING_NODE_EXTENSION' \ - '-DDEBUG' \ - '-D_DEBUG' \ - '-DV8_ENABLE_CHECKS' - -# Flags passed to all source files. -CFLAGS_Debug := \ - -O0 \ - -gdwarf-2 \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -Wall \ - -Wendif-labels \ - -W \ - -Wno-unused-parameter - -# Flags passed to only C files. -CFLAGS_C_Debug := \ - -fno-strict-aliasing - -# Flags passed to only C++ files. -CFLAGS_CC_Debug := \ - -std=gnu++0x \ - -stdlib=libc++ \ - -fno-rtti \ - -fno-exceptions \ - -fno-threadsafe-statics \ - -fno-strict-aliasing - -# Flags passed to only ObjC files. -CFLAGS_OBJC_Debug := - -# Flags passed to only ObjC++ files. -CFLAGS_OBJCC_Debug := - -INCS_Debug := \ - -I/Users/trott/io.js/include/node \ - -I/Users/trott/io.js/src \ - -I/Users/trott/io.js/deps/uv/include \ - -I/Users/trott/io.js/deps/v8/include - -DEFS_Release := \ - '-DNODE_GYP_MODULE_NAME=test_typedarray' \ - '-DUSING_UV_SHARED=1' \ - '-DUSING_V8_SHARED=1' \ - '-DV8_DEPRECATION_WARNINGS=1' \ - '-D_DARWIN_USE_64_BIT_INODE=1' \ - '-D_LARGEFILE_SOURCE' \ - '-D_FILE_OFFSET_BITS=64' \ - '-DBUILDING_NODE_EXTENSION' - -# Flags passed to all source files. -CFLAGS_Release := \ - -Os \ - -gdwarf-2 \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -Wall \ - -Wendif-labels \ - -W \ - -Wno-unused-parameter - -# Flags passed to only C files. -CFLAGS_C_Release := \ - -fno-strict-aliasing - -# Flags passed to only C++ files. 
-CFLAGS_CC_Release := \ - -std=gnu++0x \ - -stdlib=libc++ \ - -fno-rtti \ - -fno-exceptions \ - -fno-threadsafe-statics \ - -fno-strict-aliasing - -# Flags passed to only ObjC files. -CFLAGS_OBJC_Release := - -# Flags passed to only ObjC++ files. -CFLAGS_OBJCC_Release := - -INCS_Release := \ - -I/Users/trott/io.js/include/node \ - -I/Users/trott/io.js/src \ - -I/Users/trott/io.js/deps/uv/include \ - -I/Users/trott/io.js/deps/v8/include - -OBJS := \ - $(obj).target/$(TARGET)/test_typedarray.o - -# Add to the list of files we specially track dependencies for. -all_deps += $(OBJS) - -# CFLAGS et al overrides must be target-local. -# See "Target-specific Variable Values" in the GNU Make manual. -$(OBJS): TOOLSET := $(TOOLSET) -$(OBJS): GYP_CFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) -$(OBJS): GYP_CXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) -$(OBJS): GYP_OBJCFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) $(CFLAGS_OBJC_$(BUILDTYPE)) -$(OBJS): GYP_OBJCXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) $(CFLAGS_OBJCC_$(BUILDTYPE)) - -# Suffix rules, putting all outputs into $(obj). - -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) - -# Try building from generated source, too. - -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) - -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) - -# End of this set of suffix rules -### Rules for final target. -LDFLAGS_Debug := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -L$(builddir) \ - -stdlib=libc++ - -LIBTOOLFLAGS_Debug := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first - -LDFLAGS_Release := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first \ - -mmacosx-version-min=10.7 \ - -arch x86_64 \ - -L$(builddir) \ - -stdlib=libc++ - -LIBTOOLFLAGS_Release := \ - -undefined dynamic_lookup \ - -Wl,-no_pie \ - -Wl,-search_paths_first - -LIBS := - -$(builddir)/test_typedarray.node: GYP_LDFLAGS := $(LDFLAGS_$(BUILDTYPE)) -$(builddir)/test_typedarray.node: LIBS := $(LIBS) -$(builddir)/test_typedarray.node: GYP_LIBTOOLFLAGS := $(LIBTOOLFLAGS_$(BUILDTYPE)) -$(builddir)/test_typedarray.node: TOOLSET := $(TOOLSET) -$(builddir)/test_typedarray.node: $(OBJS) FORCE_DO_CMD - $(call do_cmd,solink_module) - -all_deps += $(builddir)/test_typedarray.node -# Add target alias -.PHONY: test_typedarray -test_typedarray: $(builddir)/test_typedarray.node - -# Short alias for building this executable. -.PHONY: test_typedarray.node -test_typedarray.node: $(builddir)/test_typedarray.node - -# Add executable to "all" target. 
-.PHONY: all -all: $(builddir)/test_typedarray.node - From fd7d1990db1cd6b2f5d400b8f9295c4368cfdad8 Mon Sep 17 00:00:00 2001 From: Kyle Farnung Date: Tue, 27 Feb 2018 10:58:28 -0800 Subject: [PATCH 007/227] test: remove orphaned entries from status PR-URL: https://github.com/nodejs/node/pull/19042 Reviewed-By: Myles Borins --- test/inspector/inspector.status | 1 - test/known_issues/known_issues.status | 2 -- test/sequential/sequential.status | 1 - 3 files changed, 4 deletions(-) diff --git a/test/inspector/inspector.status b/test/inspector/inspector.status index 070d817b2c3ab2..ed6a782b9031a7 100644 --- a/test/inspector/inspector.status +++ b/test/inspector/inspector.status @@ -5,6 +5,5 @@ prefix inspector # sample-test : PASS,FLAKY [true] # This section applies to all platforms -test-inspector-port-zero-cluster : PASS,FLAKY [$system==win32] diff --git a/test/known_issues/known_issues.status b/test/known_issues/known_issues.status index 46c8ed32741c7d..e21913e232c03f 100644 --- a/test/known_issues/known_issues.status +++ b/test/known_issues/known_issues.status @@ -7,8 +7,6 @@ prefix known_issues [true] # This section applies to all platforms [$system==win32] -test-stdout-buffer-flush-on-exit: SKIP -test-cluster-disconnect-handles: SKIP [$system==linux] diff --git a/test/sequential/sequential.status b/test/sequential/sequential.status index d8ef1ad44a4624..8467864395ff66 100644 --- a/test/sequential/sequential.status +++ b/test/sequential/sequential.status @@ -7,7 +7,6 @@ prefix sequential [true] # This section applies to all platforms [$system==win32] -test-inspector-stop-profile-after-done: PASS, FLAKY [$system==linux] From dbe70b744c40611f6900755d57f6aaf90f8d805f Mon Sep 17 00:00:00 2001 From: Luigi Pinca Date: Wed, 17 Jan 2018 15:44:49 +0100 Subject: [PATCH 008/227] http: free the parser before emitting 'upgrade' Ensure that the parser is freed before emitting the 'connect' or 'upgrade' event. PR-URL: https://github.com/nodejs/node/pull/18209 Reviewed-By: James M Snell Reviewed-By: Anna Henningsen Reviewed-By: Jon Moss Reviewed-By: Fedor Indutny Reviewed-By: Ruben Bridgewater --- lib/_http_client.js | 2 +- .../test-http-parser-freed-before-upgrade.js | 33 +++++++++++++++++++ 2 files changed, 34 insertions(+), 1 deletion(-) create mode 100644 test/parallel/test-http-parser-freed-before-upgrade.js diff --git a/lib/_http_client.js b/lib/_http_client.js index 4d71ec594743f4..9d2057814133b7 100644 --- a/lib/_http_client.js +++ b/lib/_http_client.js @@ -376,6 +376,7 @@ function socketOnData(d) { socket.removeListener('data', socketOnData); socket.removeListener('end', socketOnEnd); parser.finish(); + freeParser(parser, req, socket); var bodyHead = d.slice(bytesParsed, d.length); @@ -398,7 +399,6 @@ function socketOnData(d) { // Got Upgrade header or CONNECT method, but have no handler. 
socket.destroy(); } - freeParser(parser, req, socket); } else if (parser.incoming && parser.incoming.complete && // When the status code is 100 (Continue), the server will // send a final response after this client sends a request diff --git a/test/parallel/test-http-parser-freed-before-upgrade.js b/test/parallel/test-http-parser-freed-before-upgrade.js new file mode 100644 index 00000000000000..4ba1de9501681c --- /dev/null +++ b/test/parallel/test-http-parser-freed-before-upgrade.js @@ -0,0 +1,33 @@ +'use strict'; + +const common = require('../common'); +const assert = require('assert'); +const http = require('http'); + +const server = http.createServer(); + +server.on('upgrade', common.mustCall((request, socket) => { + assert.strictEqual(socket.parser, null); + socket.write([ + 'HTTP/1.1 101 Switching Protocols', + 'Connection: Upgrade', + 'Upgrade: WebSocket', + '\r\n' + ].join('\r\n')); +})); + +server.listen(common.mustCall(() => { + const request = http.get({ + port: server.address().port, + headers: { + Connection: 'Upgrade', + Upgrade: 'WebSocket' + } + }); + + request.on('upgrade', common.mustCall((response, socket) => { + assert.strictEqual(socket.parser, null); + socket.destroy(); + server.close(); + })); +})); From 64c83d7da9af5684633df48f56625f4dafca05dd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E9=99=88=E5=88=9A?= Date: Sat, 20 Jan 2018 08:19:05 +0800 Subject: [PATCH 009/227] stream: simplify `src._readableState` to `state` MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit PR-URL: https://github.com/nodejs/node/pull/18264 Reviewed-By: Weijia Wang Reviewed-By: Luigi Pinca Reviewed-By: Michaël Zasso Reviewed-By: Matteo Collina --- lib/_stream_readable.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/_stream_readable.js b/lib/_stream_readable.js index 541ef1c305d8bf..dfba99cbc74d7d 100644 --- a/lib/_stream_readable.js +++ b/lib/_stream_readable.js @@ -561,8 +561,8 @@ Readable.prototype.pipe = function(dest, pipeOpts) { if (((state.pipesCount === 1 && state.pipes === dest) || (state.pipesCount > 1 && state.pipes.indexOf(dest) !== -1)) && !cleanedUp) { - debug('false write response, pause', src._readableState.awaitDrain); - src._readableState.awaitDrain++; + debug('false write response, pause', state.awaitDrain); + state.awaitDrain++; increasedAwaitDrain = true; } src.pause(); From 67fd5205398bf7423de60fd0211325d32e71bac5 Mon Sep 17 00:00:00 2001 From: Ali Ijaz Sheikh Date: Wed, 24 Jan 2018 10:50:50 -0800 Subject: [PATCH 010/227] doc: reorder section on updating PR branch MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit It makes more sense to provide instructions on how to update the PR branch before instructions on pushing the commit. 
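For illustration, the reordered flow (assuming the fork remote is `origin` and the
PR branch is called `bugfix`, as in the guide text below) comes down to:

```text
# Optional: update the PR branch first so it shows the purple merged status.
$ git push --force-with-lease origin master:bugfix

# Then push the commit:
$ git push upstream master
```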
PR-URL: https://github.com/nodejs/node/pull/18355 Reviewed-By: Anna Henningsen Reviewed-By: Michaël Zasso Reviewed-By: Richard Lau Reviewed-By: Colin Ihrig Reviewed-By: Luigi Pinca Reviewed-By: James M Snell Reviewed-By: Jon Moss --- COLLABORATOR_GUIDE.md | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/COLLABORATOR_GUIDE.md b/COLLABORATOR_GUIDE.md index 7d8d70cdac38e0..11cf78f69b806f 100644 --- a/COLLABORATOR_GUIDE.md +++ b/COLLABORATOR_GUIDE.md @@ -593,20 +593,20 @@ Validate that the commit message is properly formatted using $ git rev-list upstream/master...HEAD | xargs core-validate-commit ``` +Optional: When landing your own commits, force push the amended commit to the +branch you used to open the pull request. If your branch is called `bugfix`, +then the command would be `git push --force-with-lease origin master:bugfix`. +When the pull request is closed, this will cause the pull request to +show the purple merged status rather than the red closed status that is +usually used for pull requests that weren't merged. + Time to push it: ```text $ git push upstream master ``` -* Optional: Force push the amended commit to the branch you used to -open the pull request. If your branch is called `bugfix`, then the -command would be `git push --force-with-lease origin master:bugfix`. -When the pull request is closed, this will cause the pull request to -show the purple merged status rather than the red closed status that is -usually used for pull requests that weren't merged. Only do this when -landing your own contributions. -* Close the pull request with a "Landed in ``" comment. If +Close the pull request with a "Landed in ``" comment. If your pull request shows the purple merged status then you should still add the "Landed in .." comment if you added multiple commits. From 27d3c1a0f4dd29286daf51d6dda6ed9eda762902 Mon Sep 17 00:00:00 2001 From: Rich Trott Date: Wed, 31 Jan 2018 09:35:31 -0800 Subject: [PATCH 011/227] doc: add Gibson Fahnestock to TSC MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Welcome Gibson to the TSC! 
PR-URL: https://github.com/nodejs/node/pull/18481 Reviewed-By: Colin Ihrig Reviewed-By: Michaël Zasso Reviewed-By: Ali Ijaz Sheikh Reviewed-By: Daniel Bevenius Reviewed-By: Jon Moss Reviewed-By: Sakthipriyan Vairamani Reviewed-By: Tiancheng "Timothy" Gu Reviewed-By: Evan Lucas Reviewed-By: Сковорода Никита Андреевич Reviewed-By: Richard Lau Reviewed-By: Michael Dawson --- README.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/README.md b/README.md index 55ad1756f15c1e..4530eac002d185 100644 --- a/README.md +++ b/README.md @@ -248,6 +248,8 @@ For more information about the governance of the Node.js project, see **Franziska Hinkelmann** <franziska.hinkelmann@gmail.com> (she/her) * [Fishrock123](https://github.com/Fishrock123) - **Jeremiah Senkpiel** <fishrock123@rocketmail.com> +* [gibfahn](https://github.com/gibfahn) - +**Gibson Fahnestock** <gibfahn@gmail.com> (he/him) * [indutny](https://github.com/indutny) - **Fedor Indutny** <fedor.indutny@gmail.com> * [jasnell](https://github.com/jasnell) - From 43839f160149c023c8d270414956141dd945fafe Mon Sep 17 00:00:00 2001 From: Rich Trott Date: Wed, 31 Jan 2018 09:36:51 -0800 Subject: [PATCH 012/227] doc: move Brian White to TSC Emeriti list MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit PR-URL: https://github.com/nodejs/node/pull/18482 Reviewed-By: Colin Ihrig Reviewed-By: Michaël Zasso Reviewed-By: Daniel Bevenius Reviewed-By: Ali Ijaz Sheikh Reviewed-By: Jon Moss Reviewed-By: Evan Lucas Reviewed-By: James M Snell Reviewed-By: Ruben Bridgewater Reviewed-By: Michael Dawson --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 4530eac002d185..53471775067632 100644 --- a/README.md +++ b/README.md @@ -260,8 +260,6 @@ For more information about the governance of the Node.js project, see **Matteo Collina** <matteo.collina@gmail.com> (he/him) * [mhdawson](https://github.com/mhdawson) - **Michael Dawson** <michael_dawson@ca.ibm.com> (he/him) -* [mscdex](https://github.com/mscdex) - -**Brian White** <mscdex@mscdex.net> * [MylesBorins](https://github.com/MylesBorins) - **Myles Borins** <myles.borins@gmail.com> (he/him) * [ofrobots](https://github.com/ofrobots) - @@ -287,6 +285,8 @@ For more information about the governance of the Node.js project, see **Isaac Z. Schlueter** <i@izs.me> * [joshgav](https://github.com/joshgav) - **Josh Gavant** <josh.gavant@outlook.com> +* [mscdex](https://github.com/mscdex) - +**Brian White** <mscdex@mscdex.net> * [nebrius](https://github.com/nebrius) - **Bryan Hughes** <bryan@nebri.us> * [orangemocha](https://github.com/orangemocha) - From 32089bcbc1c537632b8f89ba4165bdeca404e5c2 Mon Sep 17 00:00:00 2001 From: Rich Trott Date: Wed, 31 Jan 2018 09:53:10 -0800 Subject: [PATCH 013/227] doc: streamline README intro MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Shorten text that is duplicated from website and supply link. 
PR-URL: https://github.com/nodejs/node/pull/18483 Reviewed-By: James M Snell Reviewed-By: Colin Ihrig Reviewed-By: Сковорода Никита Андреевич Reviewed-By: Luigi Pinca Reviewed-By: Joyee Cheung Reviewed-By: Daniel Bevenius Reviewed-By: Ruben Bridgewater --- README.md | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/README.md b/README.md index 53471775067632..ad3d838dd58076 100644 --- a/README.md +++ b/README.md @@ -7,10 +7,9 @@

-Node.js is a JavaScript runtime built on Chrome's V8 JavaScript engine. Node.js -uses an event-driven, non-blocking I/O model that makes it lightweight and -efficient. The Node.js package ecosystem, [npm][], is the largest ecosystem of -open source libraries in the world. +Node.js is a JavaScript runtime built on Chrome's V8 JavaScript engine. For +more information on using Node.js, see the +[Node.js Website][]. The Node.js project is supported by the [Node.js Foundation](https://nodejs.org/en/foundation/). Contributions, @@ -596,7 +595,6 @@ Previous releases may also have been signed with one of the following GPG keys: * [Contributing to the project][] * [Working Groups][] -[npm]: https://www.npmjs.com [Code of Conduct]: https://github.com/nodejs/admin/blob/master/CODE_OF_CONDUCT.md [Contributing to the project]: CONTRIBUTING.md [Node.js Help]: https://github.com/nodejs/help From b2a2a55271b609fd09262509ec7f407006bff472 Mon Sep 17 00:00:00 2001 From: jvelezpo Date: Thu, 25 Jan 2018 14:47:52 -0500 Subject: [PATCH 014/227] test: verify the shell option works properly on execFile Useful for executing in a shell because it accepts arguments as an array instead of a string as exec does. Depending on the circumstances, that can prove to be useful if the arguments are already prepared. PR-URL: https://github.com/nodejs/node/pull/18384 Reviewed-By: James M Snell Reviewed-By: Luigi Pinca Reviewed-By: Ruben Bridgewater --- test/parallel/test-child-process-execfile.js | 8 ++++++++ test/sequential/test-child-process-execsync.js | 6 ++++++ 2 files changed, 14 insertions(+) diff --git a/test/parallel/test-child-process-execfile.js b/test/parallel/test-child-process-execfile.js index 62cc7f534dc86b..a64128d6a3ab6b 100644 --- a/test/parallel/test-child-process-execfile.js +++ b/test/parallel/test-child-process-execfile.js @@ -6,6 +6,7 @@ const uv = process.binding('uv'); const fixtures = require('../common/fixtures'); const fixture = fixtures.path('exit.js'); +const execOpts = { encoding: 'utf8', shell: true }; { execFile( @@ -38,3 +39,10 @@ const fixture = fixtures.path('exit.js'); child.kill(); child.emit('close', code, null); } + +{ + // Verify the shell option works properly + execFile(process.execPath, [fixture, 0], execOpts, common.mustCall((err) => { + assert.ifError(err); + })); +} diff --git a/test/sequential/test-child-process-execsync.js b/test/sequential/test-child-process-execsync.js index e7c978406c883e..970ed867abed93 100644 --- a/test/sequential/test-child-process-execsync.js +++ b/test/sequential/test-child-process-execsync.js @@ -8,6 +8,7 @@ const TIMER = 200; const SLEEP = 2000; const start = Date.now(); +const execOpts = { encoding: 'utf8', shell: true }; let err; let caught = false; @@ -103,3 +104,8 @@ assert.strictEqual(ret, `${msg}\n`); return true; }); } + +// Verify the shell option works properly +assert.doesNotThrow(() => { + execFileSync(process.execPath, [], execOpts); +}); From cc7469eec8bd8e191a1c260f811addfb03e63196 Mon Sep 17 00:00:00 2001 From: Rod Vagg Date: Tue, 9 Jan 2018 14:48:31 +1100 Subject: [PATCH 015/227] build: allow x86_64 as a dest_cpu alias for x64 x86_64 is a standard arch descriptor on Linux, allow it as an alias for our preferred descriptor: x64 PR-URL: https://github.com/nodejs/node/pull/18052 Reviewed-By: James M Snell Reviewed-By: Gireesh Punathil Reviewed-By: Gibson Fahnestock Reviewed-By: Joyee Cheung Reviewed-By: Colin Ihrig Reviewed-By: Michael Dawson Reviewed-By: Ruben Bridgewater 
--- configure | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/configure b/configure index e67a15c4753dd3..ac1bdfa527a429 100755 --- a/configure +++ b/configure @@ -61,7 +61,7 @@ parser = optparse.OptionParser() valid_os = ('win', 'mac', 'solaris', 'freebsd', 'openbsd', 'linux', 'android', 'aix') valid_arch = ('arm', 'arm64', 'ia32', 'mips', 'mipsel', 'mips64el', 'ppc', - 'ppc64', 'x32','x64', 'x86', 's390', 's390x') + 'ppc64', 'x32','x64', 'x86', 'x86_64', 's390', 's390x') valid_arm_float_abi = ('soft', 'softfp', 'hard') valid_arm_fpu = ('vfp', 'vfpv3', 'vfpv3-d16', 'neon') valid_mips_arch = ('loongson', 'r1', 'r2', 'r6', 'rx') @@ -825,6 +825,9 @@ def configure_node(o): # the Makefile resets this to x86 afterward if target_arch == 'x86': target_arch = 'ia32' + # x86_64 is common across linuxes, allow it as an alias for x64 + if target_arch == 'x86_64': + target_arch = 'x64' o['variables']['host_arch'] = host_arch o['variables']['target_arch'] = target_arch o['variables']['node_byteorder'] = sys.byteorder From 269c2f3ad9f6a1b48b7e7db7341abc7982bd6f6c Mon Sep 17 00:00:00 2001 From: Luigi Pinca Date: Mon, 29 Jan 2018 10:23:02 +0100 Subject: [PATCH 016/227] net: remove redundant code from _writeGeneric() The encoding is already handled by `Writable.prototype.write()`. PR-URL: https://github.com/nodejs/node/pull/18429 Reviewed-By: Matteo Collina Reviewed-By: James M Snell Reviewed-By: Anna Henningsen Reviewed-By: Colin Ihrig Reviewed-By: Ruben Bridgewater --- lib/net.js | 8 +------- 1 file changed, 1 insertion(+), 7 deletions(-) diff --git a/lib/net.js b/lib/net.js index ac036534d6b402..e91542fb2467a9 100644 --- a/lib/net.js +++ b/lib/net.js @@ -710,13 +710,7 @@ Socket.prototype._writeGeneric = function(writev, data, encoding, cb) { // Retain chunks if (err === 0) req._chunks = chunks; } else { - var enc; - if (data instanceof Buffer) { - enc = 'buffer'; - } else { - enc = encoding; - } - err = createWriteReq(req, this._handle, data, enc); + err = createWriteReq(req, this._handle, data, encoding); } if (err) From f3c6febedfa1249828ee29c193d241b2c93a9b64 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tobias=20Nie=C3=9Fen?= Date: Sun, 31 Dec 2017 17:39:30 +0100 Subject: [PATCH 017/227] test: update references to archived repository Backport-PR-URL: https://github.com/nodejs/node/pull/19120 PR-URL: https://github.com/nodejs/node/pull/17924 Reviewed-By: Anna Henningsen Reviewed-By: Colin Ihrig Reviewed-By: Jon Moss Reviewed-By: Luigi Pinca Reviewed-By: James M Snell Reviewed-By: Ruben Bridgewater --- test/parallel/test-assert.js | 4 ++-- test/parallel/test-buffer-alloc.js | 14 +++++++++----- test/parallel/test-cluster-worker-init.js | 2 +- test/parallel/test-crypto-binary-default.js | 3 ++- test/parallel/test-crypto-cipher-decipher.js | 11 +++++++---- test/parallel/test-crypto-hash.js | 3 ++- test/parallel/test-crypto-random.js | 5 +++-- test/parallel/test-crypto.js | 4 ++-- test/parallel/test-dgram-ref.js | 2 +- test/parallel/test-dns-regress-7070.js | 3 ++- test/parallel/test-timers-unref.js | 3 ++- test/parallel/test-tls-set-encoding.js | 2 +- test/sequential/test-child-process-execsync.js | 6 ++++-- test/sequential/test-module-loading.js | 6 ++++-- 14 files changed, 42 insertions(+), 26 deletions(-) diff --git a/test/parallel/test-assert.js b/test/parallel/test-assert.js index aa51eb7e523e2e..55376328b026f2 100644 --- a/test/parallel/test-assert.js +++ b/test/parallel/test-assert.js @@ -522,7 +522,7 @@ testAssertionMessage({a: 
undefined, b: null}, '{ a: undefined, b: null }'); testAssertionMessage({a: NaN, b: Infinity, c: -Infinity}, '{ a: NaN, b: Infinity, c: -Infinity }'); -// #2893 +// https://github.com/nodejs/node-v0.x-archive/issues/2893 try { // eslint-disable-next-line no-restricted-syntax assert.throws(function() { @@ -534,7 +534,7 @@ try { } assert.ok(threw); -// #5292 +// https://github.com/nodejs/node-v0.x-archive/issues/5292 try { assert.strictEqual(1, 2); } catch (e) { diff --git a/test/parallel/test-buffer-alloc.js b/test/parallel/test-buffer-alloc.js index d0f02b114c1f97..e9053828c885af 100644 --- a/test/parallel/test-buffer-alloc.js +++ b/test/parallel/test-buffer-alloc.js @@ -629,7 +629,8 @@ assert.strictEqual('', x.inspect()); } { - // #1210 Test UTF-8 string includes null character + // https://github.com/nodejs/node-v0.x-archive/pull/1210 + // Test UTF-8 string includes null character let buf = Buffer.from('\0'); assert.strictEqual(buf.length, 1); buf = Buffer.from('\0\0'); @@ -653,7 +654,8 @@ assert.strictEqual('', x.inspect()); } { - // #243 Test write() with maxLength + // https://github.com/nodejs/node-v0.x-archive/issues/243 + // Test write() with maxLength const buf = Buffer.allocUnsafe(4); buf.fill(0xFF); assert.strictEqual(buf.write('abcd', 1, 2, 'utf8'), 2); @@ -937,11 +939,13 @@ assert.throws(() => Buffer.allocUnsafe(8).writeFloatLE(0.0, -1), RangeError); assert.strictEqual(buf.readIntBE(0, 5), -0x0012000000); } -// Regression test for #5482: should throw but not assert in C++ land. +// Regression test for https://github.com/nodejs/node-v0.x-archive/issues/5482: +// should throw but not assert in C++ land. assert.throws(() => Buffer.from('', 'buffer'), TypeError); -// Regression test for #6111. Constructing a buffer from another buffer -// should a) work, and b) not corrupt the source buffer. +// Regression test for https://github.com/nodejs/node-v0.x-archive/issues/6111. +// Constructing a buffer from another buffer should a) work, and b) not corrupt +// the source buffer. { const a = [...Array(128).keys()]; // [0, 1, 2, 3, ... 126, 127] const b = Buffer.from(a); diff --git a/test/parallel/test-cluster-worker-init.js b/test/parallel/test-cluster-worker-init.js index 4bd43c0ae58807..5a36956832ac69 100644 --- a/test/parallel/test-cluster-worker-init.js +++ b/test/parallel/test-cluster-worker-init.js @@ -21,7 +21,7 @@ if (cluster.isMaster) { worker.send(msg); }); } else { - // GH #7998 + // https://github.com/nodejs/node-v0.x-archive/issues/7998 cluster.worker.on('message', (message) => { process.send(message === msg); }); diff --git a/test/parallel/test-crypto-binary-default.js b/test/parallel/test-crypto-binary-default.js index ce29c4d35d80f8..bfe4be9d822b4e 100644 --- a/test/parallel/test-crypto-binary-default.js +++ b/test/parallel/test-crypto-binary-default.js @@ -393,7 +393,8 @@ fileStream.on('close', common.mustCall(function() { ); })); -// Issue #2227: unknown digest method should throw an error. 
+// Unknown digest method should throw an error: +// https://github.com/nodejs/node-v0.x-archive/issues/2227 assert.throws(function() { crypto.createHash('xyzzy'); }, /^Error: Digest method not supported$/); diff --git a/test/parallel/test-crypto-cipher-decipher.js b/test/parallel/test-crypto-cipher-decipher.js index 14a1601981f353..94dd29614bb656 100644 --- a/test/parallel/test-crypto-cipher-decipher.js +++ b/test/parallel/test-crypto-cipher-decipher.js @@ -70,7 +70,8 @@ testCipher1(Buffer.from('MySecretKey123')); testCipher2('0123456789abcdef'); testCipher2(Buffer.from('0123456789abcdef')); -// Base64 padding regression test, see #4837. +// Base64 padding regression test, see +// https://github.com/nodejs/node-v0.x-archive/issues/4837. { const c = crypto.createCipher('aes-256-cbc', 'secret'); const s = c.update('test', 'utf8', 'base64') + c.final('base64'); @@ -78,7 +79,7 @@ testCipher2(Buffer.from('0123456789abcdef')); } // Calling Cipher.final() or Decipher.final() twice should error but -// not assert. See #4886. +// not assert. See https://github.com/nodejs/node-v0.x-archive/issues/4886. { const c = crypto.createCipher('aes-256-cbc', 'secret'); try { c.final('xxx'); } catch (e) { /* Ignore. */ } @@ -90,14 +91,16 @@ testCipher2(Buffer.from('0123456789abcdef')); try { d.final('xxx'); } catch (e) { /* Ignore. */ } } -// Regression test for #5482: string to Cipher#update() should not assert. +// Regression test for https://github.com/nodejs/node-v0.x-archive/issues/5482: +// string to Cipher#update() should not assert. { const c = crypto.createCipher('aes192', '0123456789abcdef'); c.update('update'); c.final(); } -// #5655 regression tests, 'utf-8' and 'utf8' are identical. +// https://github.com/nodejs/node-v0.x-archive/issues/5655 regression tests, +// 'utf-8' and 'utf8' are identical. { let c = crypto.createCipher('aes192', '0123456789abcdef'); c.update('update', ''); // Defaults to "utf8". diff --git a/test/parallel/test-crypto-hash.js b/test/parallel/test-crypto-hash.js index 25e568910ca068..300d6ba779eddf 100644 --- a/test/parallel/test-crypto-hash.js +++ b/test/parallel/test-crypto-hash.js @@ -104,7 +104,8 @@ fileStream.on('close', common.mustCall(function() { 'Test SHA1 of sample.png'); })); -// Issue #2227: unknown digest method should throw an error. +// Issue https://github.com/nodejs/node-v0.x-archive/issues/2227: unknown digest +// method should throw an error. 
assert.throws(function() { crypto.createHash('xyzzy'); }, /Digest method not supported/); diff --git a/test/parallel/test-crypto-random.js b/test/parallel/test-crypto-random.js index 386602e4a13f2d..65918bd1ad3cd2 100644 --- a/test/parallel/test-crypto-random.js +++ b/test/parallel/test-crypto-random.js @@ -230,8 +230,9 @@ const expectedErrorRegexp = /^TypeError: size must be a number >= 0$/; } } -// #5126, "FATAL ERROR: v8::Object::SetIndexedPropertiesToExternalArrayData() -// length exceeds max acceptable value" +// https://github.com/nodejs/node-v0.x-archive/issues/5126, +// "FATAL ERROR: v8::Object::SetIndexedPropertiesToExternalArrayData() length +// exceeds max acceptable value" assert.throws(function() { crypto.randomBytes((-1 >>> 0) + 1); }, /^TypeError: size must be a number >= 0$/); diff --git a/test/parallel/test-crypto.js b/test/parallel/test-crypto.js index 94443c03d1708e..46409244b5efb0 100644 --- a/test/parallel/test-crypto.js +++ b/test/parallel/test-crypto.js @@ -110,8 +110,8 @@ testImmutability(tls.getCiphers); testImmutability(crypto.getHashes); testImmutability(crypto.getCurves); -// Regression tests for #5725: hex input that's not a power of two should -// throw, not assert in C++ land. +// Regression tests for https://github.com/nodejs/node-v0.x-archive/pull/5725: +// hex input that's not a power of two should throw, not assert in C++ land. assert.throws(function() { crypto.createCipher('aes192', 'test').update('0', 'hex'); }, common.hasFipsCrypto ? /not supported in FIPS mode/ : /Bad input string/); diff --git a/test/parallel/test-dgram-ref.js b/test/parallel/test-dgram-ref.js index 7b79340f924b06..6e2af6f2503e1f 100644 --- a/test/parallel/test-dgram-ref.js +++ b/test/parallel/test-dgram-ref.js @@ -2,7 +2,7 @@ const common = require('../common'); const dgram = require('dgram'); -// should not hang, see #1282 +// should not hang, see https://github.com/nodejs/node-v0.x-archive/issues/1282 dgram.createSocket('udp4'); dgram.createSocket('udp6'); diff --git a/test/parallel/test-dns-regress-7070.js b/test/parallel/test-dns-regress-7070.js index eb939b47559a9d..4b9d2bfbef27a9 100644 --- a/test/parallel/test-dns-regress-7070.js +++ b/test/parallel/test-dns-regress-7070.js @@ -3,7 +3,8 @@ require('../common'); const assert = require('assert'); const dns = require('dns'); -// Should not raise assertion error. Issue #7070 +// Should not raise assertion error. +// Issue https://github.com/nodejs/node-v0.x-archive/issues/7070 assert.throws(() => dns.resolveNs([]), // bad name /^Error: "name" argument must be a string$/); assert.throws(() => dns.resolveNs(''), // bad callback diff --git a/test/parallel/test-timers-unref.js b/test/parallel/test-timers-unref.js index e4bca54b7d7b03..a62718043d8544 100644 --- a/test/parallel/test-timers-unref.js +++ b/test/parallel/test-timers-unref.js @@ -50,7 +50,8 @@ const check_unref = setInterval(() => { setInterval(() => timeout.unref(), SHORT_TIME); } -// Should not assert on args.Holder()->InternalFieldCount() > 0. See #4261. +// Should not assert on args.Holder()->InternalFieldCount() > 0. +// See https://github.com/nodejs/node-v0.x-archive/issues/4261. 
{ const t = setInterval(() => {}, 1); process.nextTick(t.unref.bind({})); diff --git a/test/parallel/test-tls-set-encoding.js b/test/parallel/test-tls-set-encoding.js index dae4d2c31c2c64..637618b5cb29ac 100644 --- a/test/parallel/test-tls-set-encoding.js +++ b/test/parallel/test-tls-set-encoding.js @@ -42,7 +42,7 @@ server.listen(0, function() { client.on('close', function() { // readyState is deprecated but we want to make // sure this isn't triggering an assert in lib/net.js - // See issue #1069. + // See https://github.com/nodejs/node-v0.x-archive/issues/1069. assert.strictEqual('closed', client.readyState); // Confirming the buffer string is encoded in ASCII diff --git a/test/sequential/test-child-process-execsync.js b/test/sequential/test-child-process-execsync.js index 970ed867abed93..8c751e9c8a2e57 100644 --- a/test/sequential/test-child-process-execsync.js +++ b/test/sequential/test-child-process-execsync.js @@ -73,7 +73,8 @@ ret = execFileSync(process.execPath, args, { encoding: 'utf8' }); assert.strictEqual(ret, `${msg}\n`); -// Verify that the cwd option works - GH #7824 +// Verify that the cwd option works. +// See https://github.com/nodejs/node-v0.x-archive/issues/7824. { const cwd = common.rootDir; const cmd = common.isWindows ? 'echo %cd%' : 'pwd'; @@ -82,7 +83,8 @@ assert.strictEqual(ret, `${msg}\n`); assert.strictEqual(response.toString().trim(), cwd); } -// Verify that stderr is not accessed when stdio = 'ignore' - GH #7966 +// Verify that stderr is not accessed when stdio = 'ignore'. +// See https://github.com/nodejs/node-v0.x-archive/issues/7966. { assert.throws(function() { execSync('exit -1', {stdio: 'ignore'}); diff --git a/test/sequential/test-module-loading.js b/test/sequential/test-module-loading.js index e16aef6c02a01e..6b21e3ed21abc5 100644 --- a/test/sequential/test-module-loading.js +++ b/test/sequential/test-module-loading.js @@ -181,7 +181,8 @@ const child = require('../fixtures/module-require/child/'); assert.strictEqual(child.loaded, parent.loaded); -// #1357 Loading JSON files with require() +// Loading JSON files with require() +// See https://github.com/nodejs/node-v0.x-archive/issues/1357. const json = require('../fixtures/packages/main/package.json'); assert.deepStrictEqual(json, { name: 'package-name', @@ -289,7 +290,8 @@ process.on('exit', function() { }); -// #1440 Loading files with a byte order marker. +// Loading files with a byte order marker. +// See https://github.com/nodejs/node-v0.x-archive/issues/1440. 
 assert.strictEqual(42, require('../fixtures/utf8-bom.js'));
 assert.strictEqual(42, require('../fixtures/utf8-bom.json'));

From 72a5710b713320ca1b69f2395bfea2b1e775afa0 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Tobias=20Nie=C3=9Fen?=
Date: Sun, 31 Dec 2017 18:06:40 +0100
Subject: [PATCH 018/227] readline: update references to archived repository

Backport-PR-URL: https://github.com/nodejs/node/pull/19120
PR-URL: https://github.com/nodejs/node/pull/17924
Reviewed-By: Anna Henningsen
Reviewed-By: Colin Ihrig
Reviewed-By: Jon Moss
Reviewed-By: Luigi Pinca
Reviewed-By: James M Snell
Reviewed-By: Ruben Bridgewater
---
 lib/readline.js | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/lib/readline.js b/lib/readline.js
index ca99fe15c0a593..a3a3452abacc8c 100644
--- a/lib/readline.js
+++ b/lib/readline.js
@@ -700,7 +700,8 @@ Interface.prototype._moveCursor = function(dx) {
 Interface.prototype._ttyWrite = function(s, key) {
   key = key || {};
 
-  // Ignore escape key - Fixes #2876
+  // Ignore escape key, fixes
+  // https://github.com/nodejs/node-v0.x-archive/issues/2876.
   if (key.name === 'escape') return;
 
   if (key.ctrl && key.shift) {

From 49d8c2e8aef6e2e5d5f80cc7f0adf313cb1df5d5 Mon Sep 17 00:00:00 2001
From: Yihong Wang
Date: Mon, 4 Dec 2017 16:07:53 -0800
Subject: [PATCH 019/227] build: refine static and shared lib build

Refine the static and shared lib build process in order to
integrate static and shared lib verification into CI.
When building both static and shared lib, we still build node
executable now and it uses the shared and static lib.

Signed-off-by: Yihong Wang
Refs: https://github.com/nodejs/node/issues/14158
Backport-PR-URL: https://github.com/nodejs/node/pull/19050
PR-URL: https://github.com/nodejs/node/pull/17604
Reviewed-By: Bartosz Sosnowski
Reviewed-By: Ben Noordhuis
Reviewed-By: Daniel Bevenius
---
 configure        |   9 +-
 node.gyp         | 442 +++++++++++++++++++++++++++++++++++++++--------
 node.gypi        | 278 +++++++++++------------------
 src/node_main.cc |   1 +
 4 files changed, 479 insertions(+), 251 deletions(-)

diff --git a/configure b/configure
index ac1bdfa527a429..dd72cc332436a2 100755
--- a/configure
+++ b/configure
@@ -845,7 +845,6 @@ def configure_node(o):
     configure_mips(o)
 
   if flavor == 'aix':
-    o['variables']['node_core_target_name'] = 'node_base'
     o['variables']['node_target_type'] = 'static_library'
 
   if target_arch in ('x86', 'x64', 'ia32', 'x32'):
@@ -945,6 +944,13 @@ def configure_node(o):
   else:
     o['variables']['coverage'] = 'false'
 
+  if options.shared:
+    o['variables']['node_target_type'] = 'shared_library'
+  elif options.enable_static:
+    o['variables']['node_target_type'] = 'static_library'
+  else:
+    o['variables']['node_target_type'] = 'executable'
+
 def configure_library(lib, output):
   shared_lib = 'shared_' + lib
   output['variables']['node_' + shared_lib] = b(getattr(options, shared_lib))
@@ -1440,6 +1446,7 @@ config = {
   'BUILDTYPE': 'Debug' if options.debug else 'Release',
   'USE_XCODE': str(int(options.use_xcode or 0)),
   'PYTHON': sys.executable,
+  'NODE_TARGET_TYPE': variables['node_target_type'],
 }
 
 if options.prefix:
diff --git a/node.gyp b/node.gyp
index 8a7af9bfd1e50a..7dff6bf3f22c1e 100644
--- a/node.gyp
+++ b/node.gyp
@@ -21,6 +21,8 @@
     'node_v8_options%': '',
     'node_enable_v8_vtunejit%': 'false',
     'node_core_target_name%': 'node',
+    'node_lib_target_name%': 'node_lib',
+    'node_intermediate_lib_type%': 'static_library',
     'library_files': [
       'lib/internal/bootstrap_node.js',
'lib/_debug_agent.js', @@ -111,6 +113,17 @@ 'conditions': [ [ 'node_shared=="true"', { 'node_target_type%': 'shared_library', + 'conditions': [ + ['OS=="aix"', { + # For AIX, always generate static library first, + # It needs an extra step to generate exp and + # then use both static lib and exp to create + # shared lib. + 'node_intermediate_lib_type': 'static_library', + }, { + 'node_intermediate_lib_type': 'shared_library', + }], + ], }, { 'node_target_type%': 'executable', }], @@ -127,7 +140,81 @@ 'targets': [ { 'target_name': '<(node_core_target_name)', - 'type': '<(node_target_type)', + 'type': 'executable', + 'sources': [ + 'src/node_main.cc' + ], + 'include_dirs': [ + 'src', + 'deps/v8/include', + ], + 'conditions': [ + [ 'node_intermediate_lib_type=="static_library" and ' + 'node_shared=="true" and OS=="aix"', { + # For AIX, shared lib is linked by static lib and .exp. In the + # case here, the executable needs to link to shared lib. + # Therefore, use 'node_aix_shared' target to generate the + # shared lib and then executable. + 'dependencies': [ 'node_aix_shared' ], + }, { + 'dependencies': [ '<(node_lib_target_name)' ], + }], + [ 'node_intermediate_lib_type=="static_library" and ' + 'node_shared=="false"', { + 'includes': [ + 'node.gypi' + ], + 'xcode_settings': { + 'OTHER_LDFLAGS': [ + '-Wl,-force_load,<(PRODUCT_DIR)/<(STATIC_LIB_PREFIX)' + '<(node_core_target_name)<(STATIC_LIB_SUFFIX)', + ], + }, + 'msvs_settings': { + 'VCLinkerTool': { + 'AdditionalOptions': [ + '/WHOLEARCHIVE:<(PRODUCT_DIR)\\lib\\' + '<(node_core_target_name)<(STATIC_LIB_SUFFIX)', + ], + }, + }, + 'conditions': [ + ['OS in "linux freebsd openbsd solaris android"', { + 'ldflags': [ + '-Wl,--whole-archive,<(OBJ_DIR)/<(STATIC_LIB_PREFIX)' + '<(node_core_target_name)<(STATIC_LIB_SUFFIX)', + '-Wl,--no-whole-archive', + ], + }], + [ 'OS=="win"', { + 'sources': [ 'src/res/node.rc' ], + 'conditions': [ + [ 'node_use_etw=="true"', { + 'sources': [ + 'tools/msvs/genfiles/node_etw_provider.rc' + ], + }], + [ 'node_use_perfctr=="true"', { + 'sources': [ + 'tools/msvs/genfiles/node_perfctr_provider.rc', + ], + }] + ], + }], + ], + }], + [ 'node_intermediate_lib_type=="shared_library" and OS=="win"', { + # On Windows, having the same name for both executable and shared + # lib causes filename collision. 
Need a different PRODUCT_NAME for + # the executable and rename it back to node.exe later + 'product_name': '<(node_core_target_name)-win', + }], + ], + }, + { + 'target_name': '<(node_lib_target_name)', + 'type': '<(node_intermediate_lib_type)', + 'product_name': '<(node_core_target_name)', 'dependencies': [ 'node_js2c#host', @@ -139,7 +226,6 @@ 'include_dirs': [ 'src', - 'tools/msvs/genfiles', 'deps/uv/src/ares', '<(SHARED_INTERMEDIATE_DIR)', ], @@ -161,7 +247,6 @@ 'src/node_contextify.cc', 'src/node_file.cc', 'src/node_http_parser.cc', - 'src/node_main.cc', 'src/node_os.cc', 'src/node_revert.cc', 'src/node_url.cc', @@ -249,7 +334,168 @@ 'conditions': [ [ 'node_shared=="true" and node_module_version!="" and OS!="win"', { 'product_extension': '<(shlib_suffix)', - }] + }], + ['node_shared=="true" and OS=="aix"', { + 'product_name': 'node_base', + }], + [ 'v8_inspector=="true"', { + 'defines': [ + 'HAVE_INSPECTOR=1', + ], + 'sources': [ + 'src/inspector_agent.cc', + 'src/inspector_socket.cc', + 'src/inspector_agent.h', + 'src/inspector_socket.h', + ], + 'dependencies': [ + 'deps/v8_inspector/third_party/v8_inspector/platform/' + 'v8_inspector/v8_inspector.gyp:v8_inspector_stl', + 'deps/v8_inspector/third_party/v8_inspector/platform/' + 'v8_inspector/v8_inspector.gyp:protocol_sources_stl', + 'v8_inspector_compress_protocol_json#host', + ], + 'include_dirs': [ + 'deps/v8_inspector/third_party/v8_inspector', + '<(SHARED_INTERMEDIATE_DIR)/blink', # for inspector + ], + }, { + 'defines': [ 'HAVE_INSPECTOR=0' ] + }], + [ 'OS=="win"', { + 'sources': [ + 'src/backtrace_win32.cc', + ], + 'conditions': [ + [ 'node_intermediate_lib_type!="static_library"', { + 'sources': [ + 'src/res/node.rc', + ], + }], + ], + 'defines!': [ + 'NODE_PLATFORM="win"', + ], + 'defines': [ + 'FD_SETSIZE=1024', + # we need to use node's preferred "win32" rather than gyp's preferred "win" + 'NODE_PLATFORM="win32"', + '_UNICODE=1', + ], + 'libraries': [ '-lpsapi.lib' ] + }, { # POSIX + 'defines': [ '__POSIX__' ], + 'sources': [ 'src/backtrace_posix.cc' ], + }], + [ 'node_use_etw=="true"', { + 'defines': [ 'HAVE_ETW=1' ], + 'dependencies': [ 'node_etw' ], + 'include_dirs': [ + 'src', + 'tools/msvs/genfiles', + '<(SHARED_INTERMEDIATE_DIR)' # for node_natives.h + ], + 'sources': [ + 'src/node_win32_etw_provider.h', + 'src/node_win32_etw_provider-inl.h', + 'src/node_win32_etw_provider.cc', + 'src/node_dtrace.cc', + 'tools/msvs/genfiles/node_etw_provider.h', + ], + 'conditions': [ + ['node_intermediate_lib_type != "static_library"', { + 'sources': [ + 'tools/msvs/genfiles/node_etw_provider.rc', + ], + }], + ], + }], + [ 'node_use_perfctr=="true"', { + 'defines': [ 'HAVE_PERFCTR=1' ], + 'dependencies': [ 'node_perfctr' ], + 'include_dirs': [ + 'src', + 'tools/msvs/genfiles', + '<(SHARED_INTERMEDIATE_DIR)' # for node_natives.h + ], + 'sources': [ + 'src/node_win32_perfctr_provider.h', + 'src/node_win32_perfctr_provider.cc', + 'src/node_counters.cc', + 'src/node_counters.h', + ], + 'conditions': [ + ['node_intermediate_lib_type != "static_library"', { + 'sources': [ + 'tools/msvs/genfiles/node_perfctr_provider.rc', + ], + }], + ], + }], + [ 'node_use_lttng=="true"', { + 'defines': [ 'HAVE_LTTNG=1' ], + 'include_dirs': [ '<(SHARED_INTERMEDIATE_DIR)' ], + 'libraries': [ '-llttng-ust' ], + 'include_dirs': [ + 'src', + 'tools/msvs/genfiles', + '<(SHARED_INTERMEDIATE_DIR)' # for node_natives.h + ], + 'sources': [ + 'src/node_lttng.cc' + ], + }], + [ 'node_use_dtrace=="true"', { + 'defines': [ 'HAVE_DTRACE=1' ], + 'dependencies': [ + 
'node_dtrace_header', + 'specialize_node_d', + ], + 'include_dirs': [ '<(SHARED_INTERMEDIATE_DIR)' ], + # + # DTrace is supported on linux, solaris, mac, and bsd. There are + # three object files associated with DTrace support, but they're + # not all used all the time: + # + # node_dtrace.o all configurations + # node_dtrace_ustack.o not supported on mac and linux + # node_dtrace_provider.o All except OS X. "dtrace -G" is not + # used on OS X. + # + # Note that node_dtrace_provider.cc and node_dtrace_ustack.cc do not + # actually exist. They're listed here to trick GYP into linking the + # corresponding object files into the final "node" executable. These + # object files are generated by "dtrace -G" using custom actions + # below, and the GYP-generated Makefiles will properly build them when + # needed. + # + 'sources': [ 'src/node_dtrace.cc' ], + 'conditions': [ + [ 'OS=="linux"', { + 'sources': [ + '<(SHARED_INTERMEDIATE_DIR)/node_dtrace_provider.o' + ], + }], + [ 'OS!="mac" and OS!="linux"', { + 'sources': [ + 'src/node_dtrace_ustack.cc', + 'src/node_dtrace_provider.cc', + ] + } + ] ] + } ], + [ 'node_use_openssl=="true"', { + 'sources': [ + 'src/node_crypto.cc', + 'src/node_crypto_bio.cc', + 'src/node_crypto_clienthello.cc', + 'src/node_crypto.h', + 'src/node_crypto_bio.h', + 'src/node_crypto_clienthello.h', + 'src/tls_wrap.cc', + 'src/tls_wrap.h' + ], + }], ], 'direct_dependent_settings': { 'defines': [ @@ -398,7 +644,7 @@ [ 'node_use_dtrace=="false" and node_use_etw=="false"', { 'inputs': [ 'src/notrace_macros.py' ] }], - ['node_use_lttng=="false"', { + [ 'node_use_lttng=="false"', { 'inputs': [ 'src/nolttng_macros.py' ] }], [ 'node_use_perfctr=="false"', { @@ -451,10 +697,10 @@ { 'action_name': 'node_dtrace_provider_o', 'inputs': [ - '<(OBJ_DIR)/node/src/node_dtrace.o', + '<(OBJ_DIR)/<(node_lib_target_name)/src/node_dtrace.o', ], 'outputs': [ - '<(OBJ_DIR)/node/src/node_dtrace_provider.o' + '<(OBJ_DIR)/<(node_lib_target_name)/src/node_dtrace_provider.o' ], 'action': [ 'dtrace', '-G', '-xnolibs', '-s', 'src/node_provider.d', '<@(_inputs)', '-o', '<@(_outputs)' ] @@ -504,7 +750,7 @@ '<(SHARED_INTERMEDIATE_DIR)/v8constants.h' ], 'outputs': [ - '<(OBJ_DIR)/node/src/node_dtrace_ustack.o' + '<(OBJ_DIR)/<(node_lib_target_name)/src/node_dtrace_ustack.o' ], 'conditions': [ [ 'target_arch=="ia32" or target_arch=="arm"', { @@ -551,12 +797,41 @@ } ], ] }, + { + # When using shared lib to build executable in Windows, in order to avoid + # filename collision, the executable name is node-win.exe. 
Need to rename + # it back to node.exe + 'target_name': 'rename_node_bin_win', + 'type': 'none', + 'dependencies': [ + '<(node_core_target_name)', + ], + 'conditions': [ + [ 'OS=="win" and node_intermediate_lib_type=="shared_library"', { + 'actions': [ + { + 'action_name': 'rename_node_bin_win', + 'inputs': [ + '<(PRODUCT_DIR)/<(node_core_target_name)-win.exe' + ], + 'outputs': [ + '<(PRODUCT_DIR)/<(node_core_target_name).exe', + ], + 'action': [ + 'mv', '<@(_inputs)', '<@(_outputs)', + ], + }, + ], + } ], + ] + }, { 'target_name': 'cctest', 'type': 'executable', 'dependencies': [ '<(node_core_target_name)', + 'rename_node_bin_win', 'deps/gtest/gtest.gyp:gtest', 'node_js2c#host', 'node_dtrace_header', @@ -565,18 +840,18 @@ ], 'variables': { - 'OBJ_PATH': '<(OBJ_DIR)/node/src', - 'OBJ_GEN_PATH': '<(OBJ_DIR)/node/gen', + 'OBJ_PATH': '<(OBJ_DIR)/<(node_lib_target_name)/src', + 'OBJ_GEN_PATH': '<(OBJ_DIR)/<(node_lib_target_name)/gen', 'OBJ_SUFFIX': 'o', 'conditions': [ ['OS=="win"', { - 'OBJ_PATH': '<(OBJ_DIR)/node', - 'OBJ_GEN_PATH': '<(OBJ_DIR)/node', + 'OBJ_PATH': '<(OBJ_DIR)/<(node_lib_target_name)', + 'OBJ_GEN_PATH': '<(OBJ_DIR)/<(node_lib_target_name)', 'OBJ_SUFFIX': 'obj', }], ['OS=="aix"', { - 'OBJ_PATH': '<(OBJ_DIR)/node_base/src', - 'OBJ_GEN_PATH': '<(OBJ_DIR)/node_base/gen', + 'OBJ_PATH': '<(OBJ_DIR)/<(node_lib_target_name)/src', + 'OBJ_GEN_PATH': '<(OBJ_DIR)/<(node_lib_target_name)/gen', }], ], }, @@ -628,41 +903,91 @@ 'test/cctest/test_url.cc' ], - 'sources!': [ - 'src/node_main.cc' - ], - 'conditions': [ + [ 'node_use_openssl=="true"', { + 'conditions': [ + ['node_target_type!="static_library"', { + 'libraries': [ + '<(OBJ_PATH)/node_crypto.<(OBJ_SUFFIX)', + '<(OBJ_PATH)/node_crypto_bio.<(OBJ_SUFFIX)', + '<(OBJ_PATH)/node_crypto_clienthello.<(OBJ_SUFFIX)', + '<(OBJ_PATH)/tls_wrap.<(OBJ_SUFFIX)', + ], + }], + ], + 'defines': [ + 'HAVE_OPENSSL=1', + ], + }], + [ 'node_use_perfctr=="true"', { + 'defines': [ 'HAVE_PERFCTR=1' ], + 'libraries': [ + '<(OBJ_PATH)/node_counters.<(OBJ_SUFFIX)', + '<(OBJ_PATH)/node_win32_perfctr_provider.<(OBJ_SUFFIX)', + ], + }], ['v8_inspector=="true"', { 'sources': [ 'test/cctest/test_inspector_socket.cc', ], + 'dependencies': [ + 'deps/v8_inspector/third_party/v8_inspector/platform/' + 'v8_inspector/v8_inspector.gyp:v8_inspector_stl', + 'deps/v8_inspector/third_party/v8_inspector/platform/' + 'v8_inspector/v8_inspector.gyp:protocol_sources_stl', + 'v8_inspector_compress_protocol_json#host', + ], + 'include_dirs': [ + 'deps/v8_inspector/third_party/v8_inspector', + '<(SHARED_INTERMEDIATE_DIR)/blink', # for inspector + ], + 'libraries': [ + '<(OBJ_PATH)/inspector_agent.<(OBJ_SUFFIX)', + '<(OBJ_PATH)/inspector_socket.<(OBJ_SUFFIX)', + ], 'conditions': [ [ 'node_shared_openssl=="false" and node_shared=="false"', { 'dependencies': [ 'deps/openssl/openssl.gyp:openssl' ] }], - [ 'node_shared_http_parser=="false"', { - 'dependencies': [ - 'deps/http_parser/http_parser.gyp:http_parser' + ] + }], + [ 'node_use_dtrace=="true"', { + 'libraries': [ + '<(OBJ_PATH)/node_dtrace.<(OBJ_SUFFIX)', + ], + 'conditions': [ + ['OS!="mac" and OS!="linux"', { + 'libraries': [ + '<(OBJ_PATH)/node_dtrace_provider.<(OBJ_SUFFIX)', + '<(OBJ_PATH)/node_dtrace_ustack.<(OBJ_SUFFIX)', ] }], - [ 'node_shared_libuv=="false"', { - 'dependencies': [ - 'deps/uv/uv.gyp:libuv' + ['OS=="linux"', { + 'libraries': [ + '<(SHARED_INTERMEDIATE_DIR)/node_dtrace_provider.<(OBJ_SUFFIX)', ] + }], + ], + }, { + 'conditions': [ + [ 'node_use_etw=="true" and OS=="win"', { + 'libraries': [ + 
'<(OBJ_PATH)/node_dtrace.<(OBJ_SUFFIX)', + '<(OBJ_PATH)/node_win32_etw_provider.<(OBJ_SUFFIX)', + ], }] ] }], - [ 'node_use_dtrace=="true" and OS!="mac" and OS!="linux"', { - 'copies': [{ - 'destination': '<(OBJ_DIR)/cctest/src', - 'files': [ - '<(OBJ_PATH)/node_dtrace_ustack.<(OBJ_SUFFIX)', - '<(OBJ_PATH)/node_dtrace_provider.<(OBJ_SUFFIX)', - '<(OBJ_PATH)/node_dtrace.<(OBJ_SUFFIX)', - ]}, + [ 'OS=="win"', { + 'libraries': [ + '<(OBJ_PATH)/backtrace_win32.<(OBJ_SUFFIX)', + ], + }, { # POSIX + 'defines': [ '__POSIX__' ], + 'libraries': [ + '<(OBJ_PATH)/backtrace_posix.<(OBJ_SUFFIX)', ], }], ['OS=="solaris"', { @@ -673,21 +998,19 @@ ], # end targets 'conditions': [ - ['OS=="aix"', { + [ 'OS=="aix" and node_shared=="true"', { 'targets': [ { - 'target_name': 'node', + 'target_name': 'node_aix_shared', + 'type': 'shared_library', + 'product_name': '<(node_core_target_name)', + 'ldflags': [ '--shared' ], + 'product_extension': '<(shlib_suffix)', 'conditions': [ - ['node_shared=="true"', { - 'type': 'shared_library', - 'ldflags': ['--shared'], - 'product_extension': '<(shlib_suffix)', - }, { - 'type': 'executable', - }], ['target_arch=="ppc64"', { 'ldflags': [ - '-Wl,-blibpath:/usr/lib:/lib:/opt/freeware/lib/pthread/ppc64' + '-Wl,-blibpath:/usr/lib:/lib:' + '/opt/freeware/lib/pthread/ppc64' ], }], ['target_arch=="ppc"', { @@ -696,45 +1019,20 @@ ], }] ], - 'dependencies': ['<(node_core_target_name)', 'node_exp'], - + 'includes': [ + 'node.gypi' + ], + 'dependencies': [ '<(node_lib_target_name)' ], 'include_dirs': [ 'src', 'deps/v8/include', ], - 'sources': [ - 'src/node_main.cc', '<@(library_files)', - # node.gyp is added to the project by default. 'common.gypi', ], - - 'ldflags': ['-Wl,-bE:<(PRODUCT_DIR)/node.exp'], }, - { - 'target_name': 'node_exp', - 'type': 'none', - 'dependencies': [ - '<(node_core_target_name)', - ], - 'actions': [ - { - 'action_name': 'expfile', - 'inputs': [ - '<(OBJ_DIR)' - ], - 'outputs': [ - '<(PRODUCT_DIR)/node.exp' - ], - 'action': [ - 'sh', 'tools/create_expfile.sh', - '<@(_inputs)', '<@(_outputs)' - ], - } - ] - } - ], # end targets + ] }], # end aix section ], # end conditions block } diff --git a/node.gypi b/node.gypi index 3b3548fdab5308..baa7e139b525b8 100644 --- a/node.gypi +++ b/node.gypi @@ -1,4 +1,29 @@ { + # 'force_load' means to include the static libs into the shared lib or + # executable. Therefore, it is enabled when building: + # 1. The executable and it uses static lib (cctest and node) + # 2. The shared lib + # Linker optimizes out functions that are not used. When force_load=true, + # --whole-archive,force_load and /WHOLEARCHIVE are used to include + # all obj files in static libs into the executable or shared lib. 
+ 'variables': { + 'variables': { + 'variables': { + 'force_load%': 'true', + 'current_type%': '<(_type)', + }, + 'force_load%': '<(force_load)', + 'conditions': [ + ['current_type=="static_library"', { + 'force_load': 'false', + }], + [ 'current_type=="executable" and node_target_type=="shared_library"', { + 'force_load': 'false', + }] + ], + }, + 'force_load%': '<(force_load)', + }, 'conditions': [ [ 'node_shared=="false"', { 'msvs_settings': { @@ -36,12 +61,6 @@ [ 'node_v8_options!=""', { 'defines': [ 'NODE_V8_OPTIONS="<(node_v8_options)"'], }], - # No node_main.cc for anything except executable - [ 'node_target_type!="executable"', { - 'sources!': [ - 'src/node_main.cc', - ], - }], [ 'node_release_urlbase!=""', { 'defines': [ 'NODE_RELEASE_URLBASE="<(node_release_urlbase)"', @@ -66,156 +85,6 @@ 'deps/v8/src/third_party/vtune/v8vtune.gyp:v8_vtune' ], }], - ['v8_inspector=="true"', { - 'defines': [ - 'HAVE_INSPECTOR=1', - ], - 'sources': [ - 'src/inspector_agent.cc', - 'src/inspector_socket.cc', - 'src/inspector_agent.h', - 'src/inspector_socket.h', - ], - 'dependencies': [ - 'deps/v8_inspector/third_party/v8_inspector/platform/' - 'v8_inspector/v8_inspector.gyp:v8_inspector_stl', - 'v8_inspector_compress_protocol_json#host', - ], - 'include_dirs': [ - 'deps/v8_inspector/third_party/v8_inspector', - '<(SHARED_INTERMEDIATE_DIR)/blink', # for inspector - ], - }, { - 'defines': [ 'HAVE_INSPECTOR=0' ] - }], - [ 'node_use_openssl=="true"', { - 'defines': [ 'HAVE_OPENSSL=1' ], - 'sources': [ - 'src/node_crypto.cc', - 'src/node_crypto_bio.cc', - 'src/node_crypto_clienthello.cc', - 'src/node_crypto.h', - 'src/node_crypto_bio.h', - 'src/node_crypto_clienthello.h', - 'src/tls_wrap.cc', - 'src/tls_wrap.h' - ], - 'conditions': [ - ['openssl_fips != ""', { - 'defines': [ 'NODE_FIPS_MODE' ], - }], - [ 'node_shared_openssl=="false"', { - 'dependencies': [ - './deps/openssl/openssl.gyp:openssl', - - # For tests - './deps/openssl/openssl.gyp:openssl-cli', - ], - # Do not let unused OpenSSL symbols to slip away - 'conditions': [ - # -force_load or --whole-archive are not applicable for - # the static library - [ 'node_target_type!="static_library"', { - 'xcode_settings': { - 'OTHER_LDFLAGS': [ - '-Wl,-force_load,<(PRODUCT_DIR)/<(OPENSSL_PRODUCT)', - ], - }, - 'conditions': [ - ['OS in "linux freebsd" and node_shared=="false"', { - 'ldflags': [ - '-Wl,--whole-archive,' - '<(OBJ_DIR)/deps/openssl/' - '<(OPENSSL_PRODUCT)', - '-Wl,--no-whole-archive', - ], - }], - # openssl.def is based on zlib.def, zlib symbols - # are always exported. - ['use_openssl_def==1', { - 'sources': ['<(SHARED_INTERMEDIATE_DIR)/openssl.def'], - }], - ['OS=="win" and use_openssl_def==0', { - 'sources': ['deps/zlib/win32/zlib.def'], - }], - ], - }], - ], - }]] - }, { - 'defines': [ 'HAVE_OPENSSL=0' ] - }], - [ 'node_use_dtrace=="true"', { - 'defines': [ 'HAVE_DTRACE=1' ], - 'dependencies': [ - 'node_dtrace_header', - 'specialize_node_d', - ], - 'include_dirs': [ '<(SHARED_INTERMEDIATE_DIR)' ], - - # - # DTrace is supported on linux, solaris, mac, and bsd. There are - # three object files associated with DTrace support, but they're - # not all used all the time: - # - # node_dtrace.o all configurations - # node_dtrace_ustack.o not supported on mac and linux - # node_dtrace_provider.o All except OS X. "dtrace -G" is not - # used on OS X. - # - # Note that node_dtrace_provider.cc and node_dtrace_ustack.cc do not - # actually exist. 
They're listed here to trick GYP into linking the - # corresponding object files into the final "node" executable. These - # object files are generated by "dtrace -G" using custom actions - # below, and the GYP-generated Makefiles will properly build them when - # needed. - # - 'sources': [ 'src/node_dtrace.cc' ], - 'conditions': [ - [ 'OS=="linux"', { - 'sources': [ - '<(SHARED_INTERMEDIATE_DIR)/node_dtrace_provider.o' - ], - }], - [ 'OS!="mac" and OS!="linux"', { - 'sources': [ - 'src/node_dtrace_ustack.cc', - 'src/node_dtrace_provider.cc', - ] - } - ] ] - } ], - [ 'node_use_lttng=="true"', { - 'defines': [ 'HAVE_LTTNG=1' ], - 'include_dirs': [ '<(SHARED_INTERMEDIATE_DIR)' ], - 'libraries': [ '-llttng-ust' ], - 'sources': [ - 'src/node_lttng.cc' - ], - } ], - [ 'node_use_etw=="true"', { - 'defines': [ 'HAVE_ETW=1' ], - 'dependencies': [ 'node_etw' ], - 'sources': [ - 'src/node_win32_etw_provider.h', - 'src/node_win32_etw_provider-inl.h', - 'src/node_win32_etw_provider.cc', - 'src/node_dtrace.cc', - 'tools/msvs/genfiles/node_etw_provider.h', - 'tools/msvs/genfiles/node_etw_provider.rc', - ] - } ], - [ 'node_use_perfctr=="true"', { - 'defines': [ 'HAVE_PERFCTR=1' ], - 'dependencies': [ 'node_perfctr' ], - 'sources': [ - 'src/node_win32_perfctr_provider.h', - 'src/node_win32_perfctr_provider.cc', - 'src/node_counters.cc', - 'src/node_counters.h', - 'tools/msvs/genfiles/node_perfctr_provider.rc', - ] - } ], [ 'node_no_browser_globals=="true"', { 'defines': [ 'NODE_NO_BROWSER_GLOBALS' ], } ], @@ -223,7 +92,7 @@ 'dependencies': [ 'deps/v8/tools/gyp/v8.gyp:postmortem-metadata' ], 'conditions': [ # -force_load is not applicable for the static library - [ 'node_target_type!="static_library"', { + [ 'force_load=="true"', { 'xcode_settings': { 'OTHER_LDFLAGS': [ '-Wl,-force_load,<(V8_BASE)', @@ -248,25 +117,6 @@ 'dependencies': [ 'deps/uv/uv.gyp:libuv' ], }], - [ 'OS=="win"', { - 'sources': [ - 'src/backtrace_win32.cc', - 'src/res/node.rc', - ], - 'defines!': [ - 'NODE_PLATFORM="win"', - ], - 'defines': [ - 'FD_SETSIZE=1024', - # we need to use node's preferred "win32" rather than gyp's preferred "win" - 'NODE_PLATFORM="win32"', - '_UNICODE=1', - ], - 'libraries': [ '-lpsapi.lib' ] - }, { # POSIX - 'defines': [ '__POSIX__' ], - 'sources': [ 'src/backtrace_posix.cc' ], - }], [ 'OS=="mac"', { # linking Corefoundation is needed since certain OSX debugging tools # like Instruments require it for some features @@ -289,6 +139,27 @@ 'defines': [ '_LINUX_SOURCE_COMPAT', ], + 'conditions': [ + [ 'force_load=="true"', { + + 'actions': [ + { + 'action_name': 'expfile', + 'inputs': [ + '<(OBJ_DIR)' + ], + 'outputs': [ + '<(PRODUCT_DIR)/node.exp' + ], + 'action': [ + 'sh', 'tools/create_expfile.sh', + '<@(_inputs)', '<@(_outputs)' + ], + } + ], + 'ldflags': ['-Wl,-bE:<(PRODUCT_DIR)/node.exp', '-Wl,-brtl'], + }], + ], }], [ 'OS=="solaris"', { 'libraries': [ @@ -304,12 +175,14 @@ 'NODE_PLATFORM="sunos"', ], }], - [ '(OS=="freebsd" or OS=="linux") and node_shared=="false" and coverage=="false"', { + [ '(OS=="freebsd" or OS=="linux") and node_shared=="false"' + ' and coverage=="false" and force_load=="true"', { 'ldflags': [ '-Wl,-z,noexecstack', '-Wl,--whole-archive <(V8_BASE)', '-Wl,--no-whole-archive' ] }], - [ '(OS=="freebsd" or OS=="linux") and node_shared=="false" and coverage=="true"', { + [ '(OS=="freebsd" or OS=="linux") and node_shared=="false"' + ' and coverage=="true" and force_load=="true"', { 'ldflags': [ '-Wl,-z,noexecstack', '-Wl,--whole-archive <(V8_BASE)', '-Wl,--no-whole-archive', @@ -324,5 
+197,54 @@ [ 'OS=="sunos"', { 'ldflags': [ '-Wl,-M,/usr/lib/ld/map.noexstk' ], }], + + [ 'node_use_openssl=="true"', { + 'defines': [ 'HAVE_OPENSSL=1' ], + 'conditions': [ + ['openssl_fips != ""', { + 'defines': [ 'NODE_FIPS_MODE' ], + }], + [ 'node_shared_openssl=="false"', { + 'dependencies': [ + './deps/openssl/openssl.gyp:openssl', + + # For tests + './deps/openssl/openssl.gyp:openssl-cli', + ], + 'conditions': [ + # -force_load or --whole-archive are not applicable for + # the static library + [ 'force_load=="true"', { + 'xcode_settings': { + 'OTHER_LDFLAGS': [ + '-Wl,-force_load,<(PRODUCT_DIR)/<(OPENSSL_PRODUCT)', + ], + }, + 'conditions': [ + ['OS in "linux freebsd" and node_shared=="false"', { + 'ldflags': [ + '-Wl,--whole-archive,' + '<(OBJ_DIR)/deps/openssl/' + '<(OPENSSL_PRODUCT)', + '-Wl,--no-whole-archive', + ], + }], + # openssl.def is based on zlib.def, zlib symbols + # are always exported. + ['use_openssl_def==1', { + 'sources': ['<(SHARED_INTERMEDIATE_DIR)/openssl.def'], + }], + ['OS=="win" and use_openssl_def==0', { + 'sources': ['deps/zlib/win32/zlib.def'], + }], + ], + }], + ], + }]] + + }, { + 'defines': [ 'HAVE_OPENSSL=0' ] + }], + ], } diff --git a/src/node_main.cc b/src/node_main.cc index bde397562490e0..89ca076eeb43b4 100644 --- a/src/node_main.cc +++ b/src/node_main.cc @@ -1,6 +1,7 @@ #include "node.h" #ifdef _WIN32 +#include #include int wmain(int argc, wchar_t *wargv[]) { From 8f830ca896a13722b2295bf4ff6f03193fd8ba4e Mon Sep 17 00:00:00 2001 From: Luigi Pinca Date: Thu, 18 Jan 2018 18:39:02 +0100 Subject: [PATCH 020/227] stream: remove unreachable code To avoid a function call `BufferList.prototype.concat()` is not called when there is only a buffer in the list. That buffer is instead accessed directly. Backport-PR-URL: https://github.com/nodejs/node/pull/19483 PR-URL: https://github.com/nodejs/node/pull/18239 Reviewed-By: Matteo Collina --- lib/internal/streams/BufferList.js | 2 -- 1 file changed, 2 deletions(-) diff --git a/lib/internal/streams/BufferList.js b/lib/internal/streams/BufferList.js index 76da94bc83d977..9d3badf5b378bd 100644 --- a/lib/internal/streams/BufferList.js +++ b/lib/internal/streams/BufferList.js @@ -58,8 +58,6 @@ BufferList.prototype.join = function(s) { BufferList.prototype.concat = function(n) { if (this.length === 0) return Buffer.alloc(0); - if (this.length === 1) - return this.head.data; const ret = Buffer.allocUnsafe(n >>> 0); var p = this.head; var i = 0; From 5bcf668f42566c45286c41eb0c817612412de753 Mon Sep 17 00:00:00 2001 From: Luigi Pinca Date: Thu, 18 Jan 2018 18:51:09 +0100 Subject: [PATCH 021/227] test: use correct size in test-stream-buffer-list The `n` argument of `BufferList.prototype.concat()` is not the number of `Buffer` instances in the list, but their total length when concatenated. Backport-PR-URL: https://github.com/nodejs/node/pull/19483 PR-URL: https://github.com/nodejs/node/pull/18239 Reviewed-By: Matteo Collina --- test/parallel/test-stream-buffer-list.js | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/test/parallel/test-stream-buffer-list.js b/test/parallel/test-stream-buffer-list.js index ddbff452de4be9..6ea359b458f61b 100644 --- a/test/parallel/test-stream-buffer-list.js +++ b/test/parallel/test-stream-buffer-list.js @@ -14,14 +14,19 @@ assert.strictEqual(emptyList.join(','), ''); assert.deepStrictEqual(emptyList.concat(0), Buffer.alloc(0)); +const buf = Buffer.from('foo'); + // Test buffer list with one element. 
const list = new BufferList(); -list.push('foo'); +list.push(buf); + +const copy = list.concat(3); -assert.strictEqual(list.concat(1), 'foo'); +assert.notStrictEqual(copy, buf); +assert.deepStrictEqual(copy, buf); assert.strictEqual(list.join(','), 'foo'); const shifted = list.shift(); -assert.strictEqual(shifted, 'foo'); +assert.strictEqual(shifted, buf); assert.deepStrictEqual(list, new BufferList()); From bc2f0a51209b5c48202f509308314443431388a9 Mon Sep 17 00:00:00 2001 From: Vse Mozhet Byt Date: Tue, 30 Jan 2018 00:15:53 +0200 Subject: [PATCH 022/227] doc: linkify missing types Also, alphabetize all types in type-parser.js and fix some nits in type formats. Backport-PR-URL: https://github.com/nodejs/node/pull/19500 PR-URL: https://github.com/nodejs/node/pull/18444 Reviewed-By: Luigi Pinca Reviewed-By: James M Snell --- doc/api/cluster.md | 2 +- doc/api/readline.md | 14 +++++------ doc/api/repl.md | 4 +-- doc/api/stream.md | 8 +++--- tools/doc/type-parser.js | 53 ++++++++++++++++++++++++++++++---------- 5 files changed, 54 insertions(+), 27 deletions(-) diff --git a/doc/api/cluster.md b/doc/api/cluster.md index 648c753931d936..0bc768030728ab 100644 --- a/doc/api/cluster.md +++ b/doc/api/cluster.md @@ -265,7 +265,7 @@ It is not emitted in the worker. added: v0.7.7 --> -* Returns: {Worker} A reference to `worker`. +* Returns: {cluster.Worker} A reference to `worker`. In a worker, this function will close all servers, wait for the `'close'` event on those servers, and then disconnect the IPC channel. diff --git a/doc/api/readline.md b/doc/api/readline.md index b88c37e5bdbfa3..04c838a0024ed4 100644 --- a/doc/api/readline.md +++ b/doc/api/readline.md @@ -323,7 +323,7 @@ Interface's `input` *as if it were provided by the user*. added: v0.7.7 --> -* `stream` {Writable} +* `stream` {stream.Writable} * `dir` {number} * `-1` - to the left from cursor * `1` - to the right from cursor @@ -338,7 +338,7 @@ in a specified direction identified by `dir`. added: v0.7.7 --> -* `stream` {Writable} +* `stream` {stream.Writable} The `readline.clearScreenDown()` method clears the given [TTY][] stream from the current position of the cursor down. @@ -349,9 +349,9 @@ added: v0.1.98 --> * `options` {Object} - * `input` {Readable} The [Readable][] stream to listen to. This option is + * `input` {stream.Readable} The [Readable][] stream to listen to. This option is *required*. - * `output` {Writable} The [Writable][] stream to write readline data to. + * `output` {stream.Writable} The [Writable][] stream to write readline data to. * `completer` {Function} An optional function used for Tab autocompletion. * `terminal` {boolean} `true` if the `input` and `output` streams should be treated like a TTY, and have ANSI/VT100 escape codes written to it. @@ -431,7 +431,7 @@ function completer(linePartial, callback) { added: v0.7.7 --> -* `stream` {Writable} +* `stream` {stream.Writable} * `x` {number} * `y` {number} @@ -443,7 +443,7 @@ given [TTY][] `stream`. 
added: v0.7.7 --> -* `stream` {Readable} +* `stream` {stream.Readable} * `interface` {readline.Interface} The `readline.emitKeypressEvents()` method causes the given [Readable][] @@ -469,7 +469,7 @@ if (process.stdin.isTTY) added: v0.7.7 --> -* `stream` {Writable} +* `stream` {stream.Writable} * `dx` {number} * `dy` {number} diff --git a/doc/api/repl.md b/doc/api/repl.md index 69f709a1be874b..17c44fbe36c7ba 100644 --- a/doc/api/repl.md +++ b/doc/api/repl.md @@ -381,9 +381,9 @@ added: v0.1.91 * `options` {Object | string} * `prompt` {string} The input prompt to display. Defaults to `> `. - * `input` {Readable} The Readable stream from which REPL input will be read. + * `input` {stream.Readable} The Readable stream from which REPL input will be read. Defaults to `process.stdin`. - * `output` {Writable} The Writable stream to which REPL output will be + * `output` {stream.Writable} The Writable stream to which REPL output will be written. Defaults to `process.stdout`. * `terminal` {boolean} If `true`, specifies that the `output` should be treated as a TTY terminal, and have ANSI/VT100 escape codes written to it. diff --git a/doc/api/stream.md b/doc/api/stream.md index 4e7e897b3dd163..f1b5ef565517ff 100644 --- a/doc/api/stream.md +++ b/doc/api/stream.md @@ -385,7 +385,7 @@ added: v0.11.15 --> * `encoding` {string} The new default encoding -* Returns: `this` +* Returns: {this} The `writable.setDefaultEncoding()` method sets the default `encoding` for a [Writable][] stream. @@ -771,7 +771,7 @@ readable.isPaused(); // === false added: v0.9.4 --> -* Returns: `this` +* Returns: {this} The `readable.pause()` method will cause a stream in flowing mode to stop emitting [`'data'`][] events, switching out of flowing mode. Any data that @@ -903,7 +903,7 @@ event has been emitted will return `null`. No runtime error will be raised. added: v0.9.4 --> -* Returns: `this` +* Returns: {this} The `readable.resume()` method causes an explicitly paused Readable stream to resume emitting [`'data'`][] events, switching the stream into flowing mode. @@ -926,7 +926,7 @@ added: v0.9.4 --> * `encoding` {string} The encoding to use. -* Returns: `this` +* Returns: {this} The `readable.setEncoding()` method sets the character encoding for data read from the Readable stream. 
diff --git a/tools/doc/type-parser.js b/tools/doc/type-parser.js index bfe058a42c2cd2..5aa91c39b2a669 100644 --- a/tools/doc/type-parser.js +++ b/tools/doc/type-parser.js @@ -14,31 +14,58 @@ const jsPrimitives = { 'undefined': 'Undefined' }; const jsGlobalTypes = [ - 'Error', 'Object', 'Function', 'Array', 'TypedArray', 'Uint8Array', - 'Uint16Array', 'Uint32Array', 'Int8Array', 'Int16Array', 'Int32Array', - 'Uint8ClampedArray', 'Float32Array', 'Float64Array', 'Date', 'RegExp', - 'ArrayBuffer', 'DataView', 'Promise', 'EvalError', 'RangeError', - 'ReferenceError', 'SyntaxError', 'TypeError', 'URIError' + 'Array', 'ArrayBuffer', 'DataView', 'Date', 'Error', 'EvalError', + 'Float32Array', 'Float64Array', 'Function', 'Int16Array', 'Int32Array', + 'Int8Array', 'Object', 'Promise', 'RangeError', 'ReferenceError', 'RegExp', + 'SharedArrayBuffer', 'SyntaxError', 'TypeError', 'TypedArray', 'URIError', + 'Uint16Array', 'Uint32Array', 'Uint8Array', 'Uint8ClampedArray' ]; const typeMap = { + 'Iterable': + 'https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols#The_iterable_protocol', + 'Iterator': + 'https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols#The_iterator_protocol', + + 'this': + 'https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/this', + 'Buffer': 'buffer.html#buffer_class_buffer', - 'Handle': 'net.html#net_server_listen_handle_backlog_callback', - 'Stream': 'stream.html#stream_stream', - 'stream.Writable': 'stream.html#stream_class_stream_writable', - 'stream.Readable': 'stream.html#stream_class_stream_readable', - 'stream.Duplex': 'stream.html#stream_class_stream_duplex', + 'ChildProcess': 'child_process.html#child_process_class_childprocess', + 'cluster.Worker': 'cluster.html#cluster_class_worker', + 'dgram.Socket': 'dgram.html#dgram_class_dgram_socket', - 'net.Socket': 'net.html#net_class_net_socket', - 'tls.TLSSocket': 'tls.html#tls_class_tls_tlssocket', + + 'Domain': 'domain.html#domain_class_domain', + 'EventEmitter': 'events.html#events_class_eventemitter', - 'Timer': 'timers.html#timers_timers', + + 'fs.Stats': 'fs.html#fs_class_fs_stats', + 'http.Agent': 'http.html#http_class_http_agent', 'http.ClientRequest': 'http.html#http_class_http_clientrequest', 'http.IncomingMessage': 'http.html#http_class_http_incomingmessage', 'http.Server': 'http.html#http_class_http_server', 'http.ServerResponse': 'http.html#http_class_http_serverresponse', + + 'Handle': 'net.html#net_server_listen_handle_backlog_callback', + 'net.Server': 'net.html#net_class_net_server', + 'net.Socket': 'net.html#net_class_net_socket', + + 'readline.Interface': 'readline.html#readline_class_interface', + + 'Stream': 'stream.html#stream_stream', + 'stream.Duplex': 'stream.html#stream_class_stream_duplex', + 'stream.Readable': 'stream.html#stream_class_stream_readable', + 'stream.Writable': 'stream.html#stream_class_stream_writable', + + 'Immediate': 'timers.html#timers_class_immediate', + 'Timeout': 'timers.html#timers_class_timeout', + 'Timer': 'timers.html#timers_timers', + + 'tls.TLSSocket': 'tls.html#tls_class_tls_tlssocket', + 'URL': 'url.html#url_the_whatwg_url_api', 'URLSearchParams': 'url.html#url_class_urlsearchparams' }; From 6ff763bd665a5fddbff0948bd6b4e91cbcec3216 Mon Sep 17 00:00:00 2001 From: Bartosz Sosnowski Date: Mon, 11 Dec 2017 18:08:20 +0100 Subject: [PATCH 023/227] win, build: fix without-intl option Fixes --with-intl option passed to configure script when without-intl is used Backport-PR-URL: 
https://github.com/nodejs/node/pull/19485
PR-URL: https://github.com/nodejs/node/pull/17614
Reviewed-By: James M Snell
Reviewed-By: Anna Henningsen
Reviewed-By: Richard Lau
Reviewed-By: Gireesh Punathil
---
 vcbuild.bat | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/vcbuild.bat b/vcbuild.bat
index e6a6d0d20f7ac2..815a35d4c7030a 100644
--- a/vcbuild.bat
+++ b/vcbuild.bat
@@ -85,7 +85,7 @@ if /i "%1"=="upload" set upload=1&goto arg-ok
 if /i "%1"=="small-icu" set i18n_arg=%1&goto arg-ok
 if /i "%1"=="full-icu" set i18n_arg=%1&goto arg-ok
 if /i "%1"=="intl-none" set i18n_arg=%1&goto arg-ok
-if /i "%1"=="without-intl" set i18n_arg=%1&goto arg-ok
+if /i "%1"=="without-intl" set i18n_arg=none&goto arg-ok
 if /i "%1"=="download-all" set download_arg="--download=all"&goto arg-ok
 if /i "%1"=="ignore-flaky" set test_args=%test_args% --flaky-tests=dontcare&goto arg-ok
 if /i "%1"=="enable-vtune" set enable_vtune_arg=1&goto arg-ok

From a91b1b928ca7705e03f1002989677811c8098799 Mon Sep 17 00:00:00 2001
From: Birunthan Mohanathas
Date: Mon, 22 Jan 2018 22:25:34 +0530
Subject: [PATCH 024/227] win, build: fix intl-none option

Like #17614, but for the `intl-none` option.

Backport-PR-URL: https://github.com/nodejs/node/pull/19485
Refs: https://github.com/nodejs/node/pull/17614
PR-URL: https://github.com/nodejs/node/pull/18292
Reviewed-By: Richard Lau
Reviewed-By: Gireesh Punathil
Reviewed-By: James M Snell
Reviewed-By: Bartosz Sosnowski
---
 vcbuild.bat | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/vcbuild.bat b/vcbuild.bat
index 815a35d4c7030a..a46c99d75916e6 100644
--- a/vcbuild.bat
+++ b/vcbuild.bat
@@ -84,7 +84,7 @@ if /i "%1"=="build-release" set build_release=1&goto arg-ok
 if /i "%1"=="upload" set upload=1&goto arg-ok
 if /i "%1"=="small-icu" set i18n_arg=%1&goto arg-ok
 if /i "%1"=="full-icu" set i18n_arg=%1&goto arg-ok
-if /i "%1"=="intl-none" set i18n_arg=%1&goto arg-ok
+if /i "%1"=="intl-none" set i18n_arg=none&goto arg-ok
 if /i "%1"=="without-intl" set i18n_arg=none&goto arg-ok
 if /i "%1"=="download-all" set download_arg="--download=all"&goto arg-ok
 if /i "%1"=="ignore-flaky" set test_args=%test_args% --flaky-tests=dontcare&goto arg-ok

From 988cca841ed0b2f22d4f06036a95f9209d54c7c3 Mon Sep 17 00:00:00 2001
From: Anna Henningsen
Date: Tue, 30 Jan 2018 20:27:17 +0100
Subject: [PATCH 025/227] process: fix reading zero-length env vars on win32
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Up until now, Node did not clear the current error code attempting
to read environment variables on Windows.

Since checking the error code is the way we distinguish between
missing and zero-length environment variables, this could lead to
a false positive when the error code was still tainted.

In the simplest case, accessing a missing variable and then
a zero-length one would lead Node to believe that both calls
yielded an error.

Before:

    > process.env.I=''; process.env.Q; process.env.I
    undefined
    > process.env.I=''; /*process.env.Q;*/ process.env.I
    ''

After:

    > process.env.I=''; process.env.Q; process.env.I
    ''
    > process.env.I=''; /*process.env.Q;*/ process.env.I
    ''

This only affects Node 8 and above, since before
1aa595e5bd64241451b3884d3029b9b9aa97c42d we always constructed
a `v8::String::Value` instance for passing the lookup key to the
OS, which in turn always made a heap allocation and therefore
reset the error code.
Backport-PR-URL: https://github.com/nodejs/node/pull/19484 PR-URL: https://github.com/nodejs/node/pull/18463 Reviewed-By: Ben Noordhuis Reviewed-By: Jeremiah Senkpiel Reviewed-By: Fedor Indutny Reviewed-By: Tobias Nießen Reviewed-By: Colin Ihrig Reviewed-By: Michaël Zasso Reviewed-By: James M Snell Reviewed-By: Tiancheng "Timothy" Gu Reviewed-By: Sakthipriyan Vairamani Reviewed-By: Ruben Bridgewater --- src/node.cc | 2 ++ .../test-process-env-windows-error-reset.js | 22 +++++++++++++++++++ 2 files changed, 24 insertions(+) create mode 100644 test/parallel/test-process-env-windows-error-reset.js diff --git a/src/node.cc b/src/node.cc index 8ab9e4d1f45f51..be82939e00b708 100644 --- a/src/node.cc +++ b/src/node.cc @@ -2799,6 +2799,7 @@ static void EnvGetter(Local property, #else // _WIN32 String::Value key(property); WCHAR buffer[32767]; // The maximum size allowed for environment variables. + SetLastError(ERROR_SUCCESS); DWORD result = GetEnvironmentVariableW(reinterpret_cast(*key), buffer, arraysize(buffer)); @@ -2846,6 +2847,7 @@ static void EnvQuery(Local property, #else // _WIN32 String::Value key(property); WCHAR* key_ptr = reinterpret_cast(*key); + SetLastError(ERROR_SUCCESS); if (GetEnvironmentVariableW(key_ptr, nullptr, 0) > 0 || GetLastError() == ERROR_SUCCESS) { rc = 0; diff --git a/test/parallel/test-process-env-windows-error-reset.js b/test/parallel/test-process-env-windows-error-reset.js new file mode 100644 index 00000000000000..59e5f287d82346 --- /dev/null +++ b/test/parallel/test-process-env-windows-error-reset.js @@ -0,0 +1,22 @@ +'use strict'; +require('../common'); +const assert = require('assert'); + +// This checks that after accessing a missing env var, a subsequent +// env read will succeed even for empty variables. + +{ + process.env.FOO = ''; + process.env.NONEXISTENT_ENV_VAR; + const foo = process.env.FOO; + + assert.strictEqual(foo, ''); +} + +{ + process.env.FOO = ''; + process.env.NONEXISTENT_ENV_VAR; + const hasFoo = 'FOO' in process.env; + + assert.strictEqual(hasFoo, true); +} From 0067bccf6f6bcc5558d20b06f8504449030bd885 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tobias=20Nie=C3=9Fen?= Date: Thu, 8 Feb 2018 19:43:05 +0100 Subject: [PATCH 026/227] doc: fix description of createDecipheriv PR-URL: https://github.com/nodejs/node/pull/18651 Refs: https://github.com/nodejs/node/pull/12223 Reviewed-By: Ruben Bridgewater Reviewed-By: Luigi Pinca Reviewed-By: Tiancheng "Timothy" Gu --- doc/api/crypto.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/api/crypto.md b/doc/api/crypto.md index 5f8de17b0b9ff7..18721a5a2b0f0e 100644 --- a/doc/api/crypto.md +++ b/doc/api/crypto.md @@ -1172,8 +1172,8 @@ recent OpenSSL releases, `openssl list-cipher-algorithms` will display the available cipher algorithms. The `key` is the raw key used by the `algorithm` and `iv` is an -[initialization vector][]. Both arguments must be `'utf8'` encoded strings or -[buffers][`Buffer`]. +[initialization vector][]. Both arguments must be `'utf8'` encoded strings, +[Buffers][`Buffer`], `TypedArray`, or `DataView`s. ### crypto.createDiffieHellman(prime[, prime_encoding][, generator][, generator_encoding]) * `urlString` {string} The URL string to parse. 
From bf72ee667e164acfe466d2c171fd8020a9f0c47b Mon Sep 17 00:00:00 2001
From: Matheus Marchini
Date: Mon, 12 Feb 2018 12:43:11 -0500
Subject: [PATCH 034/227] doc: add mmarchini to collaborators

PR-URL: https://github.com/nodejs/node/pull/18740
Reviewed-By: Joyee Cheung
Reviewed-By: Matteo Collina
Reviewed-By: Ruben Bridgewater
Reviewed-By: Richard Lau
---
 README.md | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/README.md b/README.md
index 43b6b17d959ebe..cc8a324d1520d7 100644
--- a/README.md
+++ b/README.md
@@ -429,6 +429,8 @@ For more information about the governance of the Node.js project, see
 **Mikeal Rogers** <mikeal.rogers@gmail.com>
 * [misterdjules](https://github.com/misterdjules) -
 **Julien Gilli** <jgilli@nodejs.org>
+* [mmarchini](https://github.com/mmarchini) -
+**Matheus Marchini** <matheus@sthima.com>
 * [mscdex](https://github.com/mscdex) -
 **Brian White** <mscdex@mscdex.net>
 * [MylesBorins](https://github.com/MylesBorins) -

From 057c80b0886464ec9f5899813a5217e27f262097 Mon Sep 17 00:00:00 2001
From: Myles Borins
Date: Tue, 13 Feb 2018 02:09:44 -0500
Subject: [PATCH 035/227] doc: move Fedor to TSC Emeritus
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

In a conversation Fedor requested that this PR be made. They plan to
continue working on core as a Collaborator. It is this committer's
belief that if Fedor would like to join the TSC again in the future
there is no reason that could not be made possible. Thanks for all
the hard work!

PR-URL: https://github.com/nodejs/node/pull/18752
Reviewed-By: Gireesh Punathil
Reviewed-By: Fedor Indutny
Reviewed-By: Yuta Hiroto
Reviewed-By: Ruben Bridgewater
Reviewed-By: Colin Ihrig
Reviewed-By: James M Snell
Reviewed-By: Anatoli Papirovski
Reviewed-By: Ali Ijaz Sheikh
Reviewed-By: Matteo Collina
Reviewed-By: Сковорода Никита Андреевич
Reviewed-By: Rich Trott
Reviewed-By: Jon Moss
Reviewed-By: Tobias Nießen
Reviewed-By: Joyee Cheung
Reviewed-By: Sakthipriyan Vairamani
Reviewed-By: Michael Dawson
Reviewed-By: Tiancheng "Timothy" Gu
Reviewed-By: Evan Lucas
---
 README.md | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/README.md b/README.md
index cc8a324d1520d7..6d894e9c8e40e8 100644
--- a/README.md
+++ b/README.md
@@ -249,8 +249,6 @@ For more information about the governance of the Node.js project, see
 **Jeremiah Senkpiel** <fishrock123@rocketmail.com>
 * [gibfahn](https://github.com/gibfahn) -
 **Gibson Fahnestock** <gibfahn@gmail.com> (he/him)
-* [indutny](https://github.com/indutny) -
-**Fedor Indutny** <fedor.indutny@gmail.com>
 * [jasnell](https://github.com/jasnell) -
 **James M Snell** <jasnell@gmail.com> (he/him)
 * [joyeecheung](https://github.com/joyeecheung) -
@@ -280,6 +278,8 @@ For more information about the governance of the Node.js project, see
 **Ben Noordhuis** <info@bnoordhuis.nl>
 * [chrisdickinson](https://github.com/chrisdickinson) -
 **Chris Dickinson** <christopher.s.dickinson@gmail.com>
+* [indutny](https://github.com/indutny) -
+**Fedor Indutny** <fedor.indutny@gmail.com>
 * [isaacs](https://github.com/isaacs) -
 **Isaac Z.
Schlueter** <i@izs.me> * [joshgav](https://github.com/joshgav) - From 77a405b92f768a22c2fc2201d9fac4408108e741 Mon Sep 17 00:00:00 2001 From: Aaron Bieber Date: Sat, 3 Feb 2018 09:52:29 -0700 Subject: [PATCH 036/227] lib: set process.execPath on OpenBSD PR-URL: https://github.com/nodejs/node/pull/18543 Reviewed-By: Luigi Pinca Reviewed-By: James M Snell --- lib/internal/bootstrap_node.js | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/lib/internal/bootstrap_node.js b/lib/internal/bootstrap_node.js index 4b5a500adc598e..3cadf1e399e51b 100644 --- a/lib/internal/bootstrap_node.js +++ b/lib/internal/bootstrap_node.js @@ -66,6 +66,13 @@ // URL::ToObject() method is used. NativeModule.require('internal/url'); + // On OpenBSD process.execPath will be relative unless we + // get the full path before process.execPath is used. + if (process.platform === 'openbsd') { + const { realpathSync } = NativeModule.require('fs'); + process.execPath = realpathSync.native(process.execPath); + } + Object.defineProperty(process, 'argv0', { enumerable: true, configurable: false, From d799b1cb615b5da86a5ed8e39c3bc519266af841 Mon Sep 17 00:00:00 2001 From: Aaron Bieber Date: Sat, 3 Feb 2018 09:53:57 -0700 Subject: [PATCH 037/227] test: update a few tests to work on OpenBSD PR-URL: https://github.com/nodejs/node/pull/18543 Reviewed-By: Luigi Pinca Reviewed-By: James M Snell --- test/common/index.js | 1 + test/parallel/test-child-process-exec-timeout.js | 11 +++++++++-- test/parallel/test-net-dns-error.js | 8 ++++++-- test/parallel/test-setproctitle.js | 2 +- 4 files changed, 17 insertions(+), 5 deletions(-) diff --git a/test/common/index.js b/test/common/index.js index d762f4e0aa8498..315e35d04195f4 100644 --- a/test/common/index.js +++ b/test/common/index.js @@ -29,6 +29,7 @@ exports.isLinuxPPCBE = (process.platform === 'linux') && (os.endianness() === 'BE'); exports.isSunOS = process.platform === 'sunos'; exports.isFreeBSD = process.platform === 'freebsd'; +exports.isOpenBSD = process.platform === 'openbsd'; exports.isLinux = process.platform === 'linux'; exports.isOSX = process.platform === 'darwin'; diff --git a/test/parallel/test-child-process-exec-timeout.js b/test/parallel/test-child-process-exec-timeout.js index 20923d922ab84c..369fab7d01d17a 100644 --- a/test/parallel/test-child-process-exec-timeout.js +++ b/test/parallel/test-child-process-exec-timeout.js @@ -16,15 +16,22 @@ const cmd = `"${process.execPath}" "${__filename}" child`; // Test the case where a timeout is set, and it expires. cp.exec(cmd, { timeout: 1 }, common.mustCall((err, stdout, stderr) => { + let sigterm = 'SIGTERM'; assert.strictEqual(err.killed, true); - assert.strictEqual(err.code, null); + // TODO OpenBSD returns a null signal and 143 for code + if (common.isOpenBSD) { + assert.strictEqual(err.code, 143); + sigterm = null; + } else { + assert.strictEqual(err.code, null); + } // At least starting with Darwin Kernel Version 16.4.0, sending a SIGTERM to a // process that is still starting up kills it with SIGKILL instead of SIGTERM. 
// See: https://github.com/libuv/libuv/issues/1226 if (common.isOSX) assert.ok(err.signal === 'SIGTERM' || err.signal === 'SIGKILL'); else - assert.strictEqual(err.signal, 'SIGTERM'); + assert.strictEqual(err.signal, sigterm); assert.strictEqual(err.cmd, cmd); assert.strictEqual(stdout.trim(), ''); assert.strictEqual(stderr.trim(), ''); diff --git a/test/parallel/test-net-dns-error.js b/test/parallel/test-net-dns-error.js index 5ca02313686589..5f51c8d5a45c62 100644 --- a/test/parallel/test-net-dns-error.js +++ b/test/parallel/test-net-dns-error.js @@ -6,17 +6,21 @@ const net = require('net'); const host = '*'.repeat(256); +let errCode = 'ENOTFOUND'; +if (common.isOpenBSD) + errCode = 'EAI_FAIL'; + function do_not_call() { throw new Error('This function should not have been called.'); } const socket = net.connect(42, host, do_not_call); socket.on('error', common.mustCall(function(err) { - assert.strictEqual(err.code, 'ENOTFOUND'); + assert.strictEqual(err.code, errCode); })); socket.on('lookup', function(err, ip, type) { assert(err instanceof Error); - assert.strictEqual(err.code, 'ENOTFOUND'); + assert.strictEqual(err.code, errCode); assert.strictEqual(ip, undefined); assert.strictEqual(type, undefined); }); diff --git a/test/parallel/test-setproctitle.js b/test/parallel/test-setproctitle.js index 12aea32ba306c7..0b703445ac20f2 100644 --- a/test/parallel/test-setproctitle.js +++ b/test/parallel/test-setproctitle.js @@ -30,7 +30,7 @@ exec(cmd, common.mustCall((error, stdout, stderr) => { assert.strictEqual(stderr, ''); // freebsd always add ' (procname)' to the process title - if (common.isFreeBSD) + if (common.isFreeBSD || common.isOpenBSD) title += ` (${path.basename(process.execPath)})`; // omitting trailing whitespace and \n From 1edadebaa070db550f0f18a0a7ab638de8780876 Mon Sep 17 00:00:00 2001 From: Luigi Pinca Date: Mon, 19 Feb 2018 14:43:02 +0100 Subject: [PATCH 038/227] http: allow _httpMessage to be GC'ed Set `socket._httpMessage` to `null` before emitting the `'connect'` or `'upgrade'` event. PR-URL: https://github.com/nodejs/node/pull/18865 Reviewed-By: Anna Henningsen Reviewed-By: Colin Ihrig Reviewed-By: Ruben Bridgewater Reviewed-By: Matteo Collina --- lib/_http_agent.js | 1 + test/parallel/test-http-connect.js | 7 ++++++- 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/lib/_http_agent.js b/lib/_http_agent.js index f11fa9fd6d2f5f..ce4cd05005e211 100644 --- a/lib/_http_agent.js +++ b/lib/_http_agent.js @@ -249,6 +249,7 @@ function installListeners(agent, s, options) { s.removeListener('close', onClose); s.removeListener('free', onFree); s.removeListener('agentRemove', onRemove); + s._httpMessage = null; } s.on('agentRemove', onRemove); } diff --git a/test/parallel/test-http-connect.js b/test/parallel/test-http-connect.js index 854d2d893416a0..9854c68be98ce2 100644 --- a/test/parallel/test-http-connect.js +++ b/test/parallel/test-http-connect.js @@ -28,7 +28,11 @@ server.listen(0, common.mustCall(() => { path: 'google.com:443' }, common.mustNotCall()); - req.on('close', common.mustCall(() => {})); + req.on('socket', common.mustCall((socket) => { + assert.strictEqual(socket._httpMessage, req); + })); + + req.on('close', common.mustCall()); req.on('connect', common.mustCall((res, socket, firstBodyChunk) => { // Make sure this request got removed from the pool. @@ -39,6 +43,7 @@ server.listen(0, common.mustCall(() => { // Make sure this socket has detached. 
assert(!socket.ondata); assert(!socket.onend); + assert.strictEqual(socket._httpMessage, null); assert.strictEqual(socket.listeners('connect').length, 0); assert.strictEqual(socket.listeners('data').length, 0); From 18acad349c0f0bacdd7e318a5c7a509cad9574c9 Mon Sep 17 00:00:00 2001 From: Luigi Pinca Date: Sun, 18 Mar 2018 10:59:44 +0100 Subject: [PATCH 039/227] http: make socketPath work with no agent Currently `Agent.prototype.createConnection()` is called unconditionally if the `socketPath` option is used. This throws an error if no agent is used, preventing, for example, the `socketPath` and `createConnection` options from being used together. This commit fixes the issue by falling back to the `createConnection` option or `net.createConnection()`. PR-URL: https://github.com/nodejs/node/pull/19425 Reviewed-By: Rod Vagg Reviewed-By: Matteo Collina Reviewed-By: Matheus Marchini Reviewed-By: Chen Gang --- lib/_http_client.js | 6 +++- .../test-http-client-unix-socket-no-agent.js | 28 +++++++++++++++++++ 2 files changed, 33 insertions(+), 1 deletion(-) create mode 100644 test/parallel/test-http-client-unix-socket-no-agent.js diff --git a/lib/_http_client.js b/lib/_http_client.js index 9d2057814133b7..ea181cff31a482 100644 --- a/lib/_http_client.js +++ b/lib/_http_client.js @@ -138,7 +138,11 @@ function ClientRequest(options, cb) { timeout: self.timeout, rejectUnauthorized: !!options.rejectUnauthorized }; - const newSocket = self.agent.createConnection(optionsPath, oncreate); + const newSocket = self.agent + ? self.agent.createConnection(optionsPath, oncreate) + : typeof options.createConnection === 'function' + ? options.createConnection(optionsPath, oncreate) + : net.createConnection(optionsPath); if (newSocket && !called) { called = true; self.onSocket(newSocket); diff --git a/test/parallel/test-http-client-unix-socket-no-agent.js b/test/parallel/test-http-client-unix-socket-no-agent.js new file mode 100644 index 00000000000000..9dd11403c6bf79 --- /dev/null +++ b/test/parallel/test-http-client-unix-socket-no-agent.js @@ -0,0 +1,28 @@ +'use strict'; +const common = require('../common'); +const Countdown = require('../common/countdown'); + +const http = require('http'); +const { createConnection } = require('net'); + +const server = http.createServer((req, res) => { + res.end(); +}); + +const countdown = new Countdown(2, () => { + server.close(); +}); + +common.refreshTmpDir(); + +server.listen(common.PIPE, common.mustCall(() => { + http.get({ createConnection, socketPath: common.PIPE }, onResponse); + http.get({ agent: 0, socketPath: common.PIPE }, onResponse); +})); + +function onResponse(res) { + res.on('end', () => { + countdown.dec(); + }); + res.resume(); +} From 4d67369c1b4a0a98f1c6a3a29a6b334c4de1e988 Mon Sep 17 00:00:00 2001 From: Vse Mozhet Byt Date: Mon, 2 Apr 2018 19:27:57 +0300 Subject: [PATCH 040/227] doc: fix various nits MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Replace 2 hyphens (--) by spaced m-dashes (—) as per STYLE_GUIDE.md. * Space infix operators. * Unify quotes in inline code spans (use only single quotes). * Unify `* Returns:` (eliminate deviations). * Dedupe spaces.
Backport-PR-URL: https://github.com/nodejs/node/pull/19761 PR-URL: https://github.com/nodejs/node/pull/19743 Reviewed-By: James M Snell Reviewed-By: Trivikram Kamat --- doc/api/addons.md | 2 +- doc/api/buffer.md | 2 +- doc/api/child_process.md | 17 +++++----- doc/api/cli.md | 4 +-- doc/api/cluster.md | 24 +++++++------- doc/api/console.md | 2 +- doc/api/crypto.md | 2 +- doc/api/debugger.md | 2 +- doc/api/dns.md | 6 ++-- doc/api/documentation.md | 6 ++-- doc/api/domain.md | 32 +++++++++--------- doc/api/errors.md | 16 ++++----- doc/api/fs.md | 38 ++++++++++----------- doc/api/globals.md | 6 ++-- doc/api/http.md | 72 ++++++++++++++++++++-------------------- doc/api/https.md | 2 +- doc/api/modules.md | 56 +++++++++++++++---------------- doc/api/net.md | 32 +++++++++--------- doc/api/os.md | 4 +-- doc/api/path.md | 6 ++-- doc/api/process.md | 42 +++++++++++------------ doc/api/readline.md | 2 +- doc/api/repl.md | 10 +++--- doc/api/stream.md | 8 ++--- doc/api/tls.md | 24 +++++++------- doc/api/url.md | 4 +-- doc/api/util.md | 10 +++--- doc/api/v8.md | 2 +- doc/api/vm.md | 2 +- doc/api/zlib.md | 20 +++++------ 30 files changed, 228 insertions(+), 227 deletions(-) diff --git a/doc/api/addons.md b/doc/api/addons.md index 17811245f1c23e..4a9b72e1661612 100644 --- a/doc/api/addons.md +++ b/doc/api/addons.md @@ -101,7 +101,7 @@ Addon module name is `addon`. Once the source code has been written, it must be compiled into the binary `addon.node` file. To do so, create a file called `binding.gyp` in the top-level of the project describing the build configuration of your module -using a JSON-like format. This file is used by [node-gyp][] -- a tool written +using a JSON-like format. This file is used by [node-gyp][] — a tool written specifically to compile Node.js Addons. ```json diff --git a/doc/api/buffer.md b/doc/api/buffer.md index 9583fa15949fd2..2668d43e8754f4 100644 --- a/doc/api/buffer.md +++ b/doc/api/buffer.md @@ -395,7 +395,7 @@ deprecated: v6.0.0 * `size` {integer} The desired length of the new `Buffer`. -Allocates a new `Buffer` of `size` bytes. The `size` must be less than or equal +Allocates a new `Buffer` of `size` bytes. The `size` must be less than or equal to the value of [`buffer.kMaxLength`]. Otherwise, a [`RangeError`] is thrown. A zero-length `Buffer` will be created if `size <= 0`. diff --git a/doc/api/child_process.md b/doc/api/child_process.md index b55bd0fea1d874..4759d9bd4a9337 100644 --- a/doc/api/child_process.md +++ b/doc/api/child_process.md @@ -38,7 +38,7 @@ the event loop until the spawned process either exits or is terminated. For convenience, the `child_process` module provides a handful of synchronous and asynchronous alternatives to [`child_process.spawn()`][] and -[`child_process.spawnSync()`][]. *Note that each of these alternatives are +[`child_process.spawnSync()`][]. *Note that each of these alternatives are implemented on top of [`child_process.spawn()`][] or [`child_process.spawnSync()`][].* * [`child_process.exec()`][]: spawns a shell and runs a command within that shell, @@ -140,7 +140,8 @@ added: v0.1.90 command line parsing should be compatible with `cmd.exe`. * `timeout` {number} **Default:** `0` * [`maxBuffer`][] {number} Largest amount of data (in bytes) allowed on - stdout or stderr - if exceeded child process is killed. **Default:** `200*1024` + stdout or stderr - if exceeded child process is killed. + **Default:** `200 * 1024`. 
* `killSignal` {string|integer} **Default:** `'SIGTERM'` * `uid` {number} Sets the user identity of the process (see setuid(2)). * `gid` {number} Sets the group identity of the process (see setgid(2)). @@ -170,7 +171,7 @@ exec('cat *.js bad_file | wc -l', (error, stdout, stderr) => { ``` If a `callback` function is provided, it is called with the arguments -`(error, stdout, stderr)`. On success, `error` will be `null`. On error, +`(error, stdout, stderr)`. On success, `error` will be `null`. On error, `error` will be an instance of [`Error`][]. The `error.code` property will be the exit code of the child process while `error.signal` will be set to the signal that terminated the process. Any exit code other than `0` is considered @@ -218,7 +219,7 @@ added: v0.1.91 * `timeout` {number} **Default:** `0` * [`maxBuffer`][] {number} Largest amount of data (in bytes) allowed on stdout or stderr - if exceeded child process is killed. - **Default:**: `200*1024` + **Default:** `200 * 1024`. * `killSignal` {string|integer} **Default:** `'SIGTERM'` * `uid` {number} Sets the user identity of the process (see setuid(2)). * `gid` {number} Sets the group identity of the process (see setgid(2)). @@ -446,7 +447,7 @@ disabled*. On non-Windows platforms, if `options.detached` is set to `true`, the child process will be made the leader of a new process group and session. Note that child processes may continue running after the parent exits regardless of -whether they are detached or not. See setsid(2) for more information. +whether they are detached or not. See setsid(2) for more information. By default, the parent will wait for the detached child to exit. To prevent the parent from waiting for a given `subprocess`, use the `subprocess.unref()` @@ -656,12 +657,12 @@ The `child_process.execSync()` method is generally identical to [`child_process.exec()`][] with the exception that the method will not return until the child process has fully closed. When a timeout has been encountered and `killSignal` is sent, the method won't return until the process has completely -exited. *Note that if the child process intercepts and handles the `SIGTERM` +exited. *Note that if the child process intercepts and handles the `SIGTERM` signal and doesn't exit, the parent process will wait until the child process has exited.* If the process times out, or has a non-zero exit code, this method ***will*** -throw. The [`Error`][] object will contain the entire result from +throw. The [`Error`][] object will contain the entire result from [`child_process.spawnSync()`][] **Note: Never pass unsanitised user input to this function. Any input @@ -997,7 +998,7 @@ properties: Defaults to `false`. The optional `callback` is a function that is invoked after the message is -sent but before the child may have received it. The function is called with a +sent but before the child may have received it. The function is called with a single argument: `null` on success, or an [`Error`][] object on failure. If no `callback` function is provided and the message cannot be sent, an diff --git a/doc/api/cli.md b/doc/api/cli.md index f1b33e883ae883..a8aee18e3efd3e 100644 --- a/doc/api/cli.md +++ b/doc/api/cli.md @@ -342,7 +342,7 @@ added: v6.12.0 --> `options...` are interpreted as if they had been specified on the command line -before the actual command line (so they can be overriden). Node will exit with +before the actual command line (so they can be overriden). 
Node will exit with an error if an option that is not allowed in the environment is used, such as `-p` or a script file. @@ -382,7 +382,7 @@ added: v3.0.0 Path to the file used to store the persistent REPL history. The default path is `~/.node_repl_history`, which is overridden by this variable. Setting the value -to an empty string (`""` or `" "`) disables persistent REPL history. +to an empty string (`''` or `' '`) disables persistent REPL history. ### `NODE_TTY_UNSAFE_ASYNC=1` diff --git a/doc/api/cluster.md b/doc/api/cluster.md index 0bc768030728ab..fd5b8398bceb69 100644 --- a/doc/api/cluster.md +++ b/doc/api/cluster.md @@ -90,26 +90,26 @@ Node.js process and a cluster worker differs: idea of what the number 7 file descriptor references. 2. `server.listen(handle)` Listening on handles explicitly will cause the worker to use the supplied handle, rather than talk to the master - process. If the worker already has the handle, then it's presumed + process. If the worker already has the handle, then it's presumed that you know what you are doing. 3. `server.listen(0)` Normally, this will cause servers to listen on a - random port. However, in a cluster, each worker will receive the - same "random" port each time they do `listen(0)`. In essence, the - port is random the first time, but predictable thereafter. If you + random port. However, in a cluster, each worker will receive the + same "random" port each time they do `listen(0)`. In essence, the + port is random the first time, but predictable thereafter. If you want to listen on a unique port, generate a port number based on the cluster worker ID. There is no routing logic in Node.js, or in your program, and no shared -state between the workers. Therefore, it is important to design your +state between the workers. Therefore, it is important to design your program such that it does not rely too heavily on in-memory data objects for things like sessions and login. Because workers are all separate processes, they can be killed or re-spawned depending on your program's needs, without affecting other -workers. As long as there are some workers still alive, the server will -continue to accept connections. If no workers are alive, existing connections -will be dropped and new connections will be refused. Node.js does not -automatically manage the number of workers for you, however. It is your +workers. As long as there are some workers still alive, the server will +continue to accept connections. If no workers are alive, existing connections +will be dropped and new connections will be refused. Node.js does not +automatically manage the number of workers for you, however. It is your responsibility to manage the worker pool for your application's needs. Although a primary use case for the `cluster` module is networking, it can @@ -489,7 +489,7 @@ Emitted after the worker IPC channel has disconnected. This can occur when a worker exits gracefully, is killed, or is disconnected manually (such as with worker.disconnect()). -There may be a delay between the `'disconnect'` and `'exit'` events. These events +There may be a delay between the `'disconnect'` and `'exit'` events. These events can be used to detect if the process is stuck in a cleanup or if there are long-living connections. 
@@ -582,7 +582,7 @@ The `addressType` is one of: * `4` (TCPv4) * `6` (TCPv6) * `-1` (unix domain socket) -* `"udp4"` or `"udp6"` (UDP v4 or v6) +* `'udp4'` or `'udp6'` (UDP v4 or v6) ## Event: 'message' @@ -709,7 +709,7 @@ distribute IOCP handles without incurring a large performance hit. `cluster.schedulingPolicy` can also be set through the `NODE_CLUSTER_SCHED_POLICY` environment variable. Valid -values are `"rr"` and `"none"`. +values are `'rr'` and `'none'`. ## cluster.settings Returns the Diffie-Hellman generator in the specified `encoding`, which can -be `'latin1'`, `'hex'`, or `'base64'`. If `encoding` is provided a string is +be `'latin1'`, `'hex'`, or `'base64'`. If `encoding` is provided a string is returned; otherwise a [`Buffer`][] is returned. ### diffieHellman.getPrime([encoding]) diff --git a/doc/api/debugger.md b/doc/api/debugger.md index a24437021b7bea..92b49fc9ed77eb 100644 --- a/doc/api/debugger.md +++ b/doc/api/debugger.md @@ -68,7 +68,7 @@ debug> repl Press Ctrl + C to leave debug repl > x 5 -> 2+2 +> 2 + 2 4 debug> next break in /home/indutny/Code/git/indutny/myscript.js:5 diff --git a/doc/api/dns.md b/doc/api/dns.md index 7fce91970aa10e..e4321b487340cf 100644 --- a/doc/api/dns.md +++ b/doc/api/dns.md @@ -212,7 +212,7 @@ will contain an array of IPv4 addresses (e.g. * `options` {Object} * `ttl` {boolean} Retrieve the Time-To-Live value (TTL) of each record. The callback receives an array of `{ address: '1.2.3.4', ttl: 60 }` objects - rather than an array of strings. The TTL is expressed in seconds. + rather than an array of strings. The TTL is expressed in seconds. * `callback` {Function} An `(err, result)` callback function. ## dns.resolve6(hostname[, options], callback) @@ -228,7 +228,7 @@ will contain an array of IPv6 addresses. * `options` {Object} * `ttl` {boolean} Retrieve the Time-To-Live value (TTL) of each record. The callback receives an array of `{ address: '0:1:2:3:4:5:6:7', ttl: 60 }` - objects rather than an array of strings. The TTL is expressed in seconds. + objects rather than an array of strings. The TTL is expressed in seconds. * `callback` {Function} An `(err, result)` callback function. ## dns.resolveCname(hostname, callback) @@ -258,7 +258,7 @@ added: v0.9.12 Uses the DNS protocol to resolve regular expression based records (`NAPTR` records) for the `hostname`. The `callback` function has arguments -`(err, addresses)`. The `addresses` argument passed to the `callback` function +`(err, addresses)`. The `addresses` argument passed to the `callback` function will contain an array of objects with the following properties: * `flags` diff --git a/doc/api/documentation.md b/doc/api/documentation.md index 917e3e8134a37c..8a768949f0530b 100644 --- a/doc/api/documentation.md +++ b/doc/api/documentation.md @@ -26,10 +26,10 @@ using the `tools/doc/generate.js` program. An HTML template is located at Throughout the documentation, you will see indications of a section's -stability. The Node.js API is still somewhat changing, and as it -matures, certain parts are more reliable than others. Some are so +stability. The Node.js API is still somewhat changing, and as it +matures, certain parts are more reliable than others. Some are so proven, and so relied upon, that they are unlikely to ever change at -all. Others are brand new and experimental, or known to be hazardous +all. Others are brand new and experimental, or known to be hazardous and in the process of being redesigned. 
The stability indices are as follows: diff --git a/doc/api/domain.md b/doc/api/domain.md index 08d6edc7d8cb24..99a44925034827 100644 --- a/doc/api/domain.md +++ b/doc/api/domain.md @@ -12,7 +12,7 @@ but should expect to have to migrate to a different solution in the future. Domains provide a way to handle multiple different IO operations as a -single group. If any of the event emitters or callbacks registered to a +single group. If any of the event emitters or callbacks registered to a domain emit an `'error'` event, or throw an error, then the domain object will be notified, rather than losing the context of the error in the `process.on('uncaughtException')` handler, or causing the program to @@ -30,7 +30,7 @@ never any way to safely "pick up where you left off", without leaking references, or creating some other sort of undefined brittle state. The safest way to respond to a thrown error is to shut down the -process. Of course, in a normal web server, you might have many +process. Of course, in a normal web server, you might have many connections open, and it is not reasonable to abruptly shut those down because an error was triggered by someone else. @@ -40,7 +40,7 @@ time, and stop listening for new requests in that worker. In this way, `domain` usage goes hand-in-hand with the cluster module, since the master process can fork a new worker when a worker -encounters an error. For Node.js programs that scale to multiple +encounters an error. For Node.js programs that scale to multiple machines, the terminating proxy or service registry can take note of the failure, and react accordingly. @@ -103,7 +103,7 @@ if (cluster.isMaster) { const domain = require('domain'); // See the cluster documentation for more details about using - // worker processes to serve requests. How it works, caveats, etc. + // worker processes to serve requests. How it works, caveats, etc. const server = require('http').createServer((req, res) => { const d = domain.create(); @@ -126,7 +126,7 @@ if (cluster.isMaster) { // stop taking new requests. server.close(); - // Let the master know we're dead. This will trigger a + // Let the master know we're dead. This will trigger a // 'disconnect' in the cluster master, and then it will fork // a new worker. cluster.worker.disconnect(); @@ -155,7 +155,7 @@ if (cluster.isMaster) { server.listen(PORT); } -// This part is not important. Just an example routing thing. +// This part is not important. Just an example routing thing. // You'd put your fancy application logic here. function handleRequest(req, res) { switch (req.url) { @@ -197,11 +197,11 @@ the active domain at the time of their creation. Additionally, callbacks passed to lowlevel event loop requests (such as to fs.open, or other callback-taking methods) will automatically be -bound to the active domain. If they throw, then the domain will catch +bound to the active domain. If they throw, then the domain will catch the error. In order to prevent excessive memory usage, Domain objects themselves -are not implicitly added as children of the active domain. If they +are not implicitly added as children of the active domain. If they were, then it would be too easy to prevent request and response objects from being properly garbage collected. @@ -218,7 +218,7 @@ Implicit binding only takes care of thrown errors and `'error'` events. Sometimes, the domain in use is not the one that ought to be used for a -specific event emitter. Or, the event emitter could have been created +specific event emitter. 
Or, the event emitter could have been created in the context of one domain, but ought to instead be bound to some other domain. @@ -268,7 +268,7 @@ Returns a new Domain object. The Domain class encapsulates the functionality of routing errors and uncaught exceptions to the active Domain object. -Domain is a child class of [`EventEmitter`][]. To handle the errors that it +Domain is a child class of [`EventEmitter`][]. To handle the errors that it catches, listen to its `'error'` event. ### domain.run(fn[, ...args]) @@ -318,13 +318,13 @@ to the domain. * `emitter` {EventEmitter|Timer} emitter or timer to be added to the domain -Explicitly adds an emitter to the domain. If any event handlers called by +Explicitly adds an emitter to the domain. If any event handlers called by the emitter throw an error, or if the emitter emits an `'error'` event, it will be routed to the domain's `'error'` event, just like with implicit binding. This also works with timers that are returned from [`setInterval()`][] and -[`setTimeout()`][]. If their callback function throws, it will be caught by +[`setTimeout()`][]. If their callback function throws, it will be caught by the domain 'error' handler. If the Timer or EventEmitter was already bound to a domain, it is removed @@ -334,7 +334,7 @@ from that one, and bound to this one instead. * `emitter` {EventEmitter|Timer} emitter or timer to be removed from the domain -The opposite of [`domain.add(emitter)`][]. Removes domain handling from the +The opposite of [`domain.add(emitter)`][]. Removes domain handling from the specified emitter. ### domain.bind(callback) @@ -343,7 +343,7 @@ specified emitter. * Returns: {Function} The bound function The returned function will be a wrapper around the supplied callback -function. When the returned function is called, any errors that are +function. When the returned function is called, any errors that are thrown will be routed to the domain's `'error'` event. #### Example @@ -370,7 +370,7 @@ d.on('error', (er) => { * `callback` {Function} The callback function * Returns: {Function} The intercepted function -This method is almost identical to [`domain.bind(callback)`][]. However, in +This method is almost identical to [`domain.bind(callback)`][]. However, in addition to catching thrown errors, it will also intercept [`Error`][] objects sent as the first argument to the function. @@ -439,7 +439,7 @@ without exiting the domain. ### domain.dispose() -> Stability: 0 - Deprecated. Please recover from failed IO actions +> Stability: 0 - Deprecated. Please recover from failed IO actions > explicitly via error event handlers set on the domain. Once `dispose` has been called, the domain will no longer be used by callbacks diff --git a/doc/api/errors.md b/doc/api/errors.md index 49b4bac595a6dc..306b58a908ee5d 100644 --- a/doc/api/errors.md +++ b/doc/api/errors.md @@ -107,7 +107,7 @@ pass or fail). For *all* `EventEmitter` objects, if an `'error'` event handler is not provided, the error will be thrown, causing the Node.js process to report an -unhandled exception and crash unless either: The [`domain`][domains] module is used +unhandled exception and crash unless either: The [`domain`][domains] module is used appropriately or a handler has been registered for the [`process.on('uncaughtException')`][] event. @@ -133,7 +133,7 @@ exactly how errors raised by those methods are propagated. Most asynchronous methods exposed by the Node.js core API follow an idiomatic -pattern referred to as a "Node.js style callback". 
With this pattern, a +pattern referred to as a "Node.js style callback". With this pattern, a callback function is passed to the method as an argument. When the operation either completes or an error is raised, the callback function is called with the Error object (if any) passed as the first argument. If no error was raised, @@ -155,7 +155,7 @@ fs.readFile('/some/file/that/does-exist', nodeStyleCallback); ``` The JavaScript `try / catch` mechanism **cannot** be used to intercept errors -generated by asynchronous APIs. A common mistake for beginners is to try to +generated by asynchronous APIs. A common mistake for beginners is to try to use `throw` inside a Node.js style callback: ```js @@ -204,7 +204,7 @@ provided text message. If an object is passed as `message`, the text message is generated by calling `message.toString()`. The `error.stack` property will represent the point in the code at which `new Error()` was called. Stack traces are dependent on [V8's stack trace API][]. Stack traces extend only to either -(a) the beginning of *synchronous code execution*, or (b) the number of frames +(a) the beginning of *synchronous code execution*, or (b) the number of frames given by the property `Error.stackTraceLimit`, whichever is smaller. ### Error.captureStackTrace(targetObject[, constructorOpt]) @@ -502,7 +502,7 @@ found [here][online]. - `EACCES` (Permission denied): An attempt was made to access a file in a way forbidden by its file access permissions. -- `EADDRINUSE` (Address already in use): An attempt to bind a server +- `EADDRINUSE` (Address already in use): An attempt to bind a server ([`net`][], [`http`][], or [`https`][]) to a local address failed due to another server on the local system already occupying that address. @@ -530,14 +530,14 @@ found [here][online]. `ulimit -n 2048` in the same shell that will run the Node.js process. - `ENOENT` (No such file or directory): Commonly raised by [`fs`][] operations - to indicate that a component of the specified pathname does not exist -- no + to indicate that a component of the specified pathname does not exist — no entity (file or directory) could be found by the given path. - `ENOTDIR` (Not a directory): A component of the given pathname existed, but was not a directory as expected. Commonly raised by [`fs.readdir`][]. - `ENOTEMPTY` (Directory not empty): A directory with entries was the target - of an operation that requires an empty directory -- usually [`fs.unlink`][]. + of an operation that requires an empty directory — usually [`fs.unlink`][]. - `EPERM` (Operation not permitted): An attempt was made to perform an operation that requires elevated privileges. @@ -549,7 +549,7 @@ found [here][online]. - `ETIMEDOUT` (Operation timed out): A connect or send request failed because the connected party did not properly respond after a period of time. Usually - encountered by [`http`][] or [`net`][] -- often a sign that a `socket.end()` + encountered by [`http`][] or [`net`][] — often a sign that a `socket.end()` was not properly called. [`fs.readdir`]: fs.html#fs_fs_readdir_path_options_callback diff --git a/doc/api/fs.md b/doc/api/fs.md index b47e639e3ca98f..74e3cc3c460c29 100644 --- a/doc/api/fs.md +++ b/doc/api/fs.md @@ -6,7 +6,7 @@ -File I/O is provided by simple wrappers around standard POSIX functions. To +File I/O is provided by simple wrappers around standard POSIX functions. To use this module do `require('fs')`. All the methods have asynchronous and synchronous forms. 
@@ -67,7 +67,7 @@ fs.rename('/tmp/hello', '/tmp/world', (err) => { In busy processes, the programmer is _strongly encouraged_ to use the asynchronous versions of these calls. The synchronous versions will block -the entire process until they complete--halting all connections. +the entire process until they complete — halting all connections. The relative path to a filename can be used. Remember, however, that this path will be relative to `process.cwd()`. @@ -252,16 +252,16 @@ page. The times in the stat object have the following semantics: -* `atime` "Access Time" - Time when file data last accessed. Changed +* `atime` "Access Time" - Time when file data last accessed. Changed by the mknod(2), utimes(2), and read(2) system calls. * `mtime` "Modified Time" - Time when file data last modified. Changed by the mknod(2), utimes(2), and write(2) system calls. * `ctime` "Change Time" - Time when file status was last changed - (inode data modification). Changed by the chmod(2), chown(2), + (inode data modification). Changed by the chmod(2), chown(2), link(2), mknod(2), rename(2), unlink(2), utimes(2), read(2), and write(2) system calls. -* `birthtime` "Birth Time" - Time of file creation. Set once when the - file is created. On filesystems where birthtime is not available, +* `birthtime` "Birth Time" - Time of file creation. Set once when the + file is created. On filesystems where birthtime is not available, this field may instead hold either the `ctime` or `1970-01-01T00:00Z` (ie, unix epoch timestamp `0`). Note that this value may be greater than `atime` or `mtime` in this case. On Darwin @@ -270,7 +270,7 @@ The times in the stat object have the following semantics: utimes(2) system call. Prior to Node v0.12, the `ctime` held the `birthtime` on Windows -systems. Note that as of v0.12, `ctime` is not "creation time", and +systems. Note that as of v0.12, `ctime` is not "creation time", and on Unix systems, it never was. ## Class: fs.WriteStream @@ -567,7 +567,7 @@ added: v0.0.2 * `callback` {Function} * `err` {Error} -Asynchronous close(2). No arguments other than a possible exception are given +Asynchronous close(2). No arguments other than a possible exception are given to the completion callback. ## fs.closeSync(fd) @@ -621,7 +621,7 @@ const defaults = { ``` `options` can include `start` and `end` values to read a range of bytes from -the file instead of the entire file. Both `start` and `end` are inclusive and +the file instead of the entire file. Both `start` and `end` are inclusive and start counting at 0. If `fd` is specified and `start` is omitted or `undefined`, `fs.createReadStream()` reads sequentially from the current file position. The `encoding` can be any one of those accepted by [`Buffer`][]. @@ -632,8 +632,8 @@ emitted. Note that `fd` should be blocking; non-blocking `fd`s should be passed to [`net.Socket`][]. If `autoClose` is false, then the file descriptor won't be closed, even if -there's an error. It is your responsibility to close it and make sure -there's no file descriptor leak. If `autoClose` is set to true (default +there's an error. It is your responsibility to close it and make sure +there's no file descriptor leak. If `autoClose` is set to true (default behavior), on `error` or `end` the file descriptor will be closed automatically. @@ -677,7 +677,7 @@ const defaults = { ``` `options` may also include a `start` option to allow writing data at -some position past the beginning of the file. Modifying a file rather +some position past the beginning of the file. 
Modifying a file rather than replacing it may require a `flags` mode of `r+` rather than the default mode `w`. The `defaultEncoding` can be any one of those accepted by [`Buffer`][]. @@ -708,7 +708,7 @@ deprecated: v1.0.0 * `exists` {Boolean} Test whether or not the given path exists by checking with the file system. -Then call the `callback` argument with either true or false. Example: +Then call the `callback` argument with either true or false. Example: ```js fs.exists('/etc/passwd', (exists) => { @@ -1286,7 +1286,7 @@ to a non-existent file. The exclusive flag may or may not work with network file systems. `flags` can also be a number as documented by open(2); commonly used constants -are available from `fs.constants`. On Windows, flags are translated to +are available from `fs.constants`. On Windows, flags are translated to their equivalent ones where applicable, e.g. `O_WRONLY` to `FILE_GENERIC_WRITE`, or `O_EXCL|O_CREAT` to `CREATE_NEW`, as accepted by CreateFileW. @@ -1373,7 +1373,7 @@ added: v0.1.8 * `err` {Error} * `files` {string[]|Buffer[]} -Asynchronous readdir(3). Reads the contents of a directory. +Asynchronous readdir(3). Reads the contents of a directory. The callback gets two arguments `(err, files)` where `files` is an array of the names of the files in the directory excluding `'.'` and `'..'`. @@ -1809,12 +1809,12 @@ added: v0.5.10 * `filename` {string|Buffer} Watch for changes on `filename`, where `filename` is either a file or a -directory. The returned object is a [`fs.FSWatcher`][]. +directory. The returned object is a [`fs.FSWatcher`][]. The second argument is optional. If `options` is provided as a string, it specifies the `encoding`. Otherwise `options` should be passed as an object. -The listener callback gets two arguments `(eventType, filename)`. `eventType` is either +The listener callback gets two arguments `(eventType, filename)`. `eventType` is either `'rename'` or `'change'`, and `filename` is the name of the file which triggered the event. @@ -1879,7 +1879,7 @@ this improves the usability of file watching. This is expected behavior. Providing `filename` argument in the callback is only supported on Linux and -Windows. Even on supported platforms, `filename` is not always guaranteed to +Windows. Even on supported platforms, `filename` is not always guaranteed to be provided. Therefore, don't assume that `filename` argument is always provided in the callback, and have some fallback logic if it is null. @@ -2000,7 +2000,7 @@ added: v0.11.5 * `written` {integer} * `string` {string} -Write `string` to the file specified by `fd`. If `string` is not a string, then +Write `string` to the file specified by `fd`. If `string` is not a string, then the value will be coerced to one. `position` refers to the offset from the beginning of the file where this data diff --git a/doc/api/globals.md b/doc/api/globals.md index 054bc2300bd411..79b11d0bde6c0e 100644 --- a/doc/api/globals.md +++ b/doc/api/globals.md @@ -188,7 +188,7 @@ added: v0.1.13 * {Function} -To require modules. See the [Modules][] section. `require` is not actually a +To require modules. See the [Modules][] section. `require` is not actually a global but rather local to each module. 
### require.cache @@ -221,14 +221,14 @@ Process files with the extension `.sjs` as `.js`: require.extensions['.sjs'] = require.extensions['.js']; ``` -**Deprecated** In the past, this list has been used to load +**Deprecated** In the past, this list has been used to load non-JavaScript modules into Node.js by compiling them on-demand. However, in practice, there are much better ways to do this, such as loading modules via some other Node.js program, or compiling them to JavaScript ahead of time. Since the Module system is locked, this feature will probably never go -away. However, it may have subtle bugs and complexities that are best +away. However, it may have subtle bugs and complexities that are best left untouched. ### require.resolve() diff --git a/doc/api/http.md b/doc/api/http.md index 597aa3fda179ed..00b0bb5b3b8c88 100644 --- a/doc/api/http.md +++ b/doc/api/http.md @@ -9,7 +9,7 @@ To use the HTTP server and client one must `require('http')`. The HTTP interfaces in Node.js are designed to support many features of the protocol which have been traditionally difficult to use. In particular, large, possibly chunk-encoded, messages. The interface is -careful to never buffer entire requests or responses--the +careful to never buffer entire requests or responses — the user is able to stream data. HTTP message headers are represented by an object like this: @@ -33,7 +33,7 @@ parse the actual headers or the body. See [`message.headers`][] for details on how duplicate headers are handled. The raw headers as they were received are retained in the `rawHeaders` -property, which is an array of `[key, value, key2, value2, ...]`. For +property, which is an array of `[key, value, key2, value2, ...]`. For example, the previous message header object might have a `rawHeaders` list like the following: @@ -122,9 +122,9 @@ added: v0.3.4 for TCP Keep-Alive packets. Ignored when the `keepAlive` option is `false` or `undefined`. Default = `1000`. * `maxSockets` {number} Maximum number of sockets to allow per - host. Default = `Infinity`. + host. Default = `Infinity`. * `maxFreeSockets` {number} Maximum number of sockets to leave open - in a free state. Only relevant if `keepAlive` is set to `true`. + in a free state. Only relevant if `keepAlive` is set to `true`. Default = `256`. The default [`http.globalAgent`][] that is used by [`http.request()`][] has all @@ -202,9 +202,9 @@ added: v0.11.4 Destroy any sockets that are currently in use by the agent. -It is usually not necessary to do this. However, if you are using an +It is usually not necessary to do this. However, if you are using an agent with `keepAlive` enabled, then it is best to explicitly shut down -the agent when you know that it will no longer be used. Otherwise, +the agent when you know that it will no longer be used. Otherwise, sockets may hang open for quite a long time before the server terminates them. @@ -216,7 +216,7 @@ added: v0.11.4 * {Object} An object which contains arrays of sockets currently awaiting use by -the agent when `keepAlive` is enabled. Do not modify. +the agent when `keepAlive` is enabled. Do not modify. ### agent.getName(options) -This object is created internally and returned from [`http.request()`][]. It -represents an _in-progress_ request whose header has already been queued. The +This object is created internally and returned from [`http.request()`][]. It +represents an _in-progress_ request whose header has already been queued. 
The header is still mutable using the `setHeader(name, value)`, `getHeader(name)`, -`removeHeader(name)` API. The actual header will be sent along with the first +`removeHeader(name)` API. The actual header will be sent along with the first data chunk or when closing the connection. To get the response, add a listener for [`'response'`][] to the request object. [`'response'`][] will be emitted from the request object when the response -headers have been received. The [`'response'`][] event is executed with one +headers have been received. The [`'response'`][] event is executed with one argument which is an instance of [`http.IncomingMessage`][]. During the [`'response'`][] event, one can add listeners to the response object; particularly to listen for the `'data'` event. If no [`'response'`][] handler is added, then the response will be -entirely discarded. However, if you add a [`'response'`][] event handler, +entirely discarded. However, if you add a [`'response'`][] event handler, then you **must** consume the data from the response object, either by calling `response.read()` whenever there is a `'readable'` event, or by adding a `'data'` handler, or by calling the `.resume()` method. -Until the data is consumed, the `'end'` event will not fire. Also, until +Until the data is consumed, the `'end'` event will not fire. Also, until the data is read it will consume memory that can eventually lead to a 'process out of memory' error. @@ -517,11 +517,11 @@ added: v1.6.0 Flush the request headers. For efficiency reasons, Node.js normally buffers the request headers until you -call `request.end()` or write the first chunk of request data. It then tries +call `request.end()` or write the first chunk of request data. It then tries hard to pack the request headers and data into a single TCP packet. That's usually what you want (it saves a TCP round-trip) but not when the first -data is not sent until possibly much later. `request.flushHeaders()` lets you bypass +data is not sent until possibly much later. `request.flushHeaders()` lets you bypass the optimization and kickstart the request. ### request.setNoDelay([noDelay]) @@ -567,9 +567,9 @@ added: v0.1.29 * `encoding` {string} * `callback` {Function} -Sends a chunk of the body. By calling this method +Sends a chunk of the body. By calling this method many times, the user can stream a request body to a -server--in that case it is suggested to use the +server — in that case it is suggested to use the `['Transfer-Encoding', 'chunked']` header line when creating the request. @@ -733,7 +733,7 @@ added: v0.1.90 * `callback` {Function} -Stops the server from accepting new connections. See [`net.Server.close()`][]. +Stops the server from accepting new connections. See [`net.Server.close()`][]. ### server.listen(handle[, callback]) -This object is created internally by an HTTP server--not by the user. It is +This object is created internally by an HTTP server — not by the user. It is passed as the second parameter to the [`'request'`][] event. The response implements, but does not inherit from, the [Writable Stream][] @@ -1015,8 +1015,8 @@ added: v0.4.0 * `name` {string} * `value` {string} -Sets a single header value for implicit headers. If this header already exists -in the to-be-sent headers, its value will be replaced. Use an array of strings +Sets a single header value for implicit headers. If this header already exists +in the to-be-sent headers, its value will be replaced. 
Use an array of strings here if you need to send multiple headers with the same name. Example: @@ -1056,12 +1056,12 @@ added: v0.9.12 * `msecs` {number} * `callback` {Function} -Sets the Socket's timeout value to `msecs`. If a callback is +Sets the Socket's timeout value to `msecs`. If a callback is provided, then it is added as a listener on the `'timeout'` event on the response object. If no `'timeout'` listener is added to the request, the response, or -the server, then sockets are destroyed when they time out. If you +the server, then sockets are destroyed when they time out. If you assign a handler on the request, the response, or the server's `'timeout'` events, then it is your responsibility to handle timed out sockets. @@ -1313,8 +1313,8 @@ added: v0.11.6 The raw request/response headers list exactly as they were received. -Note that the keys and values are in the same list. It is *not* a -list of tuples. So, the even-numbered offsets are key values, and the +Note that the keys and values are in the same list. It is *not* a +list of tuples. So, the even-numbered offsets are key values, and the odd-numbered offsets are the associated values. Header names are not lowercased, and duplicates are not merged. @@ -1341,7 +1341,7 @@ added: v0.11.6 * {Array} The raw request/response trailer keys and values exactly as they were -received. Only populated at the `'end'` event. +received. Only populated at the `'end'` event. ### message.setTimeout(msecs, callback) -An Agent object for HTTPS similar to [`http.Agent`][]. See [`https.request()`][] +An Agent object for HTTPS similar to [`http.Agent`][]. See [`https.request()`][] for more information. ## Class: https.Server diff --git a/doc/api/modules.md b/doc/api/modules.md index e87b209395fb1a..59c675f17c813e 100644 --- a/doc/api/modules.md +++ b/doc/api/modules.md @@ -28,7 +28,7 @@ exports.circumference = (r) => 2 * PI * r; ``` The module `circle.js` has exported the functions `area()` and -`circumference()`. To add functions and objects to the root of your module, +`circumference()`. To add functions and objects to the root of your module, you can add them to the special `exports` object. Variables local to the module will be private, because the module is wrapped @@ -92,7 +92,7 @@ Let's say that we wanted to have the folder at specific version of a package. Packages can depend on one another. In order to install package `foo`, you -may have to install a specific version of package `bar`. The `bar` package +may have to install a specific version of package `bar`. The `bar` package may itself have dependencies, and in some cases, these dependencies may even collide or form cycles. @@ -121,12 +121,12 @@ the version that is symlinked into Furthermore, to make the module lookup process even more optimal, rather than putting packages directly in `/usr/lib/node`, we could put them in -`/usr/lib/node_modules//`. Then Node.js will not bother +`/usr/lib/node_modules//`. Then Node.js will not bother looking for missing dependencies in `/usr/node_modules` or `/node_modules`. In order to make modules available to the Node.js REPL, it might be useful to also add the `/usr/lib/node_modules` folder to the `$NODE_PATH` environment -variable. Since the module lookups using `node_modules` folders are all +variable. Since the module lookups using `node_modules` folders are all relative, and based on the real path of the files making the calls to `require()`, the packages themselves can be anywhere. 
@@ -194,12 +194,12 @@ NODE_MODULES_PATHS(START) -Modules are cached after the first time they are loaded. This means +Modules are cached after the first time they are loaded. This means (among other things) that every call to `require('foo')` will get exactly the same object returned, if it would resolve to the same file. Multiple calls to `require('foo')` may not cause the module code to be -executed multiple times. This is an important feature. With it, +executed multiple times. This is an important feature. With it, "partially done" objects can be returned, thus allowing transitive dependencies to be loaded even when they would cause cycles. @@ -210,7 +210,7 @@ function, and call that function. -Modules are cached based on their resolved filename. Since modules may +Modules are cached based on their resolved filename. Since modules may resolve to a different filename based on the location of the calling module (loading from `node_modules` folders), it is not a *guarantee* that `require('foo')` will always return the exact same object, if it @@ -226,14 +226,14 @@ irrespective of whether or not `./foo` and `./FOO` are the same file. -Node.js has several modules compiled into the binary. These modules are +Node.js has several modules compiled into the binary. These modules are described in greater detail elsewhere in this documentation. The core modules are defined within Node.js's source and are located in the `lib/` folder. Core modules are always preferentially loaded if their identifier is -passed to `require()`. For instance, `require('http')` will always +passed to `require()`. For instance, `require('http')` will always return the built in HTTP module, even if there is a file by that name. ## Cycles @@ -273,13 +273,13 @@ console.log('b done'); console.log('main starting'); const a = require('./a.js'); const b = require('./b.js'); -console.log('in main, a.done=%j, b.done=%j', a.done, b.done); +console.log('in main, a.done = %j, b.done = %j', a.done, b.done); ``` -When `main.js` loads `a.js`, then `a.js` in turn loads `b.js`. At that -point, `b.js` tries to load `a.js`. In order to prevent an infinite +When `main.js` loads `a.js`, then `a.js` in turn loads `b.js`. At that +point, `b.js` tries to load `a.js`. In order to prevent an infinite loop, an **unfinished copy** of the `a.js` exports object is returned to the -`b.js` module. `b.js` then finishes loading, and its `exports` object is +`b.js` module. `b.js` then finishes loading, and its `exports` object is provided to the `a.js` module. By the time `main.js` has loaded both modules, they're both finished. @@ -294,7 +294,7 @@ in b, a.done = false b done in a, b.done = true a done -in main, a.done=true, b.done=true +in main, a.done = true, b.done = true ``` If you have cyclic module dependencies in your program, make sure to @@ -312,7 +312,7 @@ required filename with the added extensions: `.js`, `.json`, and finally parsed as JSON text files. `.node` files are interpreted as compiled addon modules loaded with `dlopen`. -A required module prefixed with `'/'` is an absolute path to the file. For +A required module prefixed with `'/'` is an absolute path to the file. For example, `require('/home/marco/foo.js')` will load the file at `/home/marco/foo.js`. @@ -336,7 +336,7 @@ There are three ways in which a folder may be passed to `require()` as an argument. The first is to create a `package.json` file in the root of the folder, -which specifies a `main` module. An example package.json file might +which specifies a `main` module. 
An example package.json file might look like this: ```json @@ -350,7 +350,7 @@ If this was in a folder at `./some-library`, then This is the extent of Node.js's awareness of package.json files. -Note: If the file specified by the `"main"` entry of `package.json` is missing +Note: If the file specified by the `'main'` entry of `package.json` is missing and can not be resolved, Node.js will report the entire module as missing with the default error: @@ -360,7 +360,7 @@ Error: Cannot find module 'some-library' If there is no package.json file present in the directory, then Node.js will attempt to load an `index.js` or `index.node` file out of that -directory. For example, if there was no package.json file in the above +directory. For example, if there was no package.json file in the above example, then `require('./some-library')` would attempt to load: * `./some-library/index.js` @@ -403,7 +403,7 @@ same module resolution semantics. If the `NODE_PATH` environment variable is set to a colon-delimited list of absolute paths, then Node.js will search those paths for modules if they -are not found elsewhere. (Note: On Windows, `NODE_PATH` is delimited by +are not found elsewhere. (Note: On Windows, `NODE_PATH` is delimited by semicolons instead of colons.) `NODE_PATH` was originally created to support loading modules from @@ -412,7 +412,7 @@ varying paths before the current [module resolution][] algorithm was frozen. `NODE_PATH` is still supported, but is less necessary now that the Node.js ecosystem has settled on a convention for locating dependent modules. Sometimes deployments that rely on `NODE_PATH` show surprising behavior -when people are unaware that `NODE_PATH` must be set. Sometimes a +when people are unaware that `NODE_PATH` must be set. Sometimes a module's dependencies change, causing a different version (or even a different module) to be loaded as the `NODE_PATH` is searched. @@ -425,8 +425,8 @@ Additionally, Node.js will search in the following locations: Where `$HOME` is the user's home directory, and `$PREFIX` is Node.js's configured `node_prefix`. -These are mostly for historic reasons. **You are highly encouraged -to place your dependencies locally in `node_modules` folders.** They +These are mostly for historic reasons. **You are highly encouraged +to place your dependencies locally in `node_modules` folders.** They will be loaded faster, and more reliably. ## The module wrapper @@ -464,7 +464,7 @@ added: v0.1.16 * {Object} In each module, the `module` free variable is a reference to the object -representing the current module. For convenience, `module.exports` is +representing the current module. For convenience, `module.exports` is also accessible via the `exports` module-global. `module` is not actually a global but rather local to each module. @@ -515,7 +515,7 @@ a.on('ready', () => { Note that assignment to `module.exports` must be done immediately. It cannot be -done in any callbacks. This does not work: +done in any callbacks. This does not work: x.js: @@ -595,7 +595,7 @@ added: v0.1.16 * {string} -The identifier for the module. Typically this is the fully resolved +The identifier for the module. Typically this is the fully resolved filename. ### module.loaded @@ -629,7 +629,7 @@ The `module.require` method provides a way to load a module as if `require()` was called from the original module. Note that in order to do this, you must get a reference to the `module` -object. Since `require()` returns the `module.exports`, and the `module` is +object. 
Since `require()` returns the `module.exports`, and the `module` is typically *only* available within a specific module's code, it must be explicitly exported in order to be used. @@ -642,7 +642,7 @@ added: v0.3.7 * {Object} Provides general utility methods when interacting with instances of -`Module` -- the `module` variable often seen in file modules. Accessed +`Module` — the `module` variable often seen in file modules. Accessed via `require('module')`. ### module.builtinModules @@ -652,7 +652,7 @@ added: v6.13.0 * {string[]} -A list of the names of all modules provided by Node.js. Can be used to verify +A list of the names of all modules provided by Node.js. Can be used to verify if a module is maintained by a third-party module or not. [`__dirname`]: #modules_dirname diff --git a/doc/api/net.md b/doc/api/net.md index 8e40c2297df77a..0c9adf4bd1ab9d 100644 --- a/doc/api/net.md +++ b/doc/api/net.md @@ -121,7 +121,7 @@ connections use asynchronous `server.getConnections` instead. added: v0.9.7 --> -* Returns {net.Server} +* Returns: {net.Server} Asynchronously get the number of concurrent connections on the server. Works when sockets were sent to forks. @@ -147,7 +147,7 @@ already been bound to a port or domain socket. Listening on a file descriptor is not supported on Windows. -This function is asynchronous. When the server has been bound, +This function is asynchronous. When the server has been bound, [`'listening'`][] event will be emitted. The last parameter `callback` will be added as a listener for the [`'listening'`][] event. @@ -203,8 +203,8 @@ added: v0.1.90 Start a local socket server listening for connections on the given `path`. -This function is asynchronous. When the server has been bound, -[`'listening'`][] event will be emitted. The last parameter `callback` +This function is asynchronous. When the server has been bound, +[`'listening'`][] event will be emitted. The last parameter `callback` will be added as a listener for the [`'listening'`][] event. On UNIX, the local domain is usually known as the UNIX domain. The path is a @@ -218,7 +218,7 @@ unlinked*. On Windows, the local domain is implemented using a named pipe. The path *must* refer to an entry in `\\?\pipe\` or `\\.\pipe\`. Any characters are permitted, but the latter may do some processing of pipe names, such as resolving `..` -sequences. Despite appearances, the pipe name space is flat. Pipes will *not +sequences. Despite appearances, the pipe name space is flat. Pipes will *not persist*, they are removed when the last reference to them is closed. Do not forget JavaScript string escaping requires paths to be specified with double-backslashes, such as: @@ -252,8 +252,8 @@ The actual length will be determined by the OS through sysctl settings such as `tcp_max_syn_backlog` and `somaxconn` on Linux. The default value of this parameter is 511 (not 512). -This function is asynchronous. When the server has been bound, -[`'listening'`][] event will be emitted. The last parameter `callback` +This function is asynchronous. When the server has been bound, +[`'listening'`][] event will be emitted. The last parameter `callback` will be added as a listener for the [`'listening'`][] event. One issue some users run into is getting `EADDRINUSE` errors. This means that @@ -323,8 +323,8 @@ active server in the event system. If the server is already `unref`d calling added: v0.3.4 --> -This object is an abstraction of a TCP or local socket. `net.Socket` -instances implement a duplex Stream interface. 
They can be created by the +This object is an abstraction of a TCP or local socket. `net.Socket` +instances implement a duplex Stream interface. They can be created by the user and used as a client (with [`connect()`][]) or they can be created by Node.js and passed to the user through the `'connection'` event of a server. @@ -379,8 +379,8 @@ added: v0.1.90 * {Buffer} -Emitted when data is received. The argument `data` will be a `Buffer` or -`String`. Encoding of data is set by `socket.setEncoding()`. +Emitted when data is received. The argument `data` will be a `Buffer` or +`String`. Encoding of data is set by `socket.setEncoding()`. (See the [Readable Stream][] section for more information.) Note that the **data will be lost** if there is no listener when a `Socket` @@ -403,7 +403,7 @@ added: v0.1.90 Emitted when the other end of the socket sends a FIN packet. By default (`allowHalfOpen == false`) the socket will destroy its file -descriptor once it has written out its pending write queue. However, by +descriptor once it has written out its pending write queue. However, by setting `allowHalfOpen == true` the socket will not automatically `end()` its side allowing the user to write arbitrary amounts of data, with the caveat that the user is required to `end()` their side now. @@ -415,7 +415,7 @@ added: v0.1.90 * {Error} -Emitted when an error occurs. The `'close'` event will be called directly +Emitted when an error occurs. The `'close'` event will be called directly following this event. ### Event: 'lookup' @@ -426,9 +426,9 @@ added: v0.11.3 Emitted after resolving the hostname but before connecting. Not applicable to UNIX sockets. -* `err` {Error|null} The error object. See [`dns.lookup()`][]. +* `err` {Error|null} The error object. See [`dns.lookup()`][]. * `address` {string} The IP address. -* `family` {string|null} The address type. See [`dns.lookup()`][]. +* `family` {string|null} The address type. See [`dns.lookup()`][]. * `host` {string} The hostname. ### Event: 'timeout' @@ -718,7 +718,7 @@ added: v0.1.90 --> Sends data on the socket. The second parameter specifies the encoding in the -case of a string--it defaults to UTF8 encoding. +case of a string — it defaults to UTF8 encoding. Returns `true` if the entire data was flushed successfully to the kernel buffer. Returns `false` if all or part of the data was queued in user memory. diff --git a/doc/api/os.md b/doc/api/os.md index 011ce5e2bf1742..b8f5c76ce00793 100644 --- a/doc/api/os.md +++ b/doc/api/os.md @@ -225,7 +225,7 @@ The `os.loadavg()` method returns an array containing the 1, 5, and 15 minute load averages. The load average is a measure of system activity, calculated by the operating -system and expressed as a fractional number. As a rule of thumb, the load +system and expressed as a fractional number. As a rule of thumb, the load average should ideally be less than the number of logical CPUs in the system. The load average is a UNIX-specific concept with no real equivalent on @@ -392,7 +392,7 @@ added: v6.0.0 * Returns: {Object} The `os.userInfo()` method returns information about the currently effective -user -- on POSIX platforms, this is typically a subset of the password file. The +user — on POSIX platforms, this is typically a subset of the password file. The returned object includes the `username`, `uid`, `gid`, `shell`, and `homedir`. On Windows, the `uid` and `gid` fields are `-1`, and `shell` is `null`. 
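A short sketch combining the two `os` methods documented above; the logging format is illustrative only:

```js
const os = require('os');

// Rule of thumb from above: the 1 minute load average should ideally stay
// below the number of logical CPUs. os.loadavg() returns the 1, 5 and 15
// minute averages (always zeros on Windows, where the concept does not apply).
const [oneMinute] = os.loadavg();
console.log(`load: ${oneMinute.toFixed(2)} across ${os.cpus().length} CPUs`);

// On Windows, uid and gid are -1 and shell is null.
const { username, homedir, shell } = os.userInfo();
console.log(`user: ${username}, home: ${homedir}, shell: ${shell}`);
```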
diff --git a/doc/api/path.md b/doc/api/path.md index 807cf9b88c9869..7ab4dabb79e04f 100644 --- a/doc/api/path.md +++ b/doc/api/path.md @@ -145,7 +145,7 @@ added: v0.1.25 The `path.extname()` method returns the extension of the `path`, from the last occurrence of the `.` (period) character to end of string in the last portion of -the `path`. If there is no `.` in the last portion of the `path`, or if the +the `path`. If there is no `.` in the last portion of the `path`, or if the first character of the basename of `path` (see `path.basename()`) is `.`, then an empty string is returned. @@ -380,7 +380,7 @@ path.parse('/home/user/dir/file.txt'); │ root │ │ name │ ext │ " / home/user/dir / file .txt " └──────┴──────────────┴──────┴─────┘ -(all spaces in the "" line should be ignored -- they are purely for formatting) +(all spaces in the "" line should be ignored — they are purely for formatting) ``` On Windows: @@ -404,7 +404,7 @@ path.parse('C:\\path\\dir\\file.txt'); │ root │ │ name │ ext │ " C:\ path\dir \ file .txt " └──────┴──────────────┴──────┴─────┘ -(all spaces in the "" line should be ignored -- they are purely for formatting) +(all spaces in the "" line should be ignored — they are purely for formatting) ``` A [`TypeError`][] is thrown if `path` is not a string. diff --git a/doc/api/process.md b/doc/api/process.md index c9aebfa8d2cce4..af20ddb1f751b1 100644 --- a/doc/api/process.md +++ b/doc/api/process.md @@ -189,7 +189,7 @@ process will exit with a non-zero exit code and the stack trace will be printed. This is to avoid infinite recursion. Attempting to resume normally after an uncaught exception can be similar to -pulling out of the power cord when upgrading a computer -- nine out of ten +pulling out of the power cord when upgrading a computer — nine out of ten times nothing happens - but the 10th time, the system becomes corrupted. The correct use of `'uncaughtException'` is to perform synchronous cleanup @@ -388,7 +388,7 @@ For example: process.stdin.resume(); process.on('SIGINT', () => { - console.log('Received SIGINT. Press Control-D to exit.'); + console.log('Received SIGINT. Press Control-D to exit.'); }); ``` @@ -397,7 +397,7 @@ terminal programs. It is important to take note of the following: -* `SIGUSR1` is reserved by Node.js to start the debugger. It's possible to +* `SIGUSR1` is reserved by Node.js to start the debugger. It's possible to install a listener but doing so will _not_ stop the debugger from starting. * `SIGTERM` and `SIGINT` have default handlers on non-Windows platforms that resets the terminal mode before exiting with code `128 + signal number`. If @@ -470,7 +470,7 @@ added: v0.1.27 The `process.argv` property returns an array containing the command line arguments passed when the Node.js process was launched. The first element will be [`process.execPath`]. See `process.argv0` if access to the original value of -`argv[0]` is needed. The second element will be the path to the JavaScript +`argv[0]` is needed. The second element will be the path to the JavaScript file being executed. The remaining elements will be any additional command line arguments. @@ -890,7 +890,7 @@ added: v0.1.13 The `process.exit()` method instructs Node.js to terminate the process synchronously with an exit status of `code`. If `code` is omitted, exit uses either the 'success' code `0` or the value of `process.exitCode` if it has been -set. Node.js will not terminate until all the [`'exit'`] event listeners are +set. 
Node.js will not terminate until all the [`'exit'`] event listeners are called. To exit with a 'failure' code: @@ -1129,7 +1129,7 @@ Windows platforms will throw an error if the `pid` is used to kill a process group. *Note*:Even though the name of this function is `process.kill()`, it is really -just a signal sender, like the `kill` system call. The signal sent may do +just a signal sender, like the `kill` system call. The signal sent may do something other than kill the target process. For example: @@ -1219,7 +1219,7 @@ Once the current turn of the event loop turn runs to completion, all callbacks currently in the next tick queue will be called. This is *not* a simple alias to [`setTimeout(fn, 0)`][]. It is much more -efficient. It runs before any additional I/O events (including +efficient. It runs before any additional I/O events (including timers) fire in subsequent ticks of the event loop. ```js @@ -1254,7 +1254,7 @@ thing.getReadyForStuff(); ``` It is very important for APIs to be either 100% synchronous or 100% -asynchronous. Consider this example: +asynchronous. Consider this example: ```js // WARNING! DO NOT USE! BAD UNSAFE HAZARD! @@ -1296,7 +1296,7 @@ function definitelyAsync(arg, cb) { ``` *Note*: the next tick queue is completely drained on each pass of the -event loop **before** additional I/O is processed. As a result, +event loop **before** additional I/O is processed. As a result, recursively setting nextTick callbacks will block any I/O from happening, just like a `while(true);` loop. @@ -1371,8 +1371,8 @@ tarball. Support) line the current release is part of. This property only exists for LTS releases and is `undefined` for all other release types, including stable releases. Current valid values are: - - `"Argon"` for the v4.x LTS line beginning with v4.2.0. - - `"Boron"` for the v6.x LTS line beginning with v6.9.0. + - `'Argon'` for the v4.x LTS line beginning with v4.2.0. + - `'Boron'` for the v6.x LTS line beginning with v6.9.0. * `sourceUrl` {string} an absolute URL pointing to a _`.tar.gz`_ file containing the source code of the current release. * `headersUrl`{string} an absolute URL pointing to a _`.tar.gz`_ file containing @@ -1461,7 +1461,7 @@ added: v2.0.0 The `process.seteuid()` method sets the effective user identity of the process. (See seteuid(2).) The `id` can be passed as either a numeric ID or a username -string. If a username is specified, the method blocks while resolving the +string. If a username is specified, the method blocks while resolving the associated numeric ID. ```js @@ -1487,7 +1487,7 @@ added: v0.1.31 * `id` {string|number} The group name or ID The `process.setgid()` method sets the group identity of the process. (See -setgid(2).) The `id` can be passed as either a numeric ID or a group name +setgid(2).) The `id` can be passed as either a numeric ID or a group name string. If a group name is specified, this method blocks while resolving the associated numeric ID. @@ -1528,7 +1528,7 @@ added: v0.1.28 --> The `process.setuid(id)` method sets the user identity of the process. (See -setuid(2).) The `id` can be passed as either a numeric ID or a username string. +setuid(2).) The `id` can be passed as either a numeric ID or a username string. If a username is specified, the method blocks while resolving the associated numeric ID. @@ -1790,7 +1790,7 @@ Will generate an object similar to: ## Exit Codes Node.js will normally exit with a `0` status code when no more async -operations are pending. 
The following status codes are used in other +operations are pending. The following status codes are used in other cases: * `1` **Uncaught Fatal Exception** - There was an uncaught exception, @@ -1798,12 +1798,12 @@ cases: handler. * `2` - Unused (reserved by Bash for builtin misuse) * `3` **Internal JavaScript Parse Error** - The JavaScript source code - internal in Node.js's bootstrapping process caused a parse error. This + internal in Node.js's bootstrapping process caused a parse error. This is extremely rare, and generally can only happen during development of Node.js itself. * `4` **Internal JavaScript Evaluation Failure** - The JavaScript source code internal in Node.js's bootstrapping process failed to - return a function value when evaluated. This is extremely rare, and + return a function value when evaluated. This is extremely rare, and generally can only happen during development of Node.js itself. * `5` **Fatal Error** - There was a fatal unrecoverable error in V8. Typically a message will be printed to stderr with the prefix `FATAL @@ -1813,23 +1813,23 @@ cases: function was somehow set to a non-function, and could not be called. * `7` **Internal Exception Handler Run-Time Failure** - There was an uncaught exception, and the internal fatal exception handler - function itself threw an error while attempting to handle it. This + function itself threw an error while attempting to handle it. This can happen, for example, if a [`'uncaughtException'`][] or `domain.on('error')` handler throws an error. -* `8` - Unused. In previous versions of Node.js, exit code 8 sometimes +* `8` - Unused. In previous versions of Node.js, exit code 8 sometimes indicated an uncaught exception. * `9` - **Invalid Argument** - Either an unknown option was specified, or an option requiring a value was provided without a value. * `10` **Internal JavaScript Run-Time Failure** - The JavaScript source code internal in Node.js's bootstrapping process threw an error - when the bootstrapping function was called. This is extremely rare, + when the bootstrapping function was called. This is extremely rare, and generally can only happen during development of Node.js itself. * `12` **Invalid Debug Argument** - The `--debug`, `--inspect` and/or `--debug-brk` options were set, but the port number chosen was invalid or unavailable. * `>128` **Signal Exits** - If Node.js receives a fatal signal such as `SIGKILL` or `SIGHUP`, then its exit code will be `128` plus the - value of the signal code. This is a standard POSIX practice, since + value of the signal code. This is a standard POSIX practice, since exit codes are defined to be 7-bit integers, and signal exits set the high-order bit, and then contain the value of the signal code. diff --git a/doc/api/readline.md b/doc/api/readline.md index 04c838a0024ed4..d31af7ff3f6b02 100644 --- a/doc/api/readline.md +++ b/doc/api/readline.md @@ -295,7 +295,7 @@ added: v0.1.98 * `shift` {boolean} `true` to indicate the `` key. * `name` {string} The name of the a key. -The `rl.write()` method will write either `data` or a key sequence identified +The `rl.write()` method will write either `data` or a key sequence identified by `key` to the `output`. The `key` argument is supported only if `output` is a [TTY][] text terminal. 
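A minimal sketch of both forms of `rl.write()`, assuming `output` is an interactive terminal:

```js
const readline = require('readline');

const rl = readline.createInterface({
  input: process.stdin,
  output: process.stdout
});

// Write literal data to the output.
rl.write('Delete this!');

// Write a key sequence instead: simulate Ctrl+U, which deletes the line
// written above. The key form only has an effect when output is a TTY.
rl.write(null, { ctrl: true, name: 'u' });

rl.close();
```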
diff --git a/doc/api/repl.md b/doc/api/repl.md index 17c44fbe36c7ba..e8c87240b1a579 100644 --- a/doc/api/repl.md +++ b/doc/api/repl.md @@ -96,7 +96,7 @@ declared either implicitly or using the `var` keyword are declared at the The default evaluator provides access to any variables that exist in the global scope. It is possible to expose a variable to the REPL explicitly by assigning -it to the `context` object associated with each `REPLServer`. For example: +it to the `context` object associated with each `REPLServer`. For example: ```js const repl = require('repl'); @@ -391,7 +391,7 @@ added: v0.1.91 stream upon instantiation. * `eval` {Function} The function to be used when evaluating each given line of input. Defaults to an async wrapper for the JavaScript `eval()` - function. An `eval` function can error with `repl.Recoverable` to indicate + function. An `eval` function can error with `repl.Recoverable` to indicate the input was incomplete and prompt for additional lines. * `useColors` {boolean} If `true`, specifies that the default `writer` function should include ANSI color styling to REPL output. If a custom @@ -414,7 +414,7 @@ added: v0.1.91 * `repl.REPL_MODE_STRICT` - evaluates expressions in strict mode. This is equivalent to prefacing every repl statement with `'use strict'`. * `repl.REPL_MODE_MAGIC` - attempt to evaluates expressions in default - mode. If expressions fail to parse, re-try in strict mode. + mode. If expressions fail to parse, re-try in strict mode. * `breakEvalOnSigint` - Stop evaluating the current piece of code when `SIGINT` is received, i.e. `Ctrl+C` is pressed. This cannot be used together with a custom `eval` function. Defaults to `false`. @@ -456,7 +456,7 @@ environment variables: - `NODE_REPL_HISTORY` - When a valid path is given, persistent REPL history will be saved to the specified file rather than `.node_repl_history` in the - user's home directory. Setting this value to `""` will disable persistent + user's home directory. Setting this value to `''` will disable persistent REPL history. Whitespace will be trimmed from the value. - `NODE_REPL_HISTORY_SIZE` - Defaults to `1000`. Controls how many lines of history will be persisted if history is available. Must be a positive number. @@ -469,7 +469,7 @@ environment variables: By default, the Node.js REPL will persist history between `node` REPL sessions by saving inputs to a `.node_repl_history` file located in the user's home directory. This can be disabled by setting the environment variable -`NODE_REPL_HISTORY=""`. +`NODE_REPL_HISTORY=''`. #### NODE_REPL_HISTORY_FILE * `size` {number} Optional argument to specify how much data to read. -* Return {string|Buffer|null} +* Returns: {string|Buffer|null} The `readable.read()` method pulls some data out of the internal buffer and returns it. If no data available to be read, `null` is returned. By default, @@ -1364,7 +1364,7 @@ It is recommended that errors occurring during the processing of the the callback and passing the error as the first argument. This will cause an `'error'` event to be emitted by the Writable. Throwing an Error from within `writable._write()` can result in unexpected and inconsistent behavior depending -on how the stream is being used. Using the callback ensures consistent and +on how the stream is being used. Using the callback ensures consistent and predictable handling of errors. ```js @@ -1543,7 +1543,7 @@ user programs. 
#### readable.push(chunk[, encoding]) * `chunk` {Buffer|null|string} Chunk of data to push into the read queue -* `encoding` {string} Encoding of String chunks. Must be a valid +* `encoding` {string} Encoding of String chunks. Must be a valid Buffer encoding, such as `'utf8'` or `'ascii'` * Returns: {boolean} `true` if additional chunks of data may continued to be pushed; `false` otherwise. @@ -1963,7 +1963,7 @@ The `transform._transform()` method is prefixed with an underscore because it is internal to the class that defines it, and should never be called directly by user programs. -`transform._transform()` is never called in parallel; streams implement a +`transform._transform()` is never called in parallel; streams implement a queue mechanism, and to receive the next chunk, `callback` must be called, either synchronously or asynchronously. diff --git a/doc/api/tls.md b/doc/api/tls.md index d307b376ef58e2..9b6027ab11f278 100644 --- a/doc/api/tls.md +++ b/doc/api/tls.md @@ -374,7 +374,7 @@ added: v0.6.0 --> Returns the bound address, the address family name, and port of the -server as reported by the operating system. See [`net.Server.address()`][] for +server as reported by the operating system. See [`net.Server.address()`][] for more information. ### server.close([callback]) @@ -472,7 +472,7 @@ added: v0.11.4 * `options` {Object} * `isServer`: The SSL/TLS protocol is asymmetrical, TLSSockets must know if they are to behave as a server or a client. If `true` the TLS socket will be - instantiated as a server. Defaults to `false`. + instantiated as a server. Defaults to `false`. * `server` {net.Server} An optional [`net.Server`][] instance. * `requestCert`: Whether to authenticate the remote peer by requesting a certificate. Clients always request a server certificate. Servers @@ -624,7 +624,7 @@ For example: { ... another certificate, possibly with a .issuerCertificate ... }, raw: < RAW DER buffer >, valid_from: 'Nov 11 09:52:22 2009 GMT', - valid_to: 'Nov 6 09:52:22 2029 GMT', + valid_to: 'Nov 6 09:52:22 2029 GMT', fingerprint: '2A:7A:C2:DD:E5:F9:CC:53:72:35:99:7A:02:5A:71:38:52:EC:8A:DF', serialNumber: 'B9B0D332A1AA5635' } ``` @@ -795,7 +795,7 @@ changes: rather than creating a new socket. Typically, this is an instance of [`net.Socket`][], but any `Duplex` stream is allowed. If this option is specified, `path`, `host` and `port` are ignored, - except for certificate validation. Usually, a socket is already connected + except for certificate validation. Usually, a socket is already connected when passed to `tls.connect()`, but it can be connected later. Note that connection/disconnection/destruction of `socket` is the user's responsibility, calling `tls.connect()` will not cause `net.connect()` to be @@ -910,7 +910,7 @@ added: v0.11.13 to decrypt it. * `key` {string|string[]|Buffer|Buffer[]|Object[]} Optional private keys in PEM format. PEM allows the option of private keys being encrypted. Encrypted - keys will be decrypted with `options.passphrase`. Multiple keys using + keys will be decrypted with `options.passphrase`. Multiple keys using different algorithms can be provided either as an array of unencrypted key strings or buffers, or an array of objects in the form `{pem: [, passphrase: ]}`. 
The object form can only occur in @@ -923,7 +923,7 @@ added: v0.11.13 consist of the PEM formatted certificate for a provided private `key`, followed by the PEM formatted intermediate certificates (if any), in order, and not including the root CA (the root CA must be pre-known to the peer, - see `ca`). When providing multiple cert chains, they do not have to be in + see `ca`). When providing multiple cert chains, they do not have to be in the same order as their private keys in `key`. If the intermediate certificates are not provided, the peer will not be able to validate the certificate, and the handshake will fail. @@ -933,7 +933,7 @@ added: v0.11.13 using this option. The value can be a string or Buffer, or an Array of strings and/or Buffers. Any string or Buffer can contain multiple PEM CAs concatenated together. The peer's certificate must be chainable to a CA - trusted by the server for the connection to be authenticated. When using + trusted by the server for the connection to be authenticated. When using certificates that are not chainable to a well-known CA, the certificate's CA must be explicitly specified as a trusted or the connection will fail to authenticate. @@ -945,7 +945,7 @@ added: v0.11.13 * `crl` {string|string[]|Buffer|Buffer[]} Optional PEM formatted CRLs (Certificate Revocation Lists). * `ciphers` {string} Optional cipher suite specification, replacing the - default. For more information, see [modifying the default cipher suite][]. + default. For more information, see [modifying the default cipher suite][]. * `honorCipherOrder` {boolean} Attempt to use the server's cipher suite preferences instead of the client's. When `true`, causes `SSL_OP_CIPHER_SERVER_PREFERENCE` to be set in `secureOptions`, see @@ -954,7 +954,7 @@ added: v0.11.13 APIs that create secure contexts leave it unset. * `ecdhCurve` {string} A string describing a named curve to use for ECDH key agreement or `false` to disable ECDH. Defaults to - [`tls.DEFAULT_ECDH_CURVE`]. Use [`crypto.getCurves()`][] to obtain a list + [`tls.DEFAULT_ECDH_CURVE`]. Use [`crypto.getCurves()`][] to obtain a list of available curve names. On recent releases, `openssl ecparam -list_curves` will also display the name and description of each available elliptic curve. * `dhparam` {string|Buffer} Diffie Hellman parameters, required for @@ -964,8 +964,8 @@ added: v0.11.13 for stronger security. If omitted or invalid, the parameters are silently discarded and DHE ciphers will not be available. * `secureProtocol` {string} Optional SSL method to use, default is - `"SSLv23_method"`. The possible values are listed as [SSL_METHODS][], use - the function names as strings. For example, `"SSLv3_method"` to force SSL + `'SSLv23_method'`. The possible values are listed as [SSL_METHODS][], use + the function names as strings. For example, `'SSLv3_method'` to force SSL version 3. * `secureOptions` {number} Optionally affect the OpenSSL protocol behavior, which is not usually necessary. This should be used carefully if at all! @@ -1027,7 +1027,7 @@ added: v0.3.2 servers, the identity options (`pfx` or `key`/`cert`) are usually required. * `secureConnectionListener` {Function} -Creates a new [tls.Server][]. The `secureConnectionListener`, if provided, is +Creates a new [tls.Server][]. The `secureConnectionListener`, if provided, is automatically set as a listener for the [`'secureConnection'`][] event. 
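As a hedged client-side counterpart to the options described above (the host, port, and `ca.pem` file are placeholders):

```js
const tls = require('tls');
const fs = require('fs');

const socket = tls.connect({
  host: 'example.org',  // placeholder
  port: 8443,           // placeholder
  // The peer certificate must chain to a trusted CA; supply `ca` when the
  // server's certificate is not signed by a well-known CA.
  ca: [fs.readFileSync('ca.pem')]
}, () => {
  console.log('client connected,',
              socket.authorized ? 'authorized' : 'not authorized');
  socket.end();
});
```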
The following illustrates a simple echo server: diff --git a/doc/api/url.md b/doc/api/url.md index 61899d3c0ccf7b..5b9c3c23f43426 100644 --- a/doc/api/url.md +++ b/doc/api/url.md @@ -51,7 +51,7 @@ properties of a WHATWG `URL` object. ├─────────────┴─────────────────────┴─────────────────────┴──────────┴────────────────┴───────┤ │ href │ └─────────────────────────────────────────────────────────────────────────────────────────────┘ -(all spaces in the "" line should be ignored -- they are purely for formatting) +(all spaces in the "" line should be ignored — they are purely for formatting) ``` Parsing the URL string using the WHATWG API: @@ -555,7 +555,7 @@ Instantiate a new `URLSearchParams` object with an iterable map in a way that is similar to [`Map`][]'s constructor. `iterable` can be an Array or any iterable object. That means `iterable` can be another `URLSearchParams`, in which case the constructor will simply create a clone of the provided -`URLSearchParams`. Elements of `iterable` are key-value pairs, and can +`URLSearchParams`. Elements of `iterable` are key-value pairs, and can themselves be any iterable object. Duplicate keys are allowed. diff --git a/doc/api/util.md b/doc/api/util.md index 9e29cfc073ccc5..e1ad92095539bb 100644 --- a/doc/api/util.md +++ b/doc/api/util.md @@ -23,9 +23,9 @@ added: v0.11.3 The `util.debuglog()` method is used to create a function that conditionally writes debug messages to `stderr` based on the existence of the `NODE_DEBUG` -environment variable. If the `section` name appears within the value of that +environment variable. If the `section` name appears within the value of that environment variable, then the returned function operates similar to -[`console.error()`][]. If not, then the returned function is a no-op. +[`console.error()`][]. If not, then the returned function is a no-op. For example: @@ -43,7 +43,7 @@ it will output something like: FOO 3245: hello from foo [123] ``` -where `3245` is the process id. If it is not run with that +where `3245` is the process id. If it is not run with that environment variable set, then it will not print anything. Multiple comma-separated `section` names may be specified in the `NODE_DEBUG` @@ -108,7 +108,7 @@ corresponding argument. Supported placeholders are: * `%d` - Number (integer or floating point value). * `%i` - Integer. * `%f` - Floating point value. -* `%j` - JSON. Replaced with the string `'[Circular]'` if the argument +* `%j` - JSON. Replaced with the string `'[Circular]'` if the argument contains circular references. * `%%` - single percent sign (`'%'`). This does not consume an argument. @@ -157,7 +157,7 @@ the two styles are [semantically incompatible][]._ * `constructor` {Function} * `superConstructor` {Function} -Inherit the prototype methods from one [constructor][] into another. The +Inherit the prototype methods from one [constructor][] into another. The prototype of `constructor` will be set to a new object created from `superConstructor`. diff --git a/doc/api/v8.md b/doc/api/v8.md index 234835376bd3e0..a74bf64e535108 100644 --- a/doc/api/v8.md +++ b/doc/api/v8.md @@ -110,7 +110,7 @@ after the VM has started may result in unpredictable behavior, including crashes and data loss; or it may simply do nothing. The V8 options available for a version of Node.js may be determined by running -`node --v8-options`. An unofficial, community-maintained list of options +`node --v8-options`. An unofficial, community-maintained list of options and their effects is available [here][]. 
Usage: diff --git a/doc/api/vm.md b/doc/api/vm.md index 8c196f1d1a682f..9aab34f20b86c2 100644 --- a/doc/api/vm.md +++ b/doc/api/vm.md @@ -108,7 +108,7 @@ added: v0.3.1 will be thrown. * `breakOnSigint`: if `true`, the execution will be terminated when `SIGINT` (Ctrl+C) is received. Existing handlers for the - event that have been attached via `process.on("SIGINT")` will be disabled + event that have been attached via `process.on('SIGINT')` will be disabled during script execution, but will continue to work after that. If execution is terminated, an [`Error`][] will be thrown. diff --git a/doc/api/zlib.md b/doc/api/zlib.md index 6cef23824977c9..19ab15231913c3 100644 --- a/doc/api/zlib.md +++ b/doc/api/zlib.md @@ -57,8 +57,8 @@ header is used to identify the compression encodings actually applied to a message. **Note: the examples given below are drastically simplified to show -the basic concept.** Using `zlib` encoding can be expensive, and the results -ought to be cached. See [Memory Usage Tuning][] for more information +the basic concept.** Using `zlib` encoding can be expensive, and the results +ought to be cached. See [Memory Usage Tuning][] for more information on the speed/memory/compression tradeoffs involved in `zlib` usage. ```js @@ -157,7 +157,7 @@ The memory requirements for deflate are (in bytes): (1 << (windowBits + 2)) + (1 << (memLevel + 9)); ``` -That is: 128K for windowBits=15 + 128K for memLevel = 8 +That is: 128K for windowBits = 15 + 128K for memLevel = 8 (default values) plus a few kilobytes for small objects. For example, to reduce the default memory requirements from 256K to 128K, the @@ -175,20 +175,20 @@ The memory requirements for inflate are (in bytes) 1 << windowBits; ``` -That is, 32K for windowBits=15 (default value) plus a few kilobytes +That is, 32K for windowBits = 15 (default value) plus a few kilobytes for small objects. This is in addition to a single internal output slab buffer of size `chunkSize`, which defaults to 16K. The speed of `zlib` compression is affected most dramatically by the -`level` setting. A higher level will result in better compression, but -will take longer to complete. A lower level will result in less +`level` setting. A higher level will result in better compression, but +will take longer to complete. A lower level will result in less compression, but will be much faster. In general, greater memory usage options will mean that Node.js has to make fewer calls to `zlib` because it will be able to process more data on -each `write` operation. So, this is another factor that affects the +each `write` operation. So, this is another factor that affects the speed, at the cost of memory usage. ## Flushing @@ -231,7 +231,7 @@ added: v0.5.8 All of the constants defined in `zlib.h` are also defined on `require('zlib')`. In the normal course of operations, it will not be necessary to use these constants. They are documented so that their presence is not surprising. This -section is taken almost directly from the [zlib documentation][]. See +section is taken almost directly from the [zlib documentation][]. See for more details. Allowed flush values. @@ -288,14 +288,14 @@ added: v0.11.1 -Each class takes an `options` object. All options are optional. +Each class takes an `options` object. All options are optional. Note that some options are only relevant when compressing, and are ignored by the decompression classes. 
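For instance, a compression stream might be constructed with a few of the options listed below; the values follow the memory-tuning example above and are illustrative only:

```js
const zlib = require('zlib');

// windowBits: 14 and memLevel: 7 roughly halve the default deflate memory
// requirement (256K -> 128K) per the formula above; level: 1 favors speed
// over compression ratio.
const gzip = zlib.createGzip({
  level: 1,
  windowBits: 14,
  memLevel: 7
});

process.stdin.pipe(gzip).pipe(process.stdout);
```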
* `flush` (default: `zlib.Z_NO_FLUSH`) * `finishFlush` (default: `zlib.Z_FINISH`) -* `chunkSize` (default: 16*1024) +* `chunkSize` (default: `16 * 1024`) * `windowBits` * `level` (compression only) * `memLevel` (compression only) From 0ca2dad3a64b53baa9fdd0079ff3ef2ee5350453 Mon Sep 17 00:00:00 2001 From: Ivan Filenko Date: Wed, 24 Jan 2018 01:42:40 +0300 Subject: [PATCH 041/227] src: free memory before re-setting URLHost value MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Fixes: https://github.com/nodejs/node/issues/18302 Backport-PR-URL: https://github.com/nodejs/node/pull/19639 PR-URL: https://github.com/nodejs/node/pull/18357 Reviewed-By: Tiancheng "Timothy" Gu Reviewed-By: Anatoli Papirovski Reviewed-By: James M Snell Reviewed-By: Anna Henningsen Reviewed-By: Tobias Nießen --- src/node_url.cc | 19 +++++++++++++------ 1 file changed, 13 insertions(+), 6 deletions(-) diff --git a/src/node_url.cc b/src/node_url.cc index 0e5c395b130d52..2a1058d3536c53 100644 --- a/src/node_url.cc +++ b/src/node_url.cc @@ -92,6 +92,16 @@ class URLHost { Value value_; HostType type_ = HostType::H_FAILED; + inline void Reset() { + using string = std::string; + switch (type_) { + case HostType::H_DOMAIN: value_.domain.~string(); break; + case HostType::H_OPAQUE: value_.opaque.~string(); break; + default: break; + } + type_ = HostType::H_FAILED; + } + // Setting the string members of the union with = is brittle because // it relies on them being initialized to a state that requires no // destruction of old data. @@ -101,12 +111,14 @@ class URLHost { // These helpers are the easiest solution but we might want to consider // just not forcing strings into an union. inline void SetOpaque(std::string* string) { + Reset(); type_ = HostType::H_OPAQUE; new(&value_.opaque) std::string(); value_.opaque.swap(*string); } inline void SetDomain(std::string* string) { + Reset(); type_ = HostType::H_DOMAIN; new(&value_.domain) std::string(); value_.domain.swap(*string); @@ -114,12 +126,7 @@ class URLHost { }; URLHost::~URLHost() { - using string = std::string; - switch (type_) { - case HostType::H_DOMAIN: value_.domain.~string(); break; - case HostType::H_OPAQUE: value_.opaque.~string(); break; - default: break; - } + Reset(); } #define ARGS(XX) \ From 8ab8d6afd6d2feda0fd7c1e7c05093719be4b51a Mon Sep 17 00:00:00 2001 From: Anna Henningsen Date: Sat, 29 Apr 2017 20:25:35 +0200 Subject: [PATCH 042/227] stream: fix y.pipe(x)+y.pipe(x)+y.unpipe(x) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Fix the uncommon situation when a readable stream is piped twice into the same destination stream, and then unpiped once. Previously, the `unpipe` event handlers weren’t able to tell whether they were corresponding to the “right” conceptual pipe that was being removed; this fixes this by adding a counter to the `unpipe` event handler and only removing a single piping destination at most. 
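A minimal sketch of the scenario after this change, along the lines of the test added below:

```js
const { PassThrough, Writable } = require('stream');

const src = new PassThrough();
const dest = new Writable({ write(chunk, encoding, cb) { cb(); } });

src.pipe(dest);
src.pipe(dest);
src.unpipe(dest);

// Only one of the two pipes is torn down, so one 'data' handler (and one
// conceptual pipe) remains; previously both were removed even though only
// a single unpipe() call was made.
console.log(src.listenerCount('data'));  // 1
```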
Fixes: https://github.com/nodejs/node/issues/12718 PR-URL: https://github.com/nodejs/node/pull/12746 Reviewed-By: Benjamin Gruenbaum Reviewed-By: Matteo Collina --- lib/_stream_readable.js | 14 ++-- ...test-stream-pipe-same-destination-twice.js | 78 +++++++++++++++++++ 2 files changed, 87 insertions(+), 5 deletions(-) create mode 100644 test/parallel/test-stream-pipe-same-destination-twice.js diff --git a/lib/_stream_readable.js b/lib/_stream_readable.js index dfba99cbc74d7d..39a7ec8d93ad53 100644 --- a/lib/_stream_readable.js +++ b/lib/_stream_readable.js @@ -499,10 +499,13 @@ Readable.prototype.pipe = function(dest, pipeOpts) { src.once('end', endFn); dest.on('unpipe', onunpipe); - function onunpipe(readable) { + function onunpipe(readable, unpipeInfo) { debug('onunpipe'); if (readable === src) { - cleanup(); + if (unpipeInfo && unpipeInfo.hasUnpiped === false) { + unpipeInfo.hasUnpiped = true; + cleanup(); + } } } @@ -628,6 +631,7 @@ function pipeOnDrain(src) { Readable.prototype.unpipe = function(dest) { var state = this._readableState; + var unpipeInfo = { hasUnpiped: false }; // if we're not piping anywhere, then do nothing. if (state.pipesCount === 0) @@ -647,7 +651,7 @@ Readable.prototype.unpipe = function(dest) { state.pipesCount = 0; state.flowing = false; if (dest) - dest.emit('unpipe', this); + dest.emit('unpipe', this, unpipeInfo); return this; } @@ -662,7 +666,7 @@ Readable.prototype.unpipe = function(dest) { state.flowing = false; for (var i = 0; i < len; i++) - dests[i].emit('unpipe', this); + dests[i].emit('unpipe', this, unpipeInfo); return this; } @@ -676,7 +680,7 @@ Readable.prototype.unpipe = function(dest) { if (state.pipesCount === 1) state.pipes = state.pipes[0]; - dest.emit('unpipe', this); + dest.emit('unpipe', this, unpipeInfo); return this; }; diff --git a/test/parallel/test-stream-pipe-same-destination-twice.js b/test/parallel/test-stream-pipe-same-destination-twice.js new file mode 100644 index 00000000000000..1824c0606451a2 --- /dev/null +++ b/test/parallel/test-stream-pipe-same-destination-twice.js @@ -0,0 +1,78 @@ +'use strict'; +const common = require('../common'); + +// Regression test for https://github.com/nodejs/node/issues/12718. +// Tests that piping a source stream twice to the same destination stream +// works, and that a subsequent unpipe() call only removes the pipe *once*. 
+const assert = require('assert'); +const { PassThrough, Writable } = require('stream'); + +{ + const passThrough = new PassThrough(); + const dest = new Writable({ + write: common.mustCall((chunk, encoding, cb) => { + assert.strictEqual(`${chunk}`, 'foobar'); + cb(); + }) + }); + + passThrough.pipe(dest); + passThrough.pipe(dest); + + assert.strictEqual(passThrough._events.data.length, 2); + assert.strictEqual(passThrough._readableState.pipesCount, 2); + assert.strictEqual(passThrough._readableState.pipes[0], dest); + assert.strictEqual(passThrough._readableState.pipes[1], dest); + + passThrough.unpipe(dest); + + assert.strictEqual(passThrough._events.data.length, 1); + assert.strictEqual(passThrough._readableState.pipesCount, 1); + assert.strictEqual(passThrough._readableState.pipes, dest); + + passThrough.write('foobar'); + passThrough.pipe(dest); +} + +{ + const passThrough = new PassThrough(); + const dest = new Writable({ + write: common.mustCall((chunk, encoding, cb) => { + assert.strictEqual(`${chunk}`, 'foobar'); + cb(); + }, 2) + }); + + passThrough.pipe(dest); + passThrough.pipe(dest); + + assert.strictEqual(passThrough._events.data.length, 2); + assert.strictEqual(passThrough._readableState.pipesCount, 2); + assert.strictEqual(passThrough._readableState.pipes[0], dest); + assert.strictEqual(passThrough._readableState.pipes[1], dest); + + passThrough.write('foobar'); +} + +{ + const passThrough = new PassThrough(); + const dest = new Writable({ + write: common.mustNotCall() + }); + + passThrough.pipe(dest); + passThrough.pipe(dest); + + assert.strictEqual(passThrough._events.data.length, 2); + assert.strictEqual(passThrough._readableState.pipesCount, 2); + assert.strictEqual(passThrough._readableState.pipes[0], dest); + assert.strictEqual(passThrough._readableState.pipes[1], dest); + + passThrough.unpipe(dest); + passThrough.unpipe(dest); + + assert.strictEqual(passThrough._events.data, undefined); + assert.strictEqual(passThrough._readableState.pipesCount, 0); + + passThrough.write('foobar'); +} From e54b8e818428ae1146030d3500cb637488a21edc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E9=99=88=E5=88=9A?= Date: Sat, 20 Jan 2018 11:06:26 +0800 Subject: [PATCH 043/227] stream: cleanup() when unpiping all streams. This PR makes sure the object emitted as the 'unpipe' event in the destination stream is not shared between destination, as it would be muted. 
Refs: https://github.com/nodejs/node/pull/12746 PR-URL: https://github.com/nodejs/node/pull/18266 Reviewed-By: Luigi Pinca Reviewed-By: Anatoli Papirovski Reviewed-By: Matteo Collina Reviewed-By: James M Snell Reviewed-By: Ruben Bridgewater Reviewed-By: Anna Henningsen --- lib/_stream_readable.js | 2 +- .../test-stream-pipe-unpipe-streams.js | 54 +++++++++++++++++++ 2 files changed, 55 insertions(+), 1 deletion(-) diff --git a/lib/_stream_readable.js b/lib/_stream_readable.js index 39a7ec8d93ad53..21268eacd02748 100644 --- a/lib/_stream_readable.js +++ b/lib/_stream_readable.js @@ -666,7 +666,7 @@ Readable.prototype.unpipe = function(dest) { state.flowing = false; for (var i = 0; i < len; i++) - dests[i].emit('unpipe', this, unpipeInfo); + dests[i].emit('unpipe', this, { hasUnpiped: false }); return this; } diff --git a/test/parallel/test-stream-pipe-unpipe-streams.js b/test/parallel/test-stream-pipe-unpipe-streams.js index 7e425aec1e379e..0e49dba5b8212b 100644 --- a/test/parallel/test-stream-pipe-unpipe-streams.js +++ b/test/parallel/test-stream-pipe-unpipe-streams.js @@ -31,3 +31,57 @@ source.unpipe(dest2); source.unpipe(dest1); assert.strictEqual(source._readableState.pipes, null); + +{ + // test `cleanup()` if we unpipe all streams. + const source = Readable({ read: () => {} }); + const dest1 = Writable({ write: () => {} }); + const dest2 = Writable({ write: () => {} }); + + let destCount = 0; + const srcCheckEventNames = ['end', 'data']; + const destCheckEventNames = ['close', 'finish', 'drain', 'error', 'unpipe']; + + const checkSrcCleanup = common.mustCall(() => { + assert.strictEqual(source._readableState.pipes, null); + assert.strictEqual(source._readableState.pipesCount, 0); + assert.strictEqual(source._readableState.flowing, false); + + srcCheckEventNames.forEach((eventName) => { + assert.strictEqual( + source.listenerCount(eventName), 0, + `source's '${eventName}' event listeners not removed` + ); + }); + }); + + function checkDestCleanup(dest) { + const currentDestId = ++destCount; + source.pipe(dest); + + const unpipeChecker = common.mustCall(() => { + assert.deepStrictEqual( + dest.listeners('unpipe'), [unpipeChecker], + `destination{${currentDestId}} should have a 'unpipe' event ` + + 'listener which is `unpipeChecker`' + ); + dest.removeListener('unpipe', unpipeChecker); + destCheckEventNames.forEach((eventName) => { + assert.strictEqual( + dest.listenerCount(eventName), 0, + `destination{${currentDestId}}'s '${eventName}' event ` + + 'listeners not removed' + ); + }); + + if (--destCount === 0) + checkSrcCleanup(); + }); + + dest.on('unpipe', unpipeChecker); + } + + checkDestCleanup(dest1); + checkDestCleanup(dest2); + source.unpipe(); +} From a9562fe30cc3a9f95078ace013e85432999857b9 Mon Sep 17 00:00:00 2001 From: Jason Ginchereau Date: Mon, 20 Mar 2017 14:55:26 -0700 Subject: [PATCH 044/227] n-api: add support for abi stable module API Add support for abi stable module API (N-API) as "Experimental feature". The goal of this API is to provide a stable Node API for native module developers. N-API aims to provide ABI compatibility guarantees across different Node versions and also across different Node VMs - allowing N-API enabled native modules to just work across different versions and flavors of Node.js without recompilation. 
A more detailed introduction is provided in: https://github.com/nodejs/node-eps/blob/master/005-ABI-Stable-Module-API.md and https://github.com/nodejs/abi-stable-node/blob/doc/VM%20Summit.pdf. The feature, during its experimental state, will be guarded by a runtime flag "--napi-modules". Only when this flag is added to the command line will N-API modules along with regular non N-API modules be supported. The API is defined by the methods in "src/node_api.h" and "src/node_api_types.h". This is the best starting point to review the API surface. More documentation will follow. In addition to the implementation of the API using V8, which is included in this PR, the API has also been validated against chakracore and that port is available in https://github.com/nodejs/abi-stable-node/tree/api-prototype-chakracore-8.x. The current plan is to provide N-API support in versions 8.X and 6.X directly. For older versions, such as 4.X or pre N-API versions of 6.X, we plan to create an external npm module to provide a migration path that will allow modules targeting older Node.js versions to use the API, albeit without getting the advantage of not having to recompile. In addition, we also plan an external npm package with C++ sugar to simplify the use of the API. The sugar will be in-line only and will only use the exported N-API methods but is not part of the N-API itself. The current version is in: https://github.com/nodejs/node-api. This PR is a result of work in the abi-stable-node repo: https://github.com/nodejs/abi-stable-node/tree/doc, with this PR being the cumulative work on the api-prototype-8.x branch with the following contributors in alphabetical order: Author: Arunesh Chandra Author: Gabriel Schulhof Author: Hitesh Kanwathirtha Author: Ian Halliday Author: Jason Ginchereau Author: Michael Dawson Author: Sampson Gao Author: Taylor Woll Backport-PR-URL: https://github.com/nodejs/node/pull/19447 PR-URL: https://github.com/nodejs/node/pull/11975 Reviewed-By: Anna Henningsen Reviewed-By: James M Snell --- Makefile | 60 +- doc/api/cli.md | 8 + doc/node.1 | 5 + node.gyp | 3 + src/node.cc | 29 +- src/node_api.cc | 2529 +++++++++++++++++ src/node_api.h | 479 ++++ src/node_api_backport.h | 10 + src/node_api_types.h | 95 + test/addons-napi/.gitignore | 7 + test/addons-napi/1_hello_world/binding.c | 22 + test/addons-napi/1_hello_world/binding.gyp | 8 + test/addons-napi/1_hello_world/test.js | 6 + .../2_function_arguments/binding.c | 58 + .../2_function_arguments/binding.gyp | 8 + test/addons-napi/2_function_arguments/test.js | 6 + test/addons-napi/3_callbacks/binding.c | 51 + test/addons-napi/3_callbacks/binding.gyp | 8 + test/addons-napi/3_callbacks/test.js | 22 + test/addons-napi/4_object_factory/binding.c | 31 + test/addons-napi/4_object_factory/binding.gyp | 8 + test/addons-napi/4_object_factory/test.js | 8 + test/addons-napi/5_function_factory/binding.c | 36 + .../5_function_factory/binding.gyp | 8 + test/addons-napi/5_function_factory/test.js | 7 + test/addons-napi/6_object_wrap/binding.cc | 7 + test/addons-napi/6_object_wrap/binding.gyp | 8 + test/addons-napi/6_object_wrap/myobject.cc | 201 ++ test/addons-napi/6_object_wrap/myobject.h | 26 + test/addons-napi/6_object_wrap/test.js | 19 + test/addons-napi/7_factory_wrap/binding.cc | 32 + test/addons-napi/7_factory_wrap/binding.gyp | 8 + test/addons-napi/7_factory_wrap/myobject.cc | 110 + test/addons-napi/7_factory_wrap/myobject.h | 26 + 
test/addons-napi/7_factory_wrap/test.js | 14 + test/addons-napi/8_passing_wrapped/binding.cc | 57 + .../addons-napi/8_passing_wrapped/binding.gyp | 8 + .../addons-napi/8_passing_wrapped/myobject.cc | 81 + test/addons-napi/8_passing_wrapped/myobject.h | 26 + test/addons-napi/8_passing_wrapped/test.js | 9 + test/addons-napi/test_array/binding.gyp | 8 + test/addons-napi/test_array/test.js | 37 + test/addons-napi/test_array/test_array.c | 137 + test/addons-napi/test_buffer/binding.gyp | 8 + test/addons-napi/test_buffer/test.js | 25 + test/addons-napi/test_buffer/test_buffer.c | 160 ++ test/addons-napi/test_constructor/binding.gyp | 8 + test/addons-napi/test_constructor/test.js | 28 + .../test_constructor/test_constructor.c | 104 + test/addons-napi/test_conversions/binding.gyp | 8 + test/addons-napi/test_conversions/test.js | 140 + .../test_conversions/test_conversions.c | 237 ++ test/addons-napi/test_error/binding.gyp | 8 + test/addons-napi/test_error/test.js | 57 + test/addons-napi/test_error/test_error.cc | 37 + test/addons-napi/test_exception/binding.gyp | 8 + test/addons-napi/test_exception/test.js | 53 + .../test_exception/test_exception.c | 75 + test/addons-napi/test_function/binding.gyp | 8 + test/addons-napi/test_function/test.js | 28 + .../addons-napi/test_function/test_function.c | 55 + test/addons-napi/test_instanceof/binding.gyp | 8 + test/addons-napi/test_instanceof/test.js | 87 + .../test_instanceof/test_instanceof.c | 39 + test/addons-napi/test_number/binding.gyp | 8 + test/addons-napi/test_number/test.js | 39 + test/addons-napi/test_number/test_number.c | 55 + test/addons-napi/test_object/binding.gyp | 8 + test/addons-napi/test_object/test.js | 65 + test/addons-napi/test_object/test_object.c | 246 ++ test/addons-napi/test_properties/binding.gyp | 8 + test/addons-napi/test_properties/test.js | 27 + .../test_properties/test_properties.c | 85 + test/addons-napi/test_string/binding.gyp | 8 + test/addons-napi/test_string/test.js | 26 + test/addons-napi/test_string/test_string.c | 134 + test/addons-napi/test_symbol/binding.gyp | 8 + test/addons-napi/test_symbol/test1.js | 20 + test/addons-napi/test_symbol/test2.js | 15 + test/addons-napi/test_symbol/test3.js | 19 + test/addons-napi/test_symbol/test_symbol.c | 94 + test/addons-napi/test_typedarray/binding.gyp | 8 + test/addons-napi/test_typedarray/test.js | 39 + .../test_typedarray/test_typedarray.c | 144 + test/addons-napi/testcfg.py | 6 + test/testpy/__init__.py | 2 +- tools/install.py | 2 + tools/test.py | 1 + vcbuild.bat | 30 +- 89 files changed, 6586 insertions(+), 18 deletions(-) create mode 100644 src/node_api.cc create mode 100644 src/node_api.h create mode 100644 src/node_api_backport.h create mode 100644 src/node_api_types.h create mode 100644 test/addons-napi/.gitignore create mode 100644 test/addons-napi/1_hello_world/binding.c create mode 100644 test/addons-napi/1_hello_world/binding.gyp create mode 100644 test/addons-napi/1_hello_world/test.js create mode 100644 test/addons-napi/2_function_arguments/binding.c create mode 100644 test/addons-napi/2_function_arguments/binding.gyp create mode 100644 test/addons-napi/2_function_arguments/test.js create mode 100644 test/addons-napi/3_callbacks/binding.c create mode 100644 test/addons-napi/3_callbacks/binding.gyp create mode 100644 test/addons-napi/3_callbacks/test.js create mode 100644 test/addons-napi/4_object_factory/binding.c create mode 100644 test/addons-napi/4_object_factory/binding.gyp create mode 100644 test/addons-napi/4_object_factory/test.js create mode 100644 
test/addons-napi/5_function_factory/binding.c create mode 100644 test/addons-napi/5_function_factory/binding.gyp create mode 100644 test/addons-napi/5_function_factory/test.js create mode 100644 test/addons-napi/6_object_wrap/binding.cc create mode 100644 test/addons-napi/6_object_wrap/binding.gyp create mode 100644 test/addons-napi/6_object_wrap/myobject.cc create mode 100644 test/addons-napi/6_object_wrap/myobject.h create mode 100644 test/addons-napi/6_object_wrap/test.js create mode 100644 test/addons-napi/7_factory_wrap/binding.cc create mode 100644 test/addons-napi/7_factory_wrap/binding.gyp create mode 100644 test/addons-napi/7_factory_wrap/myobject.cc create mode 100644 test/addons-napi/7_factory_wrap/myobject.h create mode 100644 test/addons-napi/7_factory_wrap/test.js create mode 100644 test/addons-napi/8_passing_wrapped/binding.cc create mode 100644 test/addons-napi/8_passing_wrapped/binding.gyp create mode 100644 test/addons-napi/8_passing_wrapped/myobject.cc create mode 100644 test/addons-napi/8_passing_wrapped/myobject.h create mode 100644 test/addons-napi/8_passing_wrapped/test.js create mode 100644 test/addons-napi/test_array/binding.gyp create mode 100644 test/addons-napi/test_array/test.js create mode 100644 test/addons-napi/test_array/test_array.c create mode 100644 test/addons-napi/test_buffer/binding.gyp create mode 100644 test/addons-napi/test_buffer/test.js create mode 100644 test/addons-napi/test_buffer/test_buffer.c create mode 100644 test/addons-napi/test_constructor/binding.gyp create mode 100644 test/addons-napi/test_constructor/test.js create mode 100644 test/addons-napi/test_constructor/test_constructor.c create mode 100644 test/addons-napi/test_conversions/binding.gyp create mode 100644 test/addons-napi/test_conversions/test.js create mode 100644 test/addons-napi/test_conversions/test_conversions.c create mode 100644 test/addons-napi/test_error/binding.gyp create mode 100644 test/addons-napi/test_error/test.js create mode 100644 test/addons-napi/test_error/test_error.cc create mode 100644 test/addons-napi/test_exception/binding.gyp create mode 100644 test/addons-napi/test_exception/test.js create mode 100644 test/addons-napi/test_exception/test_exception.c create mode 100644 test/addons-napi/test_function/binding.gyp create mode 100644 test/addons-napi/test_function/test.js create mode 100644 test/addons-napi/test_function/test_function.c create mode 100644 test/addons-napi/test_instanceof/binding.gyp create mode 100644 test/addons-napi/test_instanceof/test.js create mode 100644 test/addons-napi/test_instanceof/test_instanceof.c create mode 100644 test/addons-napi/test_number/binding.gyp create mode 100644 test/addons-napi/test_number/test.js create mode 100644 test/addons-napi/test_number/test_number.c create mode 100644 test/addons-napi/test_object/binding.gyp create mode 100644 test/addons-napi/test_object/test.js create mode 100644 test/addons-napi/test_object/test_object.c create mode 100644 test/addons-napi/test_properties/binding.gyp create mode 100644 test/addons-napi/test_properties/test.js create mode 100644 test/addons-napi/test_properties/test_properties.c create mode 100644 test/addons-napi/test_string/binding.gyp create mode 100644 test/addons-napi/test_string/test.js create mode 100644 test/addons-napi/test_string/test_string.c create mode 100644 test/addons-napi/test_symbol/binding.gyp create mode 100644 test/addons-napi/test_symbol/test1.js create mode 100644 test/addons-napi/test_symbol/test2.js create mode 100644 
test/addons-napi/test_symbol/test3.js create mode 100644 test/addons-napi/test_symbol/test_symbol.c create mode 100644 test/addons-napi/test_typedarray/binding.gyp create mode 100644 test/addons-napi/test_typedarray/test.js create mode 100644 test/addons-napi/test_typedarray/test_typedarray.c create mode 100644 test/addons-napi/testcfg.py diff --git a/Makefile b/Makefile index c947a2a4367d05..523a3d8c541ed8 100644 --- a/Makefile +++ b/Makefile @@ -120,9 +120,10 @@ v8: test: all $(MAKE) build-addons + $(MAKE) build-addons-napi $(MAKE) cctest $(PYTHON) tools/test.py --mode=release -J \ - doctool inspector known_issues message pseudo-tty parallel sequential addons + doctool inspector known_issues message pseudo-tty parallel sequential addons addons-napi $(MAKE) lint test-parallel: all @@ -189,6 +190,41 @@ test/addons/.buildstamp: config.gypi \ # TODO(bnoordhuis) Force rebuild after gyp update. build-addons: $(NODE_EXE) test/addons/.buildstamp +ADDONS_NAPI_BINDING_GYPS := \ + $(filter-out test/addons-napi/??_*/binding.gyp, \ + $(wildcard test/addons-napi/*/binding.gyp)) + +ADDONS_NAPI_BINDING_SOURCES := \ + $(filter-out test/addons-napi/??_*/*.cc, $(wildcard test/addons-napi/*/*.cc)) \ + $(filter-out test/addons-napi/??_*/*.h, $(wildcard test/addons-napi/*/*.h)) + +# Implicitly depends on $(NODE_EXE), see the build-addons-napi rule for rationale. +test/addons-napi/.buildstamp: config.gypi \ + deps/npm/node_modules/node-gyp/package.json \ + $(ADDONS_NAPI_BINDING_GYPS) $(ADDONS_NAPI_BINDING_SOURCES) \ + deps/uv/include/*.h deps/v8/include/*.h \ + src/node.h src/node_buffer.h src/node_object_wrap.h src/node_version.h \ + src/node_api.h src/node_api_types.h +# Cannot use $(wildcard test/addons-napi/*/) here, it's evaluated before +# embedded addons have been generated from the documentation. + @for dirname in test/addons-napi/*/; do \ + printf "\nBuilding addon $$PWD/$$dirname\n" ; \ + env MAKEFLAGS="-j1" $(NODE) deps/npm/node_modules/node-gyp/bin/node-gyp \ + --loglevel=$(LOGLEVEL) rebuild \ + --python="$(PYTHON)" \ + --directory="$$PWD/$$dirname" \ + --nodedir="$$PWD" || exit 1 ; \ + done + touch $@ + +# .buildstamp and .docbuildstamp need $(NODE_EXE) but cannot depend on it +# directly because it calls make recursively. The parent make cannot know +# if the subprocess touched anything so it pessimistically assumes that +# .buildstamp and .docbuildstamp are out of date and need a rebuild. +# Just goes to show that recursive make really is harmful... +# TODO(bnoordhuis) Force rebuild after gyp or node-gyp update. +build-addons-napi: $(NODE_EXE) test/addons-napi/.buildstamp + clear-stalled: # Clean up any leftover processes but don't error if found. 
ps awwx | grep Release/node | grep -v grep | cat @@ -200,7 +236,9 @@ clear-stalled: test-gc: all test/gc/node_modules/weak/build/Release/weakref.node $(PYTHON) tools/test.py --mode=release gc -test-build: | all build-addons +test-build: | all build-addons build-addons-napi + +test-build-addons-napi: all build-addons-napi test-all: test-build test/gc/node_modules/weak/build/Release/weakref.node $(PYTHON) tools/test.py --mode=debug,release @@ -208,12 +246,12 @@ test-all: test-build test/gc/node_modules/weak/build/Release/weakref.node test-all-valgrind: test-build $(PYTHON) tools/test.py --mode=debug,release --valgrind -CI_NATIVE_SUITES := addons +CI_NATIVE_SUITES := addons addons-napi CI_JS_SUITES := doctool inspector known_issues message parallel pseudo-tty sequential # Build and test addons without building anything else test-ci-native: LOGLEVEL := info -test-ci-native: | test/addons/.buildstamp +test-ci-native: | test/addons/.buildstamp test/addons-napi/.buildstamp $(PYTHON) tools/test.py $(PARALLEL_ARGS) -p tap --logfile test.tap \ --mode=release --flaky-tests=$(FLAKY_TESTS) \ $(TEST_CI_ARGS) $(CI_NATIVE_SUITES) @@ -231,11 +269,11 @@ test-ci-js: | clear-stalled fi test-ci: LOGLEVEL := info -test-ci: | clear-stalled build-addons +test-ci: | clear-stalled build-addons build-addons-napi out/Release/cctest --gtest_output=tap:cctest.tap $(PYTHON) tools/test.py $(PARALLEL_ARGS) -p tap --logfile test.tap \ --mode=release --flaky-tests=$(FLAKY_TESTS) \ - $(TEST_CI_ARGS) $(CI_JS_SUITES) $(CI_NATIVE_SUITES) + $(TEST_CI_ARGS) $(CI_JS_SUITES) addons-napi $(CI_NATIVE_SUITES) # Clean up any leftover processes, error if found. ps awwx | grep Release/node | grep -v grep | cat @PS_OUT=`ps awwx | grep Release/node | grep -v grep | awk '{print $$1}'`; \ @@ -282,7 +320,10 @@ test-npm: $(NODE_EXE) test-npm-publish: $(NODE_EXE) npm_package_config_publishtest=true $(NODE) deps/npm/test/run.js -test-addons: test-build +test-addons-napi: test-build-addons-napi + $(PYTHON) tools/test.py --mode=release addons-napi + +test-addons: test-build test-addons-napi $(PYTHON) tools/test.py --mode=release addons test-addons-clean: @@ -813,6 +854,8 @@ LINT_CPP_FILES = $(filter-out $(LINT_CPP_EXCLUDE), $(wildcard \ test/addons/*/*.h \ test/cctest/*.cc \ test/cctest/*.h \ + test/addons-napi/*/*.cc \ + test/addons-napi/*/*.h \ tools/icu/*.cc \ tools/icu/*.h \ )) @@ -857,5 +900,6 @@ endif bench-buffer bench-net bench-http bench-fs bench-tls cctest run-ci \ test-v8 test-v8-intl test-v8-benchmarks test-v8-all v8 lint-ci \ bench-ci lint-js-ci doc-only $(TARBALL)-headers test-ci test-ci-native \ - test-ci-js build-ci test-hash-seed clear-stalled + test-ci-js build-ci test-hash-seed clear-stalled test-addons-napi \ + build-addons-napi diff --git a/doc/api/cli.md b/doc/api/cli.md index a8aee18e3efd3e..281a7fbbfba9c3 100644 --- a/doc/api/cli.md +++ b/doc/api/cli.md @@ -119,6 +119,14 @@ added: v6.0.0 Silence all process warnings (including deprecations). +### `--napi-modules` + + +Enable loading native modules compiled with the ABI-stable Node.js API (N-API) +(experimental). + ### `--trace-warnings` +```C +NAPI_EXTERN napi_status +napi_get_last_error_info(napi_env env, + const napi_extended_error_info** result); +``` +- `[in] env`: The environment that the API is invoked under. +- `[out] result`: The `napi_extended_error_info` structure with more +information about the error. + +Returns `napi_ok` if the API succeeded. + +This API retrieves a `napi_extended_error_info` structure with information +about the last error that occured. 
+
+**Note:** Do not rely on the content or format of any of the extended
+information as it is not subject to SemVer and may change at any time.
+It is intended only for logging purposes.
+
+
+### Exceptions
+Any N-API function call may result in a pending JavaScript exception. This is
+obviously the case for any function that may cause the execution of
+JavaScript, but N-API specifies that an exception may be pending
+on return from any of the API functions.
+
+If the `napi_status` returned by a function is `napi_ok` then no
+exception is pending and no additional action is required. If the
+`napi_status` returned is anything other than `napi_ok` or
+`napi_pending_exception`, in order to try to recover and continue
+instead of simply returning immediately, [`napi_is_exception_pending`][]
+must be called in order to determine if an exception is pending or not.
+
+When an exception is pending one of two approaches can be employed.
+
+The first approach is to do any appropriate cleanup and then return so that
+execution will return to JavaScript. As part of the transition back to
+JavaScript the exception will be thrown at the point in the JavaScript
+code where the native method was invoked. The behavior of most N-API calls
+is unspecified while an exception is pending, and many will simply return
+`napi_pending_exception`, so it is important to do as little as possible
+and then return to JavaScript where the exception can be handled.
+
+The second approach is to try to handle the exception. There will be cases
+where the native code can catch the exception, take the appropriate action,
+and then continue. This is only recommended in specific cases
+where it is known that the exception can be safely handled. In these
+cases [`napi_get_and_clear_last_exception`][] can be used to get and
+clear the exception. On success, `result` will contain the handle to
+the last JavaScript Object thrown. If it is determined, after
+retrieving the exception, that the exception cannot be handled after all,
+it can be re-thrown with [`napi_throw`][], where `error` is the
+JavaScript Error object to be thrown.
+
+The following utility functions are also available in case native code
+needs to throw an exception or determine if a `napi_value` is an instance
+of a JavaScript `Error` object: [`napi_throw_error`][],
+[`napi_throw_type_error`][], [`napi_throw_range_error`][] and
+[`napi_is_error`][].
+
+The following utility functions are also available in case native
+code needs to create an Error object: [`napi_create_error`][],
+[`napi_create_type_error`][], and [`napi_create_range_error`][],
+where `result` is the `napi_value` that refers to the newly created
+JavaScript Error object.
+
+#### napi_throw
+
+```C
+NODE_EXTERN napi_status napi_throw(napi_env env, napi_value error);
+```
+- `[in] env`: The environment that the API is invoked under.
+- `[in] error`: The `napi_value` for the Error to be thrown.
+
+Returns `napi_ok` if the API succeeded.
+
+This API throws the JavaScript Error provided.
+
+
+#### napi_throw_error
+
+```C
+NODE_EXTERN napi_status napi_throw_error(napi_env env, const char* msg);
+```
+- `[in] env`: The environment that the API is invoked under.
+- `[in] msg`: C string representing the text to be associated with
+the error.
+
+Returns `napi_ok` if the API succeeded.
+
+This API throws a JavaScript Error with the text provided.
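+
+As a rough sketch of the first approach described above (the callback name
+`DoWork` and its argument check are hypothetical, not part of the API), a
+native method might throw on invalid input and return immediately so that the
+error surfaces in JavaScript:
+
+```C
+napi_value DoWork(napi_env env, napi_callback_info info) {
+  size_t argc = 1;
+  napi_value args[1];
+  // Retrieve the arguments passed from JavaScript.
+  napi_status status = napi_get_cb_info(env, info, &argc, args, NULL, NULL);
+  if (status != napi_ok || argc < 1) {
+    napi_throw_error(env, "Expected exactly one argument");
+    return NULL;
+  }
+  // ... do the actual work with args[0] ...
+  return NULL;
+}
+```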
+
+#### napi_throw_type_error
+
+```C
+NODE_EXTERN napi_status napi_throw_type_error(napi_env env, const char* msg);
+```
+- `[in] env`: The environment that the API is invoked under.
+- `[in] msg`: C string representing the text to be associated with
+the error.
+
+Returns `napi_ok` if the API succeeded.
+
+This API throws a JavaScript TypeError with the text provided.
+
+#### napi_throw_range_error
+
+```C
+NODE_EXTERN napi_status napi_throw_range_error(napi_env env, const char* msg);
+```
+- `[in] env`: The environment that the API is invoked under.
+- `[in] msg`: C string representing the text to be associated with
+the error.
+
+Returns `napi_ok` if the API succeeded.
+
+This API throws a JavaScript RangeError with the text provided.
+
+
+#### napi_is_error
+
+```C
+NODE_EXTERN napi_status napi_is_error(napi_env env,
+                                      napi_value value,
+                                      bool* result);
+```
+- `[in] env`: The environment that the API is invoked under.
+- `[in] value`: The `napi_value` to be checked.
+- `[out] result`: Boolean value that is set to true if `napi_value` represents
+an error, false otherwise.
+
+Returns `napi_ok` if the API succeeded.
+
+This API queries a `napi_value` to check if it represents an error object.
+
+
+#### napi_create_error
+
+```C
+NODE_EXTERN napi_status napi_create_error(napi_env env,
+                                          const char* msg,
+                                          napi_value* result);
+```
+- `[in] env`: The environment that the API is invoked under.
+- `[in] msg`: C string representing the text to be associated with the error.
+- `[out] result`: `napi_value` representing the error created.
+
+Returns `napi_ok` if the API succeeded.
+
+This API returns a JavaScript Error with the text provided.
+
+#### napi_create_type_error
+
+```C
+NODE_EXTERN napi_status napi_create_type_error(napi_env env,
+                                               const char* msg,
+                                               napi_value* result);
+```
+- `[in] env`: The environment that the API is invoked under.
+- `[in] msg`: C string representing the text to be associated with the error.
+- `[out] result`: `napi_value` representing the error created.
+
+Returns `napi_ok` if the API succeeded.
+
+This API returns a JavaScript TypeError with the text provided.
+
+
+#### napi_create_range_error
+
+```C
+NODE_EXTERN napi_status napi_create_range_error(napi_env env,
+                                                const char* msg,
+                                                napi_value* result);
+```
+- `[in] env`: The environment that the API is invoked under.
+- `[in] msg`: C string representing the text to be associated with the error.
+- `[out] result`: `napi_value` representing the error created.
+
+Returns `napi_ok` if the API succeeded.
+
+This API returns a JavaScript RangeError with the text provided.
+
+
+#### napi_get_and_clear_last_exception
+
+```C
+NAPI_EXTERN napi_status napi_get_and_clear_last_exception(napi_env env,
+                                                          napi_value* result);
+```
+
+- `[in] env`: The environment that the API is invoked under.
+- `[out] result`: The exception if one is pending, NULL otherwise.
+
+Returns `napi_ok` if the API succeeded.
+
+This API returns and clears the last pending exception. If no exception is
+pending, `result` is set to NULL.
+
+#### napi_is_exception_pending
+
+```C
+NAPI_EXTERN napi_status napi_is_exception_pending(napi_env env, bool* result);
+```
+
+- `[in] env`: The environment that the API is invoked under.
+- `[out] result`: Boolean value that is set to true if an exception is pending.
+
+Returns `napi_ok` if the API succeeded.
+
+This API returns true if an exception is pending.
+
+
+## Object Lifetime management
+
+As N-API calls are made, handles to objects in the heap for the underlying
+VM may be returned as `napi_values`. These handles must hold the
+objects 'live' until they are no longer required by the native code,
+otherwise the objects could be collected before the native code was
+finished using them.
+
+As object handles are returned they are associated with a
+'scope'. The lifespan for the default scope is tied to the lifespan
+of the native method call. The result is that, by default, handles
+remain valid and the objects associated with these handles will be
+held live for the lifespan of the native method call.
+
+In many cases, however, it is necessary that the handles remain valid for
+either a shorter or longer lifespan than that of the native method.
+The sections which follow describe the N-API functions that can be used
+to change the handle lifespan from the default.
+
+### Making handle lifespan shorter than that of the native method
+It is often necessary to make the lifespan of handles shorter than
+the lifespan of a native method. For example, consider a native method
+that has a loop which iterates through the elements in a large array:
+
+```C
+for (int i = 0; i < 1000000; i++) {
+  napi_value result;
+  napi_status status = napi_get_element(env, object, i, &result);
+  if (status != napi_ok) {
+    break;
+  }
+  // do something with element
+}
+```
+
+This would result in a large number of handles being created, consuming
+substantial resources. In addition, even though the native code could only
+use the most recent handle, all of the associated objects would also be
+kept alive since they all share the same scope.
+
+To handle this case, N-API provides the ability to establish a new 'scope' to
+which newly created handles will be associated. Once those handles
+are no longer required, the scope can be 'closed' and any handles associated
+with the scope are invalidated. The methods available to open/close scopes are
+[`napi_open_handle_scope`][] and [`napi_close_handle_scope`][].
+
+N-API only supports a single nested hierarchy of scopes. There is only one
+active scope at any time, and all new handles will be associated with that
+scope while it is active. Scopes must be closed in the reverse order from
+which they are opened. In addition, all scopes created within a native method
+must be closed before returning from that method.
+
+Taking the earlier example, adding calls to [`napi_open_handle_scope`][] and
+[`napi_close_handle_scope`][] would ensure that at most a single handle
+is valid throughout the execution of the loop:
+
+```C
+for (int i = 0; i < 1000000; i++) {
+  napi_handle_scope scope;
+  napi_status status = napi_open_handle_scope(env, &scope);
+  if (status != napi_ok) {
+    break;
+  }
+  napi_value result;
+  status = napi_get_element(env, object, i, &result);
+  if (status != napi_ok) {
+    break;
+  }
+  // do something with element
+  status = napi_close_handle_scope(env, scope);
+  if (status != napi_ok) {
+    break;
+  }
+}
+```
+
+When nesting scopes, there are cases where a handle from an
+inner scope needs to live beyond the lifespan of that scope. N-API supports an
+'escapable scope' in order to support this case. An escapable scope
+allows one or more handles to be 'promoted' so that they 'escape' the
+current scope and the lifespan of the handle(s) changes from the current
+scope to that of the outer scope.
+
+The methods available to open/close escapable scopes are
+[`napi_open_escapable_handle_scope`][] and
+[`napi_close_escapable_handle_scope`][].
+
+The request to promote a handle is made through [`napi_escape_handle`][].
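+
+As a rough sketch (the helper `CreateResult` and the object it builds are
+hypothetical), an escapable scope might be used as follows:
+
+```C
+napi_value CreateResult(napi_env env) {
+  napi_escapable_handle_scope scope;
+  napi_value result = NULL;
+  napi_status status = napi_open_escapable_handle_scope(env, &scope);
+  if (status != napi_ok) return NULL;
+  napi_value obj;
+  status = napi_create_object(env, &obj);
+  if (status == napi_ok) {
+    // Promote the handle so it remains valid after this scope is closed.
+    status = napi_escape_handle(env, scope, obj, &result);
+  }
+  napi_close_escapable_handle_scope(env, scope);
+  return status == napi_ok ? result : NULL;
+}
+```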
+ +#### napi_open_handle_scope + +```C +NODE_EXTERN napi_status napi_open_handle_scope(napi_env env, + napi_handle_scope* result); +``` +- `[in] env`: The environment that the API is invoked under. +- `[out] result`: `napi_value` representing the new scope. + +Returns `napi_ok` if the API succeeded. + +This API open a new scope. + +#### napi_close_handle_scope + +```C +NODE_EXTERN napi_status napi_close_handle_scope(napi_env env, + napi_handle_scope scope); +``` +- `[in] env`: The environment that the API is invoked under. +- `[in] scope`: `napi_value` representing the scope to be closed. + +Returns `napi_ok` if the API succeeded. + +This API closes the scope passed in. Scopes must be closed in the +reverse order from which they were created. + +#### napi_open_escapable_handle_scope + +```C +NODE_EXTERN napi_status + napi_open_escapable_handle_scope(napi_env env, + napi_handle_scope* result); +``` +- `[in] env`: The environment that the API is invoked under. +- `[out] result`: `napi_value` representing the new scope. + +Returns `napi_ok` if the API succeeded. + +This API open a new scope from which objects can be promoted +to the outer scope. + +#### napi_close_escapable_handle_scope + +```C +NODE_EXTERN napi_status + napi_close_escapable_handle_scope(napi_env env, + napi_handle_scope scope); +``` +- `[in] env`: The environment that the API is invoked under. +- `[in] scope`: `napi_value` representing the scope to be closed. + +Returns `napi_ok` if the API succeeded. + +This API closes the scope passed in. Scopes must be closed in the +reverse order from which they were created. + +#### napi_escape_handle + +```C +NAPI_EXTERN napi_status napi_escape_handle(napi_env env, + napi_escapable_handle_scope scope, + napi_value escapee, + napi_value* result); +``` + +- `[in] env`: The environment that the API is invoked under. +- `[in] scope`: `napi_value` representing the current scope. +- `[in] escapee`: `napi_value` representing the JavaScript Object to be escaped. +- `[out] result`: `napi_value` representing the handle to the escaped +Object in the outer scope. + +Returns `napi_ok` if the API succeeded. + +This API promotes the handle to the JavaScript object so that it valid +for the lifetime of the outer scope. + + +### References to objects with a lifespan longer than that of the native method +In some cases an addon will need to be able to create and reference objects +with a lifespan longer than that of a single native method invocation. For +example, to create a constructor and later use that constructor +in a request to creates instances, it must be possible to reference +the constructor object across many different instance creation requests. This +would not be possible with a normal handle returned as a `napi_value` as +described in the earlier section. The lifespan of a normal handle is +managed by scopes and all scopes must be closed before the end of a native +method. + +N-API provides methods to create persistent references to an object. +Each persistent reference has an associated count with a value of 0 +or higher. The count determines if the reference will keep +the corresponding object live. References with a count of 0 do not +prevent the object from being collected and are often called 'weak' +references. Any count greater than 0 will prevent the object +from being collected. + +References can be created with an initial reference count. The count can +then be modified through [`napi_reference_ref`][] and +[`napi_reference_unref`][]. 
If an object is collected while the count
+for a reference is 0, all subsequent calls to
+get the object associated with the reference ([`napi_get_reference_value`][])
+will return NULL for the returned `napi_value`. An attempt to call
+[`napi_reference_ref`][] for a reference whose object has been collected
+will result in an error.
+
+References must be deleted once they are no longer required by the addon. When
+a reference is deleted it will no longer prevent the corresponding object from
+being collected. Failure to delete a persistent reference will result in
+a 'memory leak' with both the native memory for the persistent reference and
+the corresponding object on the heap being retained forever.
+
+There can be multiple persistent references created which refer to the same
+object, each of which will either keep the object live or not based on its
+individual count.
+
+#### napi_create_reference
+
+```C
+NODE_EXTERN napi_status napi_create_reference(napi_env env,
+                                              napi_value value,
+                                              int initial_refcount,
+                                              napi_ref* result);
+```
+
+- `[in] env`: The environment that the API is invoked under.
+- `[in] value`: `napi_value` representing the Object to which we want
+a reference.
+- `[in] initial_refcount`: Initial reference count for the new reference.
+- `[out] result`: `napi_ref` pointing to the new reference.
+
+Returns `napi_ok` if the API succeeded.
+
+This API creates a new reference with the specified reference count
+to the Object passed in.
+
+#### napi_delete_reference
+
+```C
+NODE_EXTERN napi_status napi_delete_reference(napi_env env, napi_ref ref);
+```
+
+- `[in] env`: The environment that the API is invoked under.
+- `[in] ref`: `napi_ref` to be deleted.
+
+Returns `napi_ok` if the API succeeded.
+
+This API deletes the reference passed in.
+
+#### napi_reference_ref
+
+```C
+NODE_EXTERN napi_status napi_reference_ref(napi_env env,
+                                           napi_ref ref,
+                                           int* result);
+```
+- `[in] env`: The environment that the API is invoked under.
+- `[in] ref`: `napi_ref` for which the reference count will be incremented.
+- `[out] result`: The new reference count.
+
+Returns `napi_ok` if the API succeeded.
+
+This API increments the reference count for the reference
+passed in and returns the resulting reference count.
+
+
+#### napi_reference_unref
+
+```C
+NODE_EXTERN napi_status napi_reference_unref(napi_env env,
+                                             napi_ref ref,
+                                             int* result);
+```
+- `[in] env`: The environment that the API is invoked under.
+- `[in] ref`: `napi_ref` for which the reference count will be decremented.
+- `[out] result`: The new reference count.
+
+Returns `napi_ok` if the API succeeded.
+
+This API decrements the reference count for the reference
+passed in and returns the resulting reference count.
+
+
+#### napi_get_reference_value
+
+```C
+NODE_EXTERN napi_status napi_get_reference_value(napi_env env,
+                                                 napi_ref ref,
+                                                 napi_value* result);
+```
+
+The `napi_value` passed in or out of these methods is a handle to the
+object to which the reference is related.
+
+- `[in] env`: The environment that the API is invoked under.
+- `[in] ref`: `napi_ref` for which we are requesting the corresponding Object.
+- `[out] result`: The `napi_value` for the Object referenced by the
+`napi_ref`.
+
+Returns `napi_ok` if the API succeeded.
+
+If still valid, this API returns the `napi_value` representing the
+JavaScript Object associated with the `napi_ref`. Otherwise, `result`
+will be NULL.
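+
+As a rough sketch of the reference lifecycle described above (the static
+`constructor_ref` variable and the point at which it is deleted are
+illustrative), an addon might keep a constructor alive across calls like this:
+
+```C
+static napi_ref constructor_ref;  // outlives any single native call
+
+// After creating the constructor value `cons` (a count of 1 keeps it live):
+//   napi_create_reference(env, cons, 1, &constructor_ref);
+
+napi_value GetConstructor(napi_env env) {
+  napi_value cons = NULL;
+  // Retrieve the object the reference points to (NULL if it was collected).
+  if (napi_get_reference_value(env, constructor_ref, &cons) != napi_ok) {
+    return NULL;
+  }
+  return cons;
+}
+
+// Once the addon no longer needs the constructor:
+//   napi_delete_reference(env, constructor_ref);
+```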
+ +## Module registration +N-API modules are registered in the same manner as other modules +except that instead of using the `NODE_MODULE` macro the following +is used: + +```C +NAPI_MODULE(addon, Init) +``` + +The next difference is the signature for the `Init` method. For a N-API +module it is as follows: + +```C +void Init(napi_env env, napi_value exports, napi_value module, void* priv); +``` + +As with any other module, functions are exported by either adding them to +the `exports` or `module` objects passed to the `Init` method. + +For example, to add the method `hello` as a function so that it can be called +as a method provided by the addon: + +```C +void Init(napi_env env, napi_value exports, napi_value module, void* priv) { + napi_status status; + napi_property_descriptor desc = + {"hello", Method, 0, 0, 0, napi_default, 0}; + status = napi_define_properties(env, exports, 1, &desc); +} +``` + +For example, to set a function to be returned by the `require()` for the addon: + +```C +void Init(napi_env env, napi_value exports, napi_value module, void* priv) { + napi_status status; + napi_property_descriptor desc = + {"exports", Method, 0, 0, 0, napi_default, 0}; + status = napi_define_properties(env, module, 1, &desc); +} +``` + +For example, to define a class so that new instances can be created +(often used with [Object Wrap][]): + +```C +// NOTE: partial example, not all referenced code is included + +napi_status status; +napi_property_descriptor properties[] = { + { "value", nullptr, GetValue, SetValue, 0, napi_default, 0 }, + DECLARE_NAPI_METHOD("plusOne", PlusOne), + DECLARE_NAPI_METHOD("multiply", Multiply), +}; + +napi_value cons; +status = + napi_define_class(env, "MyObject", New, nullptr, 3, properties, &cons); +if (status != napi_ok) return; + +status = napi_create_reference(env, cons, 1, &constructor); +if (status != napi_ok) return; + +status = napi_set_named_property(env, exports, "MyObject", cons); +if (status != napi_ok) return; +``` + +For more details on setting properties on either the `exports` or `module` +objects, see the section on +[Working with JavaScript Properties][]. + +For more details on building addon modules in general, refer to the existing API + +## Working with JavaScript Values +N-API exposes a set of APIs to create all types of JavaScript values. +Some of these types are documented under +[Section 6](https://tc39.github.io/ecma262/#sec-ecmascript-data-types-and-values) +of the [ECMAScript Language Specification][]. + +Fundamentally, these APIs are used to do one of the following: +1. Create a new JavaScript object +2. Convert from a primitive C type to an N-API value +3. Convert from N-API value to a primitive C type +4. Get global instances including `undefined` and `null` + +N-API values are represented by the type `napi_value`. +Any N-API call that requires a JavaScript value takes in a `napi_value`. +In some cases, the API does check the type of the `napi_value` up-front. +However, for better performance, it's better for the caller to make sure that +the `napi_value` in question is of the JavaScript type expected by the API. + +### Enum types +#### *napi_valuetype* +```C +typedef enum { + // ES6 types (corresponds to typeof) + napi_undefined, + napi_null, + napi_boolean, + napi_number, + napi_string, + napi_symbol, + napi_object, + napi_function, + napi_external, +} napi_valuetype; +``` + +Describes the type of a `napi_value`. 
This generally corresponds to the types +described in +[Section 6.1](https://tc39.github.io/ecma262/#sec-ecmascript-language-types) of +the ECMAScript Language Specification. +In addition to types in that section, `napi_valuetype` can also represent +Functions and Objects with external data. + +#### *napi_typedarray_type* +```C +typedef enum { + napi_int8_array, + napi_uint8_array, + napi_uint8_clamped_array, + napi_int16_array, + napi_uint16_array, + napi_int32_array, + napi_uint32_array, + napi_float32_array, + napi_float64_array, +} napi_typedarray_type; +``` + +This represents the underlying binary scalar datatype of the TypedArray. +Elements of this enum correspond to +[Section 22.2](https://tc39.github.io/ecma262/#sec-typedarray-objects) +of the [ECMAScript Language Specification][]. + +### Object Creation Functions +#### *napi_create_array* + +```C +napi_status napi_create_array(napi_env env, napi_value* result) +``` + +- `[in] env`: The environment that the N-API call is invoked under. +- `[out] result`: A `napi_value` representing a JavaScript Array. + +Returns `napi_ok` if the API succeeded. + +This API returns an N-API value corresponding to a JavaScript Array type. +JavaScript arrays are described in +[Section 22.1](https://tc39.github.io/ecma262/#sec-array-objects) of the +ECMAScript Language Specification. + +#### *napi_create_array_with_length* + +```C +napi_status napi_create_array_with_length(napi_env env, + size_t length, + napi_value* result) +``` + +- `[in] env`: The environment that the API is invoked under. +- `[in] length`: The initial length of the Array. +- `[out] result`: A `napi_value` representing a JavaScript Array. + +Returns `napi_ok` if the API succeeded. + +This API returns an N-API value corresponding to a JavaScript Array type. +The Array's length property is set to the passed-in length parameter. +However, the underlying buffer is not guaranteed to be pre-allocated by the VM +when the array is created - that behavior is left to the underlying VM +implementation. +if the buffer must be a contiguous block of memory that can be +directly read and/or written via C, consider using +[`napi_create_external_arraybuffer`][]. + +JavaScript arrays are described in +[Section 22.1](https://tc39.github.io/ecma262/#sec-array-objects) of the +ECMAScript Language Specification. + +#### *napi_create_arraybuffer* + +```C +napi_status napi_create_arraybuffer(napi_env env, + size_t byte_length, + void** data, + napi_value* result) +``` + +- `[in] env`: The environment that the API is invoked under. +- `[in] length`: The length in bytes of the array buffer to create. +- `[out] data`: Pointer to the underlying byte buffer of the ArrayBuffer. +- `[out] result`: A `napi_value` representing a JavaScript ArrayBuffer. + +Returns `napi_ok` if the API succeeded. + +This API returns an N-API value corresponding to a JavaScript ArrayBuffer. +ArrayBuffers are used to represent fixed-length binary data buffers. They are +normally used as a backing-buffer for TypedArray objects. +The ArrayBuffer allocated will have an underlying byte buffer whose size is +determined by the `length` parameter that's passed in. +The underlying buffer is optionally returned back to the caller in case the +caller wants to directly manipulate the buffer. This buffer can only be +written to directly from native code. To write to this buffer from JavaScript, +a typed array or DataView object would need to be created. 
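+
+As a brief sketch of writing to the backing store from native code (the
+128-byte size is arbitrary):
+
+```C
+void* data;
+napi_value arraybuffer;
+// Allocate a 128-byte ArrayBuffer and obtain a pointer to its backing store.
+napi_status status = napi_create_arraybuffer(env, 128, &data, &arraybuffer);
+if (status == napi_ok) {
+  // The backing store can be written directly from native code.
+  ((unsigned char*)data)[0] = 42;
+}
+```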
+ +JavaScript ArrayBuffer objects are described in +[Section 24.1](https://tc39.github.io/ecma262/#sec-arraybuffer-objects) +of the ECMAScript Language Specification. + +#### *napi_create_buffer* + +```C +napi_status napi_create_buffer(napi_env env, + size_t size, + void** data, + napi_value* result) +``` + +- `[in] env`: The environment that the API is invoked under. +- `[in] size`: Size in bytes of the underlying buffer. +- `[out] data`: Raw pointer to the underlying buffer. +- `[out] result`: A `napi_value` representing a `node::Buffer`. + +Returns `napi_ok` if the API succeeded. + +This API allocates a `node::Buffer` object. While this is still a +fully-supported data structure, in most cases using a TypedArray will suffice. + +#### *napi_create_buffer_copy* + +```C +napi_status napi_create_buffer_copy(napi_env env, + size_t length, + const void* data, + void** result_data, + napi_value* result) +``` + +- `[in] env`: The environment that the API is invoked under. +- `[in] size`: Size in bytes of the input buffer (should be the same as the + size of the new buffer). +- `[in] data`: Raw pointer to the underlying buffer to copy from. +- `[out] result_data`: Pointer to the new Buffer's underlying data buffer. +- `[out] result`: A `napi_value` representing a `node::Buffer`. + +Returns `napi_ok` if the API succeeded. + +This API allocates a `node::Buffer` object and initializes it with data copied +from the passed-in buffer. While this is still a fully-supported data +structure, in most cases using a TypedArray will suffice. + +#### *napi_create_external* + +```C +napi_status napi_create_external(napi_env env, + void* data, + napi_finalize finalize_cb, + void* finalize_hint, + napi_value* result) +``` + +- `[in] env`: The environment that the API is invoked under. +- `[in] data`: Raw pointer to the external data being wrapped. +- `[in] finalize_cb`: Optional callback to call when the wrapped object +is being collected. +- `[in] finalize_hint`: Optional hint to pass to the finalize callback +during collection. +- `[out] result`: A `napi_value` representing an external object. + +Returns `napi_ok` if the API succeeded. + +This API allocates a JavaScript object with external data attached to it. +This is used to wrap native objects and project them into JavaScript. +The API allows the caller to pass in a finalize callback, in case the +underlying native resource needs to be cleaned up when the wrapper +JavaScript object gets collected. + +#### napi_create_external_arraybuffer + +```C +napi_status +napi_create_external_arraybuffer(napi_env env, + void* external_data, + size_t byte_length, + napi_finalize finalize_cb, + void* finalize_hint, + napi_value* result) +``` + +- `[in] env`: The environment that the API is invoked under. +- `[in] external_data`: Pointer to the underlying byte buffer of the +ArrayBuffer. +- `[in] byte_length`: The length in bytes of the underlying buffer. +- `[in] finalize_cb`: Optional callback to call when the ArrayBuffer is +being collected. +- `[in] finalize_hint`: Optional hint to pass to the finalize callback +during collection. +- `[out] result`: A `napi_value` representing a JavaScript ArrayBuffer. + +Returns `napi_ok` if the API succeeded. + +This API returns an N-API value corresponding to a JavaScript ArrayBuffer. +The underlying byte buffer of the ArrayBuffer is externally allocated and +managed. The caller must ensure that the byte buffer remains valid until the +finalize callback is called. 
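+
+A rough sketch of supplying an externally allocated buffer (the `FreeData`
+finalizer and the use of `malloc`/`free` are illustrative, not mandated by the
+API):
+
+```C
+// Called when the ArrayBuffer is collected; release the native buffer.
+static void FreeData(napi_env env, void* finalize_data, void* finalize_hint) {
+  free(finalize_data);
+}
+
+void* external_data = malloc(1024);
+napi_value arraybuffer;
+napi_status status = napi_create_external_arraybuffer(
+    env, external_data, 1024, FreeData, NULL, &arraybuffer);
+```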
+ +JavaScript ArrayBuffers are described in +[Section 24.1](https://tc39.github.io/ecma262/#sec-arraybuffer-objects) +of the ECMAScript Language Specification. + +#### *napi_create_external_buffer* + +```C +napi_status napi_create_external_buffer(napi_env env, + size_t length, + void* data, + napi_finalize finalize_cb, + void* finalize_hint, + napi_value* result) +``` + +- `[in] env`: The environment that the API is invoked under. +- `[in] length`: Size in bytes of the input buffer (should be the same as +the size of the new buffer). +- `[in] data`: Raw pointer to the underlying buffer to copy from. +- `[in] finalize_cb`: Optional callback to call when the ArrayBuffer is +being collected. +- `[in] finalize_hint`: Optional hint to pass to the finalize callback +during collection. +- `[out] result`: A `napi_value` representing a `node::Buffer`. + +Returns `napi_ok` if the API succeeded. + +This API allocates a `node::Buffer` object and initializes it with data +backed by the passed in buffer. While this is still a fully-supported data +structure, in most cases using a TypedArray will suffice. + +**Note:** For Node.js >=4 `Buffers` are Uint8Arrays. + +#### *napi_create_function* + +```C +napi_status napi_create_function(napi_env env, + const char* utf8name, + napi_callback cb, + void* data, + napi_value* result) +``` + +- `[in] env`: The environment that the API is invoked under. +- `[in] utf8name`: A string representing the name of the function encoded as +UTF8. +- `[in] cb`: A function pointer to the native function to be invoked when the +created function is invoked from JavaScript. +- `[in] data`: Optional arbitrary context data to be passed into the native +function when it is invoked. +- `[out] result`: A `napi_value` representing a JavaScript function. + +Returns `napi_ok` if the API succeeded. + +This API returns an N-API value corresponding to a JavaScript Function object. +It's used to wrap native functions so that they can be invoked from JavaScript. + +JavaScript Functions are described in +[Section 19.2](https://tc39.github.io/ecma262/#sec-function-objects) +of the ECMAScript Language Specification. + +#### *napi_create_object* + +```C +napi_status napi_create_object(napi_env env, napi_value* result) +``` + +- `[in] env`: The environment that the API is invoked under. +- `[out] result`: A `napi_value` representing a JavaScript Object. + +Returns `napi_ok` if the API succeeded. + +This API allocates a default JavaScript Object. +It is the equivalent of doing `new Object()` in JavaScript. + +The JavaScript Object type is described in +[Section 6.1.7](https://tc39.github.io/ecma262/#sec-object-type) of the +ECMAScript Language Specification. + +#### *napi_create_symbol* + +```C +napi_status napi_create_symbol(napi_env env, + const char* description, + napi_value* result) +``` + +- `[in] env`: The environment that the API is invoked under. +- `[in] description`: Null-terminated character buffer representing a +UTF8-encoded string to describe the symbol. +- `[out] result`: A `napi_value` representing a JavaScript Symbol. + +Returns `napi_ok` if the API succeeded. + +This API creates a JavaScript Symbol object from a UTF8-encoded C string + +The JavaScript Symbol type is described in +[Section 19.4](https://tc39.github.io/ecma262/#sec-symbol-objects) +of the ECMAScript Language Specification. 
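+
+As a brief sketch combining the creation functions above (the callback
+`SayHello` and the property name `"sayHello"` are hypothetical; `exports` is
+assumed to be the object passed to the module's `Init` method):
+
+```C
+napi_value SayHello(napi_env env, napi_callback_info info) {
+  napi_value greeting;
+  // Build the string to hand back to JavaScript.
+  napi_create_string_utf8(env, "hello", -1, &greeting);
+  return greeting;
+}
+
+napi_value fn;
+napi_status status = napi_create_function(env, "sayHello", SayHello, NULL, &fn);
+if (status == napi_ok) {
+  status = napi_set_named_property(env, exports, "sayHello", fn);
+}
+```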
+ +#### *napi_create_typedarray* + +```C +napi_status napi_create_typedarray(napi_env env, + napi_typedarray_type type, + size_t length, + napi_value arraybuffer, + size_t byte_offset, + napi_value* result) +``` + +- `[in] env`: The environment that the API is invoked under. +- `[in] type`: Scalar datatype of the elements within the TypedArray. +- `[in] length`: Number of elements in the TypedArray. +- `[in] arraybuffer`: ArrayBuffer underlying the typed array. +- `[in] byte_offset`: The byte offset within the ArrayBuffer from which to +start projecting the TypedArray. +- `[out] result`: A `napi_value` representing a JavaScript TypedArray. + +Returns `napi_ok` if the API succeeded. + +This API creates a JavaScript TypedArray object over an existing ArrayBuffer. +TypedArray objects provide an array-like view over an underlying data buffer +where each element has the same underlying binary scalar datatype. + +It's required that (length * size_of_element) + byte_offset should +be <= the size in bytes of the array passed in. If not, a RangeError exception is +raised. + +JavaScript TypedArray Objects are described in +[Section 22.2](https://tc39.github.io/ecma262/#sec-typedarray-objects) +of the ECMAScript Language Specification. + +### Functions to convert from C types to N-API +#### *napi_create_number* + +```C +napi_status napi_create_number(napi_env env, double value, napi_value* result) +``` + +- `[in] env`: The environment that the API is invoked under. +- `[in] value`: Double-precision value to be represented in JavaScript. +- `[out] result`: A `napi_value` representing a JavaScript Number. + +Returns `napi_ok` if the API succeeded. + +This API is used to convert from the C double type to the JavaScript +Number type. + +The JavaScript Number type is described in +[Section 6.1.6](https://tc39.github.io/ecma262/#sec-ecmascript-language-types-number-type) +of the ECMAScript Language Specification. + +#### *napi_create_string_utf16* + +```C +napi_status napi_create_string_utf16(napi_env env, + const char16_t* str, + size_t length, + napi_value* result) +``` + +- `[in] env`: The environment that the API is invoked under. +- `[in] str`: Character buffer representing a UTF16-LE-encoded string. +- `[in] length`: The length of the string in characters, or -1 if it is +null-terminated. +- `[out] result`: A `napi_value` representing a JavaScript String. + +Returns `napi_ok` if the API succeeded. + +This API creates a JavaScript String object from a UTF16-LE-encoded C string + +The JavaScript String type is described in +[Section 6.1.4](https://tc39.github.io/ecma262/#sec-ecmascript-language-types-string-type) +of the ECMAScript Language Specification. + +#### *napi_create_string_utf8* + +```C +napi_status napi_create_string_utf8(napi_env env, + const char* str, + size_t length, + napi_value* result) +``` + +- `[in] env`: The environment that the API is invoked under. +- `[in] s`: Character buffer representing a UTF8-encoded string. +- `[in] length`: The length of the string in characters, or -1 if it is +null-terminated. +- `[out] result`: A `napi_value` representing a JavaScript String. + +Returns `napi_ok` if the API succeeded. + +This API creates a JavaScript String object from a UTF8-encoded C string + +The JavaScript String type is described in +[Section 6.1.4](https://tc39.github.io/ecma262/#sec-ecmascript-language-types-string-type) +of the ECMAScript Language Specification. 
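+
+A minimal sketch of these conversions (the values chosen are arbitrary):
+
+```C
+napi_value num, str;
+// double -> JavaScript Number
+napi_status status = napi_create_number(env, 3.14, &num);
+// null-terminated C string -> JavaScript String
+if (status == napi_ok) {
+  status = napi_create_string_utf8(env, "hello", -1, &str);
+}
+```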
+ +### Functions to convert from N-API to C types +#### *napi_get_array_length* + +```C +napi_status napi_get_array_length(napi_env env, + napi_value value, + uint32_t* result) +``` + +- `[in] env`: The environment that the API is invoked under. +- `[in] value`: `napi_value` representing the JavaScript Array whose length is +being queried. +- `[out] result`: `uint32` representing length of the array. + +Returns `napi_ok` if the API succeeded. + +This API returns the length of an array. + +Array length is described in +[Section 22.1.4.1](https://tc39.github.io/ecma262/#sec-properties-of-array-instances-length) +of the ECMAScript Language Specification. + +#### *napi_get_arraybuffer_info* + +```C +napi_status napi_get_arraybuffer_info(napi_env env, + napi_value arraybuffer, + void** data, + size_t* byte_length) +``` + +- `[in] env`: The environment that the API is invoked under. +- `[in] arraybuffer`: `napi_value` representing the ArrayBuffer being queried. +- `[out] data`: The underlying data buffer of the ArrayBuffer. +- `[out] byte_length`: Length in bytes of the underlying data buffer. + +Returns `napi_ok` if the API succeeded. + +This API is used to retrieve the underlying data buffer of an ArrayBuffer and +its length. +WARNING: Use caution while using this API. The lifetime of the underlying data +buffer is managed by the ArrayBuffer even after it's returned. A +possible safe way to use this API is in conjunction with [`napi_create_reference`][], +which can be used to guarantee control over the lifetime of the +ArrayBuffer. It's also safe to use the returned data buffer within the same +callback as long as there are no calls to other APIs that might trigger a GC. + +#### *napi_get_buffer_info* + +```C +napi_status napi_get_buffer_info(napi_env env, + napi_value value, + void** data, + size_t* length) +``` + +- `[in] env`: The environment that the API is invoked under. +- `[in] value`: `napi_value` representing the `node::Buffer` being queried. +- `[out] data`: The underlying data buffer of the `node::Buffer`. +- `[out] length`: Length in bytes of the underlying data buffer. + +Returns `napi_ok` if the API succeeded. + +This API is used to retrieve the underlying data buffer of a `node::Buffer` +and it's length. +Warning: Use caution while using this API since the underlying data buffer's +lifetime is not guaranteed if it's managed by the VM. + +#### *napi_get_prototype* + +```C +napi_status napi_get_prototype(napi_env env, + napi_value object, + napi_value* result) +``` + +- `[in] env`: The environment that the API is invoked under. +- `[in] object`: `napi_value` representing JavaScript Object whose prototype +to return. This returns the equivalent of `Object.getPrototypeOf` (which is +not the same as the function's `prototype` property). +- `[out] result`: `napi_value` representing prototype of the given object. + +Returns `napi_ok` if the API succeeded. + +#### *napi_get_typedarray_info* + +```C +napi_status napi_get_typedarray_info(napi_env env, + napi_value typedarray, + napi_typedarray_type* type, + size_t* length, + void** data, + napi_value* arraybuffer, + size_t* byte_offset) +``` + +- `[in] env`: The environment that the API is invoked under. +- `[in] typedarray`: `napi_value` representing the TypedArray whose +properties to query. +- `[out] type`: Scalar datatype of the elements within the TypedArray. +- `[out] length`: Number of elements in the TypedArray. +- `[out] data`: The data buffer underlying the typed array. 
+- `[out] byte_offset`: The byte offset within the data buffer from which
+to start projecting the TypedArray.
+
+Returns `napi_ok` if the API succeeded.
+
+This API returns various properties of a typed array.
+Warning: Use caution while using this API since the underlying data buffer
+is managed by the VM.
+
+#### *napi_get_value_bool*
+
+```C
+napi_status napi_get_value_bool(napi_env env, napi_value value, bool* result)
+```
+
+- `[in] env`: The environment that the API is invoked under.
+- `[in] value`: `napi_value` representing JavaScript Boolean.
+- `[out] result`: C boolean primitive equivalent of the given JavaScript
+Boolean.
+
+Returns `napi_ok` if the API succeeded. If a non-boolean `napi_value` is
+passed in it returns `napi_boolean_expected`.
+
+This API returns the C boolean primitive equivalent of the given JavaScript
+Boolean.
+
+#### *napi_get_value_double*
+
+```C
+napi_status napi_get_value_double(napi_env env,
+                                  napi_value value,
+                                  double* result)
+```
+
+- `[in] env`: The environment that the API is invoked under.
+- `[in] value`: `napi_value` representing JavaScript Number.
+- `[out] result`: C double primitive equivalent of the given JavaScript
+Number.
+
+Returns `napi_ok` if the API succeeded. If a non-number `napi_value` is passed
+in it returns `napi_number_expected`.
+
+This API returns the C double primitive equivalent of the given JavaScript
+Number.
+
+
+#### *napi_get_value_external*
+
+```C
+napi_status napi_get_value_external(napi_env env,
+                                    napi_value value,
+                                    void** result)
+```
+
+- `[in] env`: The environment that the API is invoked under.
+- `[in] value`: `napi_value` representing JavaScript External value.
+- `[out] result`: Pointer to the data wrapped by the JavaScript External value.
+
+Returns `napi_ok` if the API succeeded. If a non-external `napi_value` is
+passed in it returns `napi_invalid_arg`.
+
+This API returns the pointer to the data wrapped by the JavaScript
+External value.
+
+#### *napi_get_value_int32*
+
+```C
+napi_status napi_get_value_int32(napi_env env,
+                                 napi_value value,
+                                 int32_t* result)
+```
+
+- `[in] env`: The environment that the API is invoked under.
+- `[in] value`: `napi_value` representing JavaScript Number.
+- `[out] result`: C int32 primitive equivalent of the given JavaScript Number.
+
+Returns `napi_ok` if the API succeeded. If a non-number `napi_value`
+is passed in it returns `napi_number_expected`.
+
+This API returns the C int32 primitive equivalent
+of the given JavaScript Number. If the number exceeds the range of the
+32 bit integer, then the result is truncated to the equivalent of the
+bottom 32 bits. This can result in a large positive number becoming
+a negative number if the value is > 2^31 - 1.
+
+#### *napi_get_value_int64*
+
+```C
+napi_status napi_get_value_int64(napi_env env,
+                                 napi_value value,
+                                 int64_t* result)
+```
+
+- `[in] env`: The environment that the API is invoked under.
+- `[in] value`: `napi_value` representing JavaScript Number.
+- `[out] result`: C int64 primitive equivalent of the given JavaScript Number.
+
+Returns `napi_ok` if the API succeeded. If a non-number `napi_value`
+is passed in it returns `napi_number_expected`.
+
+This API returns the C int64 primitive equivalent of the given
+JavaScript Number.
+
+#### *napi_get_value_string_length*
+
+```C
+napi_status napi_get_value_string_length(napi_env env,
+                                         napi_value value,
+                                         int* result)
+```
+
+- `[in] env`: The environment that the API is invoked under.
+- `[in] value`: `napi_value` representing JavaScript string.
+- `[out] result`: Number of characters in the given JavaScript string.
+
+Returns `napi_ok` if the API succeeded. If a non-String `napi_value`
+is passed in it returns `napi_string_expected`.
+
+This API returns the number of characters in the given JavaScript string.
+
+#### *napi_get_value_string_utf8*
+
+```C
+napi_status napi_get_value_string_utf8(napi_env env,
+                                       napi_value value,
+                                       char* buf,
+                                       size_t bufsize,
+                                       size_t* result)
+```
+
+- `[in] env`: The environment that the API is invoked under.
+- `[in] value`: `napi_value` representing JavaScript string.
+- `[in] buf`: Buffer to write the UTF8-encoded string into. If NULL is passed
+  in, the length of the string (in bytes) is returned.
+- `[in] bufsize`: Size of the destination buffer.
+- `[out] result`: Number of bytes copied into the buffer including the null
+terminator. If the buffer size is insufficient, the string will be truncated
+including a null terminator.
+
+Returns `napi_ok` if the API succeeded. If a non-String `napi_value`
+is passed in it returns `napi_string_expected`.
+
+This API returns the UTF8-encoded string corresponding to the value passed in.
+
+#### *napi_get_value_string_utf16*
+
+```C
+napi_status napi_get_value_string_utf16(napi_env env,
+                                        napi_value value,
+                                        char16_t* buf,
+                                        size_t bufsize,
+                                        size_t* result)
+```
+
+- `[in] env`: The environment that the API is invoked under.
+- `[in] value`: `napi_value` representing JavaScript string.
+- `[in] buf`: Buffer to write the UTF16-LE-encoded string into. If NULL is
+passed in, the length of the string (in 2-byte code units) is returned.
+- `[in] bufsize`: Size of the destination buffer.
+- `[out] result`: Number of 2-byte code units copied into the buffer including
+the null terminator. If the buffer size is insufficient, the string will be
+truncated including a null terminator.
+
+Returns `napi_ok` if the API succeeded. If a non-String `napi_value`
+is passed in it returns `napi_string_expected`.
+
+This API returns the UTF16-encoded string corresponding to the value passed in.
+
+#### *napi_get_value_uint32*
+
+```C
+napi_status napi_get_value_uint32(napi_env env,
+                                  napi_value value,
+                                  uint32_t* result)
+```
+
+- `[in] env`: The environment that the API is invoked under.
+- `[in] value`: `napi_value` representing JavaScript Number.
+- `[out] result`: C primitive equivalent of the given `napi_value` as a
+`uint32_t`.
+
+Returns `napi_ok` if the API succeeded. If a non-number `napi_value`
+is passed in it returns `napi_number_expected`.
+
+This API returns the C primitive equivalent of the given `napi_value` as a
+`uint32_t`.
+
+### Functions to get global instances
+#### *napi_get_boolean*
+
+```C
+napi_status napi_get_boolean(napi_env env, bool value, napi_value* result)
+```
+
+- `[in] env`: The environment that the API is invoked under.
+- `[in] value`: The value of the boolean to retrieve.
+- `[out] result`: `napi_value` representing JavaScript Boolean singleton to
+retrieve.
+
+Returns `napi_ok` if the API succeeded.
+
+This API is used to return the JavaScript singleton object that is used to
+represent the given boolean value.
+
+#### *napi_get_global*
+
+```C
+napi_status napi_get_global(napi_env env, napi_value* result)
+```
+
+- `[in] env`: The environment that the API is invoked under.
+- `[out] result`: `napi_value` representing JavaScript Global Object.
+
+Returns `napi_ok` if the API succeeded.
+
+This API returns the global Object.
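+
+A small sketch of the global instance getters (purely illustrative; `true`
+assumes `<stdbool.h>`):
+
+```C
+napi_value global, js_true;
+// Fetch the singleton values managed by the VM.
+napi_status status = napi_get_global(env, &global);
+if (status == napi_ok) {
+  status = napi_get_boolean(env, true, &js_true);
+}
+```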
+ +#### *napi_get_null* + +```C +napi_status napi_get_null(napi_env env, napi_value* result) +``` + +- `[in] env`: The environment that the API is invoked under. +- `[out] result`: `napi_value` representing JavaScript Null Object. + +Returns `napi_ok` if the API succeeded. + +This API returns the null Object. + +#### *napi_get_undefined* + +```C +napi_status napi_get_undefined(napi_env env, napi_value* result) +``` + +- `[in] env`: The environment that the API is invoked under. +- `[out] result`: `napi_value` representing JavaScript Undefined value. + +Returns `napi_ok` if the API succeeded. + +This API returns the Undefined object. + +## Working with JavaScript Values - Abstract Operations + +N-API exposes a set of APIs to perform some abstract operations on JavaScript +values. Some of these operations are documented under +[Section 7](https://tc39.github.io/ecma262/#sec-abstract-operations) +of the [ECMAScript Language Specification](https://tc39.github.io/ecma262/). + +These APIs support doing one of the following: +1. Coerce JavaScript values to specific JavaScript types (such as Number or + String) +2. Check the type of a JavaScript value +3. Check for equality between two JavaScript values + +### *napi_coerce_to_bool* + +```C +napi_status napi_coerce_to_bool(napi_env env, + napi_value value, + napi_value* result) +``` + +- `[in] env`: The environment that the API is invoked under. +- `[in] value`: The JavaScript value to coerce. +- `[out] result`: `napi_value` representing the coerced JavaScript Boolean. + +Returns `napi_ok` if the API succeeded. + +This API implements the abstract operation ToBoolean as defined in +[Section 7.1.2](https://tc39.github.io/ecma262/#sec-toboolean) +of the ECMAScript Language Specification. +This API can be re-entrant if getters are defined on the passed-in Object. + +### *napi_coerce_to_number* + +```C +napi_status napi_coerce_to_number(napi_env env, + napi_value value, + napi_value* result) +``` + +- `[in] env`: The environment that the API is invoked under. +- `[in] value`: The JavaScript value to coerce. +- `[out] result`: `napi_value` representing the coerced JavaScript Number. + +Returns `napi_ok` if the API succeeded. + +This API implements the abstract operation ToNumber as defined in +[Section 7.1.3](https://tc39.github.io/ecma262/#sec-tonumber) +of the ECMAScript Language Specification. +This API can be re-entrant if getters are defined on the passed-in Object. + +### *napi_coerce_to_object* + +```C +napi_status napi_coerce_to_object(napi_env env, + napi_value value, + napi_value* result) +``` + +- `[in] env`: The environment that the API is invoked under. +- `[in] value`: The JavaScript value to coerce. +- `[out] result`: `napi_value` representing the coerced JavaScript Object. + +Returns `napi_ok` if the API succeeded. + +This API implements the abstract operation ToObject as defined in +[Section 7.1.13](https://tc39.github.io/ecma262/#sec-toobject) +of the ECMAScript Language Specification. +This API can be re-entrant if getters are defined on the passed-in Object. + +### *napi_coerce_to_string* + +```C +napi_status napi_coerce_to_string(napi_env env, + napi_value value, + napi_value* result) +``` + +- `[in] env`: The environment that the API is invoked under. +- `[in] value`: The JavaScript value to coerce. +- `[out] result`: `napi_value` representing the coerced JavaScript String. + +Returns `napi_ok` if the API succeeded. 
+ +This API implements the abstract operation ToString as defined in +[Section 7.1.13](https://tc39.github.io/ecma262/#sec-tostring) +of the ECMAScript Language Specification. +This API can be re-entrant if getters are defined on the passed-in Object. + +### *napi_typeof* + +```C +napi_status napi_typeof(napi_env env, napi_value value, napi_valuetype* result) +``` + +- `[in] env`: The environment that the API is invoked under. +- `[in] value`: The JavaScript value whose type to query. +- `[out] result`: The type of the JavaScript value. + +Returns `napi_ok` if the API succeeded. +- `napi_invalid_arg` if the type of `value` is not a known ECMAScript type and + `value` is not an External value. + +This API represents behavior similar to invoking the `typeof` Operator on +the object as defined in [Section 12.5.5][] of the ECMAScript Language +Specification. However, it has support for detecting an External value. +If `value` has a type that is invalid, an error is returned. + +### *napi_instanceof* + +```C +napi_status napi_instanceof(napi_env env, + napi_value object, + napi_value constructor, + bool* result) +``` + +- `[in] env`: The environment that the API is invoked under. +- `[in] object`: The JavaScript value to check. +- `[in] constructor`: The JavaScript function object of the constructor +function to check against. +- `[out] result`: Boolean that is set to true if `object instanceof constructor` +is true. + +Returns `napi_ok` if the API succeeded. + +This API represents invoking the `instanceof` Operator on the object as +defined in +[Section 12.10.4](https://tc39.github.io/ecma262/#sec-instanceofoperator) +of the ECMAScript Language Specification. + +### *napi_is_array* + +```C +napi_status napi_is_array(napi_env env, napi_value value, bool* result) +``` + +- `[in] env`: The environment that the API is invoked under. +- `[in] value`: The JavaScript value to check. +- `[out] result`: Whether the given object is an array. + +Returns `napi_ok` if the API succeeded. + +This API represents invoking the `IsArray` operation on the object +as defined in [Section 7.2.2](https://tc39.github.io/ecma262/#sec-isarray) +of the ECMAScript Language Specification. + +### *napi_is_arraybuffer* + +```C +napi_status napi_is_arraybuffer(napi_env env, napi_value value, bool* result) +``` + +- `[in] env`: The environment that the API is invoked under. +- `[in] value`: The JavaScript value to check. +- `[out] result`: Whether the given object is an ArrayBuffer. + +Returns `napi_ok` if the API succeeded. + +This API checks if the Object passsed in is an array buffer. + +### *napi_is_buffer* + +```C +napi_status napi_is_buffer(napi_env env, napi_value value, bool* result) +``` + +- `[in] env`: The environment that the API is invoked under. +- `[in] value`: The JavaScript value to check. +- `[out] result`: Whether the given `napi_value` represents a `node::Buffer` +object. + +Returns `napi_ok` if the API succeeded. + +This API checks if the Object passsed in is a buffer. + +### *napi_is_error* + +```C +napi_status napi_is_error(napi_env env, napi_value value, bool* result) +``` + +- `[in] env`: The environment that the API is invoked under. +- `[in] value`: The JavaScript value to check. +- `[out] result`: Whether the given `napi_value` represents an Error object. + +Returns `napi_ok` if the API succeeded. + +This API checks if the Object passsed in is an Error. 
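+
+For illustration (a sketch; `value` is assumed to be a `napi_value` received
+from JavaScript), these checks might be combined to validate an incoming value:
+
+```C
+napi_valuetype type;
+bool is_error = false;
+napi_status status = napi_typeof(env, value, &type);
+if (status == napi_ok && type == napi_object) {
+  // Distinguish plain objects from Error objects.
+  status = napi_is_error(env, value, &is_error);
+}
+```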
+ +### *napi_is_typedarray* + +```C +napi_status napi_is_typedarray(napi_env env, napi_value value, bool* result) +``` + +- `[in] env`: The environment that the API is invoked under. +- `[in] value`: The JavaScript value to check. +- `[out] result`: Whether the given `napi_value` represents a TypedArray. + +Returns `napi_ok` if the API succeeded. + +This API checks if the Object passsed in is a typed array. + +### *napi_strict_equals* + +```C +napi_status napi_strict_equals(napi_env env, + napi_value lhs, + napi_value rhs, + bool* result) +``` + +- `[in] env`: The environment that the API is invoked under. +- `[in] lhs`: The JavaScript value to check. +- `[in] rhs`: The JavaScript value to check against. +- `[out] result`: Whether the two `napi_value` objects are equal. + +Returns `napi_ok` if the API succeeded. + +This API represents the invocation of the Strict Equality algorithm as +defined in +[Section 7.2.14](https://tc39.github.io/ecma262/#sec-strict-equality-comparison) +of the ECMAScript Language Specification. + +## Working with JavaScript Properties + +N-API exposes a set of APIs to get and set properties on JavaScript +objects. Some of these types are documented under +[Section 7](https://tc39.github.io/ecma262/#sec-operations-on-objects) of the +[ECMAScript Language Specification](https://tc39.github.io/ecma262/). + +Properties in JavaScript are represented as a tuple of a key and a value. +Fundamentally, all property keys in N-API can be represented in one of the +following forms: +- Named: a simple UTF8-encoded string +- Integer-Indexed: an index value represented by `uint32_t` +- JavaScript value: these are represented in N-API by `napi_value`. This can +be a `napi_value` representing a String, Number or Symbol. + +N-API values are represented by the type `napi_value`. +Any N-API call that requires a JavaScript value takes in a `napi_value`. +However, it's the caller's responsibility to make sure that the +`napi_value` in question is of the JavaScript type expected by the API. + +The APIs documented in this section provide a simple interface to +get and set properties on arbitrary JavaScript objects represented by +`napi_value`. + +For instance, consider the following JavaScript code snippet: +```js +const obj = {}; +obj.myProp = 123; +``` +The equivalent can be done using N-API values with the following snippet: +```C +napi_status status = napi_generic_failure; + +// const obj = {} +napi_value obj, value; +status = napi_create_object(env, &obj); +if (status != napi_ok) return status; + +// Create a napi_value for 123 +status = napi_create_number(env, 123, &value); +if (status != napi_ok) return status; + +// obj.myProp = 123 +status = napi_set_named_property(env, obj, "myProp", value); +if (status != napi_ok) return status; +``` + +Indexed properties can be set in a similar manner. Consider the following +JavaScript snippet: +```js +const arr = []; +arr[123] = 'hello'; +``` +The equivalent can be done using N-API values with the following snippet: +```C +napi_status status = napi_generic_failure; + +// const arr = []; +napi_value arr, value; +status = napi_create_array(env, &arr); +if (status != napi_ok) return status; + +// Create a napi_value for 'hello' +status = napi_create_string_utf8(env, "hello", -1, &value); +if (status != napi_ok) return status; + +// arr[123] = 'hello'; +status = napi_set_element(env, arr, 123, value); +if (status != napi_ok) return status; +``` + +Properties can be retrieved using the APIs described in this section. 
+
+Consider the following JavaScript snippet:
+```js
+const arr = [];
+const value = arr[123];
+```
+
+The following is the approximate equivalent of the N-API counterpart:
+```C
+napi_status status = napi_generic_failure;
+
+// const arr = []
+napi_value arr, value;
+status = napi_create_array(env, &arr);
+if (status != napi_ok) return status;
+
+// const value = arr[123]
+status = napi_get_element(env, arr, 123, &value);
+if (status != napi_ok) return status;
+```
+
+Finally, multiple properties can also be defined on an object for performance
+reasons. Consider the following JavaScript:
+```js
+const obj = {};
+Object.defineProperties(obj, {
+  'foo': { value: 123, writable: true, configurable: true, enumerable: true },
+  'bar': { value: 456, writable: true, configurable: true, enumerable: true }
+});
+```
+
+The following is the approximate equivalent of the N-API counterpart:
+```C
+napi_status status = napi_generic_failure;
+
+// const obj = {};
+napi_value obj;
+status = napi_create_object(env, &obj);
+if (status != napi_ok) return status;
+
+// Create napi_values for 123 and 456
+napi_value fooValue, barValue;
+status = napi_create_number(env, 123, &fooValue);
+if (status != napi_ok) return status;
+status = napi_create_number(env, 456, &barValue);
+if (status != napi_ok) return status;
+
+// Set the properties
+napi_property_descriptor descriptors[] = {
+  { "foo", 0, 0, 0, fooValue, napi_default, 0 },
+  { "bar", 0, 0, 0, barValue, napi_default, 0 }
+};
+status = napi_define_properties(env,
+                                obj,
+                                sizeof(descriptors) / sizeof(descriptors[0]),
+                                descriptors);
+if (status != napi_ok) return status;
+```
+
+### Structures
+#### *napi_property_attributes*
+```C
+typedef enum {
+  napi_default = 0,
+  napi_read_only = 1 << 0,
+  napi_dont_enum = 1 << 1,
+  napi_dont_delete = 1 << 2,
+  napi_static_property = 1 << 10,
+} napi_property_attributes;
+```
+
+`napi_property_attributes` are flags used to control the behavior of properties
+set on a JavaScript object. They roughly correspond to the attributes listed in
+[Section 6.1.7.1](https://tc39.github.io/ecma262/#table-2) of the
+[ECMAScript Language Specification](https://tc39.github.io/ecma262/). They can
+be one or more of the following bitflags:
+
+- `napi_default` - Used to indicate that no explicit attributes are set on the
+given property. By default, a property is Writable, Enumerable, and
+Configurable. This is a deviation from the ECMAScript specification,
+where generally the values for a property descriptor attribute default to
+false if they're not provided.
+- `napi_read_only` - Used to indicate that a given property is not Writable.
+- `napi_dont_enum` - Used to indicate that a given property is not Enumerable.
+- `napi_dont_delete` - Used to indicate that a given property is not
+Configurable, as defined in
+[Section 6.1.7.1](https://tc39.github.io/ecma262/#table-2) of the
+[ECMAScript Language Specification](https://tc39.github.io/ecma262/).
+- `napi_static_property` - Used to indicate that the property will be defined as
+a static property on a class as opposed to an instance property, which is the
+default. This is used only by [`napi_define_class`][]. It is ignored by
+`napi_define_properties`.
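+
+For example, to create a data property that is visible but neither writable
+nor enumerable, these flags can be combined when building a property
+descriptor. The following minimal sketch assumes an existing object `obj` and
+uses a designated initializer; the property name and value are illustrative:
+
+```C
+// Sketch: define a read-only, non-enumerable "version" property on obj.
+napi_value version_value;
+napi_status status = napi_create_string_utf8(env, "1.0.0", -1, &version_value);
+if (status != napi_ok) return status;
+
+napi_property_descriptor desc = {
+  .utf8name = "version",
+  .value = version_value,
+  .attributes = napi_read_only | napi_dont_enum,
+};
+
+status = napi_define_properties(env, obj, 1, &desc);
+if (status != napi_ok) return status;
+```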
+
+#### *napi_property_descriptor*
+```C
+typedef struct {
+  const char* utf8name;
+
+  napi_callback method;
+  napi_callback getter;
+  napi_callback setter;
+  napi_value value;
+
+  napi_property_attributes attributes;
+  void* data;
+} napi_property_descriptor;
+```
+
+- `utf8name`: String describing the key for the property, encoded as UTF8.
+- `value`: The value that's retrieved by a get access of the property if the
+property is a data property. If this is passed in, set `getter`, `setter`,
+`method` and `data` to `NULL` (since these members won't be used).
+- `getter`: A function to call when a get access of the property is performed.
+If this is passed in, set `value` and `method` to `NULL` (since these members
+won't be used). The given function is called implicitly by the runtime when the
+property is accessed from JavaScript code (or if a get on the property is
+performed using an N-API call).
+- `setter`: A function to call when a set access of the property is performed.
+If this is passed in, set `value` and `method` to `NULL` (since these members
+won't be used). The given function is called implicitly by the runtime when the
+property is set from JavaScript code (or if a set on the property is
+performed using an N-API call).
+- `method`: Set this to make the property descriptor object's `value` property
+a JavaScript function represented by `method`. If this is
+passed in, set `value`, `getter` and `setter` to `NULL` (since these members
+won't be used).
+- `data`: The callback data passed into `method`, `getter` and `setter` if
+this function is invoked.
+- `attributes`: The attributes associated with the particular property.
+See [`napi_property_attributes`](#napi_property_attributes).
+
+### Functions
+#### *napi_get_property_names*
+
+```C
+napi_status napi_get_property_names(napi_env env,
+                                    napi_value object,
+                                    napi_value* result);
+```
+
+- `[in] env`: The environment that the N-API call is invoked under.
+- `[in] object`: The object from which to retrieve the properties.
+- `[out] result`: A `napi_value` representing an array of JavaScript values
+that represent the property names of the object. The API can be used to
+iterate over `result` using [`napi_get_array_length`][]
+and [`napi_get_element`][].
+
+Returns `napi_ok` if the API succeeded.
+
+This API returns the array of property names for the Object passed in.
+
+#### *napi_set_property*
+
+```C
+napi_status napi_set_property(napi_env env,
+                              napi_value object,
+                              napi_value key,
+                              napi_value value);
+```
+
+- `[in] env`: The environment that the N-API call is invoked under.
+- `[in] object`: The object on which to set the property.
+- `[in] key`: The name of the property to set.
+- `[in] value`: The property value.
+
+Returns `napi_ok` if the API succeeded.
+
+This API sets a property on the Object passed in.
+
+#### *napi_get_property*
+
+```C
+napi_status napi_get_property(napi_env env,
+                              napi_value object,
+                              napi_value key,
+                              napi_value* result);
+```
+
+- `[in] env`: The environment that the N-API call is invoked under.
+- `[in] object`: The object from which to retrieve the property.
+- `[in] key`: The name of the property to retrieve.
+- `[out] result`: The value of the property.
+
+Returns `napi_ok` if the API succeeded.
+
+This API gets the requested property from the Object passed in.
+
+
+#### *napi_has_property*
+
+```C
+napi_status napi_has_property(napi_env env,
+                              napi_value object,
+                              napi_value key,
+                              bool* result);
+```
+
+- `[in] env`: The environment that the N-API call is invoked under.
+- `[in] object`: The object to query. +- `[in] key`: The name of the property whose existence to check. +- `[out] result`: Whether the property exists on the object or not. + +Returns `napi_ok` if the API succeeded. + +This API checks if the Object passed in has the named property. + + +#### *napi_set_named_property* + +```C +napi_status napi_set_named_property(napi_env env, + napi_value object, + const char* utf8Name, + napi_value value); +``` + +- `[in] env`: The environment that the N-API call is invoked under. +- `[in] object`: The object on which to set the property. +- `[in] utf8Name`: The name of the property to set. +- `[in] value`: The property value. + +Returns `napi_ok` if the API succeeded. + +This method is equivalent to calling [`napi_set_property`][] with a `napi_value` +created from the string passed in as `utf8Name` + +#### *napi_get_named_property* + +```C +napi_status napi_get_named_property(napi_env env, + napi_value object, + const char* utf8Name, + napi_value* result); +``` + +- `[in] env`: The environment that the N-API call is invoked under. +- `[in] object`: The object from which to retrieve the property. +- `[in] utf8Name`: The name of the property to get. +- `[out] result`: The value of the property. + +Returns `napi_ok` if the API succeeded. + +This method is equivalent to calling [`napi_get_property`][] with a `napi_value` +created from the string passed in as `utf8Name` + +#### *napi_has_named_property* + +```C +napi_status napi_has_named_property(napi_env env, + napi_value object, + const char* utf8Name, + bool* result); +``` + +- `[in] env`: The environment that the N-API call is invoked under. +- `[in] object`: The object to query. +- `[in] utf8Name`: The name of the property whose existence to check. +- `[out] result`: Whether the property exists on the object or not. + +Returns `napi_ok` if the API succeeded. + +This method is equivalent to calling [`napi_has_property`][] with a `napi_value` +created from the string passed in as `utf8Name` + +#### *napi_set_element* + +```C +napi_status napi_set_element(napi_env env, + napi_value object, + uint32_t index, + napi_value value); +``` + +- `[in] env`: The environment that the N-API call is invoked under. +- `[in] object`: The object from which to set the properties. +- `[in] index`: The index of the property to set. +- `[in] value`: The property value. + +Returns `napi_ok` if the API succeeded. + +This API sets and element on the Object passed in. + +#### *napi_get_element* + +```C +napi_status napi_get_element(napi_env env, + napi_value object, + uint32_t index, + napi_value* result); +``` + +- `[in] env`: The environment that the N-API call is invoked under. +- `[in] object`: The object from which to retrieve the property. +- `[in] index`: The index of the property to get. +- `[out] result`: The value of the property. + +Returns `napi_ok` if the API succeeded. + +This API gets the element at the requested index. + +#### *napi_has_element* + +```C +napi_status napi_has_element(napi_env env, + napi_value object, + uint32_t index, + bool* result); +``` + +- `[in] env`: The environment that the N-API call is invoked under. +- `[in] object`: The object to query. +- `[in] index`: The index of the property whose existence to check. +- `[out] result`: Whether the property exists on the object or not. + +Returns `napi_ok` if the API succeeded. + +This API returns if the Object passed in has an element at the +requested index. 
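+
+As an illustration of how the element APIs combine, the following minimal
+sketch (the index and the fallback string are illustrative; `arr` is assumed
+to be a `napi_value` holding an array obtained earlier) reads an element if it
+exists and otherwise stores a default value:
+
+```C
+// Sketch: read arr[123] if present, otherwise set it to a default string.
+bool has_element = false;
+napi_status status = napi_has_element(env, arr, 123, &has_element);
+if (status != napi_ok) return status;
+
+napi_value value;
+if (has_element) {
+  // const value = arr[123];
+  status = napi_get_element(env, arr, 123, &value);
+} else {
+  // arr[123] = 'default';
+  status = napi_create_string_utf8(env, "default", -1, &value);
+  if (status != napi_ok) return status;
+  status = napi_set_element(env, arr, 123, value);
+}
+if (status != napi_ok) return status;
+```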
+
+#### *napi_define_properties*
+
+```C
+napi_status napi_define_properties(napi_env env,
+                                   napi_value object,
+                                   size_t property_count,
+                                   const napi_property_descriptor* properties);
+```
+
+- `[in] env`: The environment that the N-API call is invoked under.
+- `[in] object`: The object on which to set the properties.
+- `[in] property_count`: The number of elements in the `properties` array.
+- `[in] properties`: The array of property descriptors.
+
+Returns `napi_ok` if the API succeeded.
+
+This method allows the efficient definition of multiple properties on a given
+object. The properties are defined using property descriptors (see
+[`napi_property_descriptor`][]). Given an array of such property descriptors,
+this API will set the properties on the object one at a time, as defined by
+DefineOwnProperty (described in [Section 9.1.6][] of the ECMA262 specification).
+
+## Working with JavaScript Functions
+
+N-API provides a set of APIs that allow JavaScript code to
+call back into native code. N-API APIs that support calling back
+into native code take in callback functions represented by
+the `napi_callback` type. When the JavaScript VM calls back to
+native code, the `napi_callback` function provided is invoked. The APIs
+documented in this section allow the callback function to do the
+following:
+- Get information about the context in which the callback was invoked.
+- Get the arguments passed into the callback.
+- Return a `napi_value` back from the callback.
+
+Additionally, N-API provides a set of functions which allow calling
+JavaScript functions from native code. A function can be invoked either
+as a regular JavaScript function call or as a constructor
+function.
+
+
+### *napi_call_function*
+
+```C
+napi_status napi_call_function(napi_env env,
+                               napi_value recv,
+                               napi_value func,
+                               size_t argc,
+                               const napi_value* argv,
+                               napi_value* result)
+```
+
+- `[in] env`: The environment that the API is invoked under.
+- `[in] recv`: The `this` object passed to the called function.
+- `[in] func`: `napi_value` representing the JavaScript function
+to be invoked.
+- `[in] argc`: The count of elements in the `argv` array.
+- `[in] argv`: Array of `napi_values` representing JavaScript values passed
+in as arguments to the function.
+- `[out] result`: `napi_value` representing the JavaScript object returned.
+
+Returns `napi_ok` if the API succeeded.
+
+This method allows a JavaScript function object to be called from a native
+add-on. This is the primary mechanism of calling back *from* the add-on's
+native code *into* JavaScript. For special cases like calling into JavaScript
+after an async operation, see [`napi_make_callback`][].
+
+A sample use case might look as follows. 
Consider the following JavaScript +snippet: +```js +function AddTwo(num) { + return num + 2; +} +``` + +Then, the above function can be invoked from a native add-on using the +following code: +```C +// Get the function named "AddTwo" on the global object +napi_value global, add_two, arg; +napi_status status = napi_get_global(env, &global); +if (status != napi_ok) return; + +status = napi_get_named_property(env, global, "AddTwo", &add_two); +if (status != napi_ok) return; + +// const arg = 1337 +status = napi_create_number(env, 1337, &arg); +if (status != napi_ok) return; + +napi_value* argv = &arg; +size_t argc = 1; + +// AddTwo(arg); +napi_value return_val; +status = napi_call_function(env, global, add_two, argc, argv, &return_val); +if (status != napi_ok) return; + +// Convert the result back to a native type +int32_t result; +status = napi_get_value_int32(env, return_val, &result); +if (status != napi_ok) return; +``` + +### *napi_create_function* + +```C +napi_status napi_create_function(napi_env env, + const char* utf8name, + napi_callback cb, + void* data, + napi_value* result); +``` + +- `[in] env`: The environment that the API is invoked under. +- `[in] utf8Name`: The name of the function encoded as UTF8. This is visible +within JavaScript as the new function object's `name` property. +- `[in] cb`: The native function which should be called when this function +object is invoked. +- `[in] data`: User-provided data context. This will be passed back into the +function when invoked later. +- `[out] result`: `napi_value` representing the JavaScript function object for +the newly created function. + +Returns `napi_ok` if the API succeeded. + +This API allows an add-on author to create a function object in native code. +This is the primary mechanism to allow calling *into* the add-on's native code +*from* Javascript. + +**Note:** The newly created function is not automatically visible from +script after this call. Instead, a property must be explicitly set on any +object that is visible to JavaScript, in order for the function to be accessible +from script. + +In order to expose a function as part of the +add-on's module exports, set the newly created function on the exports +object. A sample module might look as follows: +```C +void SayHello(napi_env env, napi_callback_info info) { + printf("Hello\n"); +} + +void Init(napi_env env, napi_value exports, napi_value module, void* priv) { + napi_status status; + + napi_value fn; + status = napi_create_function(env, NULL, SayHello, NULL, &fn); + if (status != napi_ok) return; + + status = napi_set_named_property(env, exports, "sayHello", fn); + if (status != napi_ok) return; +} + +NAPI_MODULE(addon, Init) +``` + +Given the above code, the add-on can be used from JavaScript as follows: +```js +const myaddon = require('./addon'); +myaddon.sayHello(); +``` + +**Note:** The string passed to require is not necessarily the name passed into +`NAPI_MODULE` in the earlier snippet but the name of the target in `binding.gyp` +responsible for creating the `.node` file. + +### *napi_get_cb_info* + +```C +napi_status napi_get_cb_info(napi_env env, + napi_callback_info cbinfo, + size_t* argc, + napi_value* argv, + napi_value* thisArg, + void** data) +``` + +- `[in] env`: The environment that the API is invoked under. +- `[in] cbinfo`: The callback info passed into the callback function. +- `[in-out] argc`: Specifies the size of the provided `argv` array +and receives the actual count of arguments. 
+
+- `[out] argv`: Buffer to which the `napi_value` representing the
+arguments are copied. If there are more arguments than the provided
+count, only the requested number of arguments are copied. If there are fewer
+arguments provided than claimed, the rest of `argv` is filled with `napi_value`
+values that represent `undefined`.
+- `[out] thisArg`: Receives the JavaScript `this` argument for the call.
+- `[out] data`: Receives the data pointer for the callback.
+
+Returns `napi_ok` if the API succeeded.
+
+This method is used within a callback function to retrieve details about the
+call like the arguments and the `this` pointer from a given callback info.
+
+### *napi_is_construct_call*
+
+```C
+napi_status napi_is_construct_call(napi_env env,
+                                   napi_callback_info cbinfo,
+                                   bool* result)
+```
+
+- `[in] env`: The environment that the API is invoked under.
+- `[in] cbinfo`: The callback info passed into the callback function.
+- `[out] result`: Whether the native function is being invoked as
+a constructor call.
+
+Returns `napi_ok` if the API succeeded.
+
+This API checks if the current callback was made due to a
+constructor call.
+
+### *napi_new_instance*
+
+```C
+napi_status napi_new_instance(napi_env env,
+                              napi_value cons,
+                              size_t argc,
+                              napi_value* argv,
+                              napi_value* result)
+```
+
+- `[in] env`: The environment that the API is invoked under.
+- `[in] cons`: `napi_value` representing the JavaScript function
+to be invoked as a constructor.
+- `[in] argc`: The count of elements in the `argv` array.
+- `[in] argv`: Array of JavaScript values as `napi_value`
+representing the arguments to the constructor.
+- `[out] result`: `napi_value` representing the JavaScript object returned,
+which in this case is the constructed object.
+
+This method is used to instantiate a new JavaScript value using a given
+`napi_value` that represents the constructor for the object. For example,
+consider the following snippet:
+```js
+function MyObject(param) {
+  this.param = param;
+}
+
+const arg = 'hello';
+const value = new MyObject(arg);
+```
+
+The following is the approximate equivalent of the N-API counterpart:
+```C
+// Get the constructor function MyObject
+napi_value global, constructor, arg, value;
+napi_status status = napi_get_global(env, &global);
+if (status != napi_ok) return;
+
+status = napi_get_named_property(env, global, "MyObject", &constructor);
+if (status != napi_ok) return;
+
+// const arg = "hello"
+status = napi_create_string_utf8(env, "hello", -1, &arg);
+if (status != napi_ok) return;
+
+napi_value* argv = &arg;
+size_t argc = 1;
+
+// const value = new MyObject(arg)
+status = napi_new_instance(env, constructor, argc, argv, &value);
+```
+
+Returns `napi_ok` if the API succeeded.
+
+### *napi_make_callback*
+
+```C
+napi_status napi_make_callback(napi_env env,
+                               napi_value recv,
+                               napi_value func,
+                               size_t argc,
+                               const napi_value* argv,
+                               napi_value* result)
+```
+
+- `[in] env`: The environment that the API is invoked under.
+- `[in] recv`: The `this` object passed to the called function.
+- `[in] func`: `napi_value` representing the JavaScript function
+to be invoked.
+- `[in] argc`: The count of elements in the `argv` array.
+- `[in] argv`: Array of JavaScript values as `napi_value`
+representing the arguments to the function.
+- `[out] result`: `napi_value` representing the JavaScript object returned.
+
+Returns `napi_ok` if the API succeeded.
+
+This method allows a JavaScript function object to be called from a native
+add-on. This API is similar to `napi_call_function`. 
However, it is used to call +*from* native code back *into* JavaScript *after* returning from an async +operation (when there is no other script on the stack). It is a fairly simple +wrapper around `node::MakeCallback`. + +For an example on how to use `napi_make_callback`, see the section on +[Asynchronous Operations][]. + +## Object Wrap + +N-API offers a way to "wrap" C++ classes and instances so that the class +constructor and methods can be called from JavaScript. + + 1. The [`napi_define_class`][] API defines a JavaScript class with constructor, + static properties and methods, and instance properties and methods that + correspond to the The C++ class. + 2. When JavaScript code invokes the constructor, the constructor callback + uses [`napi_wrap`][] to wrap a new C++ instance in a JavaScript object, + then returns the wrapper object. + 3. When JavaScript code invokes a method or property accessor on the class, + the corresponding `napi_callback` C++ function is invoked. For an instance + callback, [`napi_unwrap`][] obtains the C++ instance that is the target of + the call. + +### *napi_define_class* + +```C +napi_status napi_define_class(napi_env env, + const char* utf8name, + napi_callback constructor, + void* data, + size_t property_count, + const napi_property_descriptor* properties, + napi_value* result); +``` + + - `[in] env`: The environment that the API is invoked under. + - `[in] utf8name`: Name of the JavaScript constructor function; this is + not required to be the same as the C++ class name, though it is recommended + for clarity. + - `[in] constructor`: Callback function that handles constructing instances + of the class. (This should be a static method on the class, not an actual + C++ constructor function.) + - `[in] data`: Optional data to be passed to the constructor callback as + the `data` property of the callback info. + - `[in] property_count`: Number of items in the `properties` array argument. + - `[in] properties`: Array of property descriptors describing static and + instance data properties, accessors, and methods on the class + See `napi_property_descriptor`. + - `[out] result`: A `napi_value` representing the constructor function for + the class. + +Returns `napi_ok` if the API succeeded. + +Defines a JavaScript class that corresponds to a C++ class, including: + - A JavaScript constructor function that has the class name and invokes the + provided C++ constructor callback. + - Properties on the constructor function corresponding to _static_ data + properties, accessors, and methods of the C++ class (defined by + property descriptors with the `napi_static` attribute). + - Properties on the constructor function's `prototype` object corresponding to + _non-static_ data properties, accessors, and methods of the C++ class + (defined by property descriptors without the `napi_static` attribute). + +The C++ constructor callback should be a static method on the class that calls +the actual class constructor, then wraps the new C++ instance in a JavaScript +object, and returns the wrapper object. See `napi_wrap()` for details. + +The JavaScript constructor function returned from [`napi_define_class`][] is +often saved and used later, to construct new instances of the class from native +code, and/or check whether provided values are instances of the class. In that +case, to prevent the function value from being garbage-collected, create a +persistent reference to it using [`napi_create_reference`][] and ensure the +reference count is kept >= 1. 
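+
+As an illustration, an add-on might register such a class from its module
+initialization function. This is a minimal sketch under assumptions: the class
+name `MyObject`, the `Constructor` and `PlusOne` callbacks, and the `exports`
+value are illustrative, and the callback bodies are elided (the constructor
+would typically call `napi_wrap()`, described next):
+
+```C
+// Forward declarations of the illustrative callbacks.
+napi_value Constructor(napi_env env, napi_callback_info info);
+napi_value PlusOne(napi_env env, napi_callback_info info);
+
+// One instance method, described by a property descriptor.
+napi_property_descriptor properties[] = {
+  { .utf8name = "plusOne", .method = PlusOne, .attributes = napi_default },
+};
+
+napi_value cons;
+napi_status status = napi_define_class(env,
+                                       "MyObject",
+                                       Constructor,
+                                       NULL,
+                                       sizeof(properties) / sizeof(properties[0]),
+                                       properties,
+                                       &cons);
+if (status != napi_ok) return;
+
+// Expose the constructor as exports.MyObject.
+status = napi_set_named_property(env, exports, "MyObject", cons);
+if (status != napi_ok) return;
+```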
+
+### *napi_wrap*
+
+```C
+napi_status napi_wrap(napi_env env,
+                      napi_value js_object,
+                      void* native_object,
+                      napi_finalize finalize_cb,
+                      void* finalize_hint,
+                      napi_ref* result);
+```
+
+ - `[in] env`: The environment that the API is invoked under.
+ - `[in] js_object`: The JavaScript object that will be the wrapper for the
+   native object. This object _must_ have been created from the `prototype` of
+   a constructor that was created using `napi_define_class()`.
+ - `[in] native_object`: The native instance that will be wrapped in the
+   JavaScript object.
+ - `[in] finalize_cb`: Optional native callback that can be used to free the
+   native instance when the JavaScript object is ready for garbage-collection.
+ - `[in] finalize_hint`: Optional contextual hint that is passed to the
+   finalize callback.
+ - `[out] result`: Optional reference to the wrapped object.
+
+Returns `napi_ok` if the API succeeded.
+
+Wraps a native instance in a JavaScript object of the corresponding type.
+
+When JavaScript code invokes a constructor for a class that was defined using
+`napi_define_class()`, the `napi_callback` for the constructor is invoked.
+After constructing an instance of the native class, the callback must then call
+`napi_wrap()` to wrap the newly constructed instance in the already-created
+JavaScript object that is the `this` argument to the constructor callback.
+(That `this` object was created from the constructor function's `prototype`,
+so it already has definitions of all the instance properties and methods.)
+
+Typically when wrapping a class instance, a finalize callback should be
+provided that simply deletes the native instance that is received as the `data`
+argument to the finalize callback.
+
+The optional returned reference is initially a weak reference, meaning it
+has a reference count of 0. Typically this reference count would be incremented
+temporarily during async operations that require the instance to remain valid.
+
+Caution: The optional returned reference (if obtained) should be deleted via
+[`napi_delete_reference`][] ONLY in response to the finalize callback
+invocation. (If it is deleted before then, the finalize callback may never be
+invoked.) Therefore, when obtaining a reference, a finalize callback is also
+required in order to enable the correct disposal of the reference.
+
+### *napi_unwrap*
+
+```C
+napi_status napi_unwrap(napi_env env,
+                        napi_value js_object,
+                        void** result);
+```
+
+ - `[in] env`: The environment that the API is invoked under.
+ - `[in] js_object`: The object associated with the C++ class instance.
+ - `[out] result`: Pointer to the wrapped C++ class instance.
+
+Returns `napi_ok` if the API succeeded.
+
+When JavaScript code invokes a method or property accessor on the class, the
+corresponding `napi_callback` is invoked. If the callback is for an instance
+method or accessor, then the `this` argument to the callback is the wrapper
+object; the wrapped C++ instance that is the target of the call can then be
+obtained by calling `napi_unwrap()` on the wrapper object.
+
+## Asynchronous Operations
+
+Addon modules often need to leverage async helpers from libuv as part of their
+implementation. This allows them to schedule work to be executed asynchronously
+so that their methods can return in advance of the work being completed. This
+is important in order to allow them to avoid blocking overall execution
+of the Node.js application.
+
+N-API provides an ABI-stable interface for these
+supporting functions which covers the most common asynchronous use cases.
+
+N-API defines the `napi_async_work` structure which is used to manage
+asynchronous workers. Instances are created/deleted with
+[`napi_create_async_work`][] and [`napi_delete_async_work`][].
+
+The `execute` and `complete` callbacks are functions that will be
+invoked when the executor is ready to execute and when it completes its
+task, respectively. These functions implement the following interfaces:
+
+```C
+typedef void (*napi_async_execute_callback)(napi_env env,
+                                            void* data);
+typedef void (*napi_async_complete_callback)(napi_env env,
+                                             napi_status status,
+                                             void* data);
+```
+
+
+When these methods are invoked, the `data` parameter passed will be the
+addon-provided `void*` data that was passed into the
+`napi_create_async_work` call.
+
+Once created, the async worker can be queued
+for execution using the [`napi_queue_async_work`][] function:
+
+```C
+NAPI_EXTERN napi_status napi_queue_async_work(napi_env env,
+                                              napi_async_work work);
+```
+
+[`napi_cancel_async_work`][] can be used if the work needs
+to be cancelled before it has started execution.
+
+After calling [`napi_cancel_async_work`][], the `complete` callback
+will be invoked with a status value of `napi_cancelled`.
+The work should not be deleted before the `complete`
+callback invocation, even when it was cancelled.
+
+**Note:** As mentioned in the section on memory management, if
+the code to be run in the callbacks will create N-API values, then
+N-API handle scope functions must be used to create/destroy a
+`napi_handle_scope` such that the scope is active when
+objects can be created.
+
+
+### napi_create_async_work
+
+```C
+NAPI_EXTERN
+napi_status napi_create_async_work(napi_env env,
+                                   napi_async_execute_callback execute,
+                                   napi_async_complete_callback complete,
+                                   void* data,
+                                   napi_async_work* result);
+```
+
+- `[in] env`: The environment that the API is invoked under.
+- `[in] execute`: The native function which should be called to execute
+the logic asynchronously.
+- `[in] complete`: The native function which will be called when the
+asynchronous logic is completed or is cancelled.
+- `[in] data`: User-provided data context. This will be passed back into the
+execute and complete functions.
+- `[out] result`: `napi_async_work*` which is the handle to the newly created
+async work.
+
+Returns `napi_ok` if the API succeeded.
+
+This API allocates a work object that is used to execute logic asynchronously.
+It should be freed using [`napi_delete_async_work`][] once the work is no longer
+required.
+
+### napi_delete_async_work
+
+```C
+NAPI_EXTERN napi_status napi_delete_async_work(napi_env env,
+                                               napi_async_work work);
+```
+
+- `[in] env`: The environment that the API is invoked under.
+- `[in] work`: The handle returned by the call to `napi_create_async_work`.
+
+Returns `napi_ok` if the API succeeded.
+
+This API frees a previously allocated work object.
+
+### napi_queue_async_work
+
+```C
+NAPI_EXTERN napi_status napi_queue_async_work(napi_env env,
+                                              napi_async_work work);
+```
+
+- `[in] env`: The environment that the API is invoked under.
+- `[in] work`: The handle returned by the call to `napi_create_async_work`.
+
+Returns `napi_ok` if the API succeeded.
+
+This API requests that the previously allocated work be scheduled
+for execution.
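+
+Putting these pieces together, a typical flow looks like the following minimal
+sketch. The `Execute` and `Complete` callbacks and the `addon_data` struct are
+illustrative assumptions, error handling is abbreviated, and `malloc`/`free`
+(from `<stdlib.h>`) stand in for whatever allocation scheme the add-on uses:
+
+```C
+// Sketch: allocate, queue and eventually free an async work item.
+typedef struct {
+  int input;
+  int result;
+  napi_async_work work;
+} addon_data;
+
+void Execute(napi_env env, void* data) {
+  addon_data* d = (addon_data*)data;
+  // Runs off the main loop; no JavaScript values may be created or used here.
+  d->result = d->input * 2;
+}
+
+void Complete(napi_env env, napi_status status, void* data) {
+  addon_data* d = (addon_data*)data;
+  // Back on the main loop; JavaScript may be called, e.g. via napi_make_callback.
+  napi_delete_async_work(env, d->work);
+  free(d);
+}
+
+// Inside a napi_callback that starts the asynchronous operation:
+addon_data* d = malloc(sizeof(*d));
+d->input = 21;
+napi_status status = napi_create_async_work(env, Execute, Complete, d, &d->work);
+if (status != napi_ok) return NULL;
+status = napi_queue_async_work(env, d->work);
+if (status != napi_ok) return NULL;
+```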
+
+### napi_cancel_async_work
+
+```C
+NAPI_EXTERN napi_status napi_cancel_async_work(napi_env env,
+                                               napi_async_work work);
+```
+
+- `[in] env`: The environment that the API is invoked under.
+- `[in] work`: The handle returned by the call to `napi_create_async_work`.
+
+Returns `napi_ok` if the API succeeded.
+
+This API cancels previously queued work, provided
+it has not yet started execution. After this function is called,
+the `complete` callback will be invoked with a status value of
+`napi_cancelled`. The work should not be deleted before the `complete`
+callback invocation, even when it was cancelled.
+
+
+[Asynchronous Operations]: #n_api_asynchronous_operations
+[Basic N-API Data Types]: #n_api_basic_n_api_data_types
+[ECMAScript Language Specification]: https://tc39.github.io/ecma262/
+[Error Handling]: #n_api_error_handling
+[Module Registration]: #n_api_module_registration
+[Native Abstractions for Node.js]: https://github.com/nodejs/nan
+[Object Lifetime Management]: #n_api_object_lifetime_management
+[Object Wrap]: #n_api_object_wrap
+[Section 9.1.6]: https://tc39.github.io/ecma262/#sec-ordinary-object-internal-methods-and-internal-slots-defineownproperty-p-desc
+[Section 12.5.5]: https://tc39.github.io/ecma262/#sec-typeof-operator
+[Working with JavaScript Functions]: #n_api_working_with_javascript_functions
+[Working with JavaScript Properties]: #n_api_working_with_javascript_properties
+[Working with JavaScript Values]: #n_api_working_with_javascript_values
+[Working with JavaScript Values - Abstract Operations]: #n_api_working_with_javascript_values_abstract_operations
+
+[`napi_cancel_async_work`]: #n_api_napi_cancel_async_work
+[`napi_close_escapable_handle_scope`]: #n_api_napi_close_escapable_handle_scope
+[`napi_close_handle_scope`]: #n_api_napi_close_handle_scope
+[`napi_create_async_work`]: #n_api_napi_create_async_work
+[`napi_create_error`]: #n_api_napi_create_error
+[`napi_create_external_arraybuffer`]: #n_api_napi_create_external_arraybuffer
+[`napi_create_range_error`]: #n_api_napi_create_range_error
+[`napi_create_reference`]: #n_api_napi_create_reference
+[`napi_create_type_error`]: #n_api_napi_create_type_error
+[`napi_define_class`]: #n_api_napi_define_class
+[`napi_delete_async_work`]: #n_api_napi_delete_async_work
+[`napi_delete_reference`]: #n_api_napi_delete_reference
+[`napi_escape_handle`]: #n_api_napi_escape_handle
+[`napi_get_array_length`]: #n_api_napi_get_array_length
+[`napi_get_element`]: #n_api_napi_get_element
+[`napi_get_property`]: #n_api_napi_get_property
+[`napi_has_property`]: #n_api_napi_has_property
+[`napi_set_property`]: #n_api_napi_set_property
+[`napi_get_reference_value`]: #n_api_napi_get_reference_value
+[`napi_is_error`]: #n_api_napi_is_error
+[`napi_is_exception_pending`]: #n_api_napi_is_exception_pending
+[`napi_get_last_error_info`]: #n_api_napi_get_last_error_info
+[`napi_get_and_clear_last_exception`]: #n_api_napi_get_and_clear_last_exception
+[`napi_make_callback`]: #n_api_napi_make_callback
+[`napi_open_escapable_handle_scope`]: #n_api_napi_open_escapable_handle_scope
+[`napi_open_handle_scope`]: #n_api_napi_open_handle_scope
+[`napi_property_descriptor`]: #n_api_napi_property_descriptor
+[`napi_queue_async_work`]: #n_api_napi_queue_async_work
+[`napi_reference_ref`]: #n_api_napi_reference_ref
+[`napi_reference_unref`]: #n_api_napi_reference_unref
+[`napi_throw_error`]: #n_api_napi_throw_error
+[`napi_throw_range_error`]: 
#n_api_napi_throw_range_error +[`napi_throw_type_error`]: #n_api_napi_throw_type_error +[`napi_unwrap`]: #n_api_napi_unwrap +[`napi_wrap`]: #n_api_napi_wrap From 5d2afb2174ef15bade62de882cb78cc4f449a59f Mon Sep 17 00:00:00 2001 From: gwer Date: Sun, 23 Apr 2017 02:01:54 +0300 Subject: [PATCH 069/227] test: replace indexOf with includes Start the transition to Array.prototype.includes() and String.prototype.includes(). This commit refactors most of the comparisons of Array.prototype.indexOf() and String.prototype.indexOf() return values with -1 to the former methods in tests. Backport-PR-URL: https://github.com/nodejs/node/pull/19447 PR-URL: https://github.com/nodejs/node/pull/12604 Refs: https://github.com/nodejs/node/issues/12586 Reviewed-By: Alexey Orlenko Reviewed-By: James M Snell Reviewed-By: Gibson Fahnestock --- test/addons-napi/test_constructor/test.js | 16 ++++++++-------- test/addons-napi/test_properties/test.js | 16 ++++++++-------- 2 files changed, 16 insertions(+), 16 deletions(-) diff --git a/test/addons-napi/test_constructor/test.js b/test/addons-napi/test_constructor/test.js index 92440bf49e74c1..26083db7a28a21 100644 --- a/test/addons-napi/test_constructor/test.js +++ b/test/addons-napi/test_constructor/test.js @@ -22,14 +22,14 @@ const propertyNames = []; for (const name in test_object) { propertyNames.push(name); } -assert.ok(propertyNames.indexOf('echo') >= 0); -assert.ok(propertyNames.indexOf('readwriteValue') >= 0); -assert.ok(propertyNames.indexOf('readonlyValue') >= 0); -assert.ok(propertyNames.indexOf('hiddenValue') < 0); -assert.ok(propertyNames.indexOf('readwriteAccessor1') < 0); -assert.ok(propertyNames.indexOf('readwriteAccessor2') < 0); -assert.ok(propertyNames.indexOf('readonlyAccessor1') < 0); -assert.ok(propertyNames.indexOf('readonlyAccessor2') < 0); +assert.ok(propertyNames.includes('echo')); +assert.ok(propertyNames.includes('readwriteValue')); +assert.ok(propertyNames.includes('readonlyValue')); +assert.ok(!propertyNames.includes('hiddenValue')); +assert.ok(!propertyNames.includes('readwriteAccessor1')); +assert.ok(!propertyNames.includes('readwriteAccessor2')); +assert.ok(!propertyNames.includes('readonlyAccessor1')); +assert.ok(!propertyNames.includes('readonlyAccessor2')); // The napi_writable attribute should be ignored for accessors. 
test_object.readwriteAccessor1 = 1; diff --git a/test/addons-napi/test_properties/test.js b/test/addons-napi/test_properties/test.js index 868c603879f1a2..a8127a27860eb3 100644 --- a/test/addons-napi/test_properties/test.js +++ b/test/addons-napi/test_properties/test.js @@ -21,14 +21,14 @@ const propertyNames = []; for (const name in test_object) { propertyNames.push(name); } -assert.ok(propertyNames.indexOf('echo') >= 0); -assert.ok(propertyNames.indexOf('readwriteValue') >= 0); -assert.ok(propertyNames.indexOf('readonlyValue') >= 0); -assert.ok(propertyNames.indexOf('hiddenValue') < 0); -assert.ok(propertyNames.indexOf('readwriteAccessor1') < 0); -assert.ok(propertyNames.indexOf('readwriteAccessor2') < 0); -assert.ok(propertyNames.indexOf('readonlyAccessor1') < 0); -assert.ok(propertyNames.indexOf('readonlyAccessor2') < 0); +assert.ok(propertyNames.includes('echo')); +assert.ok(propertyNames.includes('readwriteValue')); +assert.ok(propertyNames.includes('readonlyValue')); +assert.ok(!propertyNames.includes('hiddenValue')); +assert.ok(!propertyNames.includes('readwriteAccessor1')); +assert.ok(!propertyNames.includes('readwriteAccessor2')); +assert.ok(!propertyNames.includes('readonlyAccessor1')); +assert.ok(!propertyNames.includes('readonlyAccessor2')); // The napi_writable attribute should be ignored for accessors. test_object.readwriteAccessor1 = 1; From b2bf6c873f4c29fdce6c5b22314327df4e93791e Mon Sep 17 00:00:00 2001 From: Rich Trott Date: Thu, 27 Apr 2017 17:27:05 -0700 Subject: [PATCH 070/227] test,lib,doc: use function declarations Replace function expressions with function declarations in preparation for a lint rule requiring function declarations. Backport-PR-URL: https://github.com/nodejs/node/pull/19447 PR-URL: https://github.com/nodejs/node/pull/12711 Reviewed-By: Vse Mozhet Byt Reviewed-By: Gibson Fahnestock --- test/addons-napi/test_async/test.js | 5 ++--- test/addons-napi/test_exception/test.js | 6 +++--- test/addons-napi/test_instanceof/test.js | 4 ++-- 3 files changed, 7 insertions(+), 8 deletions(-) diff --git a/test/addons-napi/test_async/test.js b/test/addons-napi/test_async/test.js index 7c140d79fc054f..2b4577624a371e 100644 --- a/test/addons-napi/test_async/test.js +++ b/test/addons-napi/test_async/test.js @@ -6,8 +6,7 @@ const test_async = require(`./build/${common.buildType}/test_async`); test_async.Test(5, common.mustCall(function(err, val) { assert.strictEqual(err, null); assert.strictEqual(val, 10); - process.nextTick(common.mustCall(function() {})); + process.nextTick(common.mustCall()); })); -const cancelSuceeded = function() {}; -test_async.TestCancel(common.mustCall(cancelSuceeded)); +test_async.TestCancel(common.mustCall()); diff --git a/test/addons-napi/test_exception/test.js b/test/addons-napi/test_exception/test.js index 83d2b5000ec54a..94f9566a4b341f 100644 --- a/test/addons-napi/test_exception/test.js +++ b/test/addons-napi/test_exception/test.js @@ -4,12 +4,12 @@ const common = require('../../common'); const test_exception = require(`./build/${common.buildType}/test_exception`); const assert = require('assert'); const theError = new Error('Some error'); -const throwTheError = function() { +function throwTheError() { throw theError; -}; +} let caughtError; -const throwNoError = function() {}; +const throwNoError = common.noop; // Test that the native side successfully captures the exception let returnedError = test_exception.returnException(throwTheError); diff --git a/test/addons-napi/test_instanceof/test.js 
b/test/addons-napi/test_instanceof/test.js index 38d17031e9a6c9..418149d1909e6f 100644 --- a/test/addons-napi/test_instanceof/test.js +++ b/test/addons-napi/test_instanceof/test.js @@ -57,14 +57,14 @@ if (typeof Symbol !== 'undefined' && 'hasInstance' in Symbol && (theObject instanceof theConstructor)); } - const MyClass = function MyClass() {}; + function MyClass() {} Object.defineProperty(MyClass, Symbol.hasInstance, { value: function(candidate) { return 'mark' in candidate; } }); - const MySubClass = function MySubClass() {}; + function MySubClass() {} MySubClass.prototype = new MyClass(); let x = new MySubClass(); From 7507d1e0e66dc69852f0a230d01080fab4ac4c08 Mon Sep 17 00:00:00 2001 From: Gabriel Schulhof Date: Thu, 27 Apr 2017 22:15:36 +0300 Subject: [PATCH 071/227] n-api: remove unnecessary try-catch bracket from certain APIs These APIs do not need a try-catch around their body, because no exceptions are thrown in their implementation: - `napi_is_array()` - `napi_get_value_string_latin1()` - `napi_get_value_string_utf8()` - `napi_get_value_string_utf16()` - `napi_get_value_external()` - `napi_is_buffer()` - `napi_is_arraybuffer()` - `napi_get_arraybuffer_info()` - `napi_is_typedarray()` - `napi_get_typedarray_info()` Fixes: https://github.com/nodejs/abi-stable-node/issues/238 Backport-PR-URL: https://github.com/nodejs/node/pull/19447 PR-URL: https://github.com/nodejs/node/pull/12705 Reviewed-By: Michael Dawson Reviewed-By: Colin Ihrig Reviewed-By: Jason Ginchereau Reviewed-By: Anna Henningsen --- src/node_api.cc | 42 +++++++++++++++++++++--------------------- 1 file changed, 21 insertions(+), 21 deletions(-) diff --git a/src/node_api.cc b/src/node_api.cc index d1ca7bec27c269..4113124b093419 100644 --- a/src/node_api.cc +++ b/src/node_api.cc @@ -1209,14 +1209,14 @@ napi_status napi_define_properties(napi_env env, } napi_status napi_is_array(napi_env env, napi_value value, bool* result) { - NAPI_PREAMBLE(env); + CHECK_ENV(env); CHECK_ARG(env, value); CHECK_ARG(env, result); v8::Local val = v8impl::V8LocalValueFromJsValue(value); *result = val->IsArray(); - return GET_RETURN_STATUS(env); + return napi_clear_last_error(env); } napi_status napi_get_array_length(napi_env env, @@ -1777,7 +1777,7 @@ napi_status napi_get_value_string_latin1(napi_env env, char* buf, size_t bufsize, size_t* result) { - NAPI_PREAMBLE(env); + CHECK_ENV(env); CHECK_ARG(env, value); v8::Local val = v8impl::V8LocalValueFromJsValue(value); @@ -1797,7 +1797,7 @@ napi_status napi_get_value_string_latin1(napi_env env, } } - return GET_RETURN_STATUS(env); + return napi_clear_last_error(env); } // Copies a JavaScript string into a UTF-8 string buffer. The result is the @@ -1813,7 +1813,7 @@ napi_status napi_get_value_string_utf8(napi_env env, char* buf, size_t bufsize, size_t* result) { - NAPI_PREAMBLE(env); + CHECK_ENV(env); CHECK_ARG(env, value); v8::Local val = v8impl::V8LocalValueFromJsValue(value); @@ -1833,7 +1833,7 @@ napi_status napi_get_value_string_utf8(napi_env env, } } - return GET_RETURN_STATUS(env); + return napi_clear_last_error(env); } // Copies a JavaScript string into a UTF-16 string buffer. 
The result is the @@ -1849,7 +1849,7 @@ napi_status napi_get_value_string_utf16(napi_env env, char16_t* buf, size_t bufsize, size_t* result) { - NAPI_PREAMBLE(env); + CHECK_ENV(env); CHECK_ARG(env, value); v8::Local val = v8impl::V8LocalValueFromJsValue(value); @@ -1870,7 +1870,7 @@ napi_status napi_get_value_string_utf16(napi_env env, } } - return GET_RETURN_STATUS(env); + return napi_clear_last_error(env); } napi_status napi_coerce_to_object(napi_env env, @@ -2024,13 +2024,13 @@ napi_status napi_create_external(napi_env env, *result = v8impl::JsValueFromV8LocalValue(external_value); - return GET_RETURN_STATUS(env); + return napi_clear_last_error(env); } napi_status napi_get_value_external(napi_env env, napi_value value, void** result) { - NAPI_PREAMBLE(env); + CHECK_ENV(env); CHECK_ARG(env, value); CHECK_ARG(env, result); @@ -2040,7 +2040,7 @@ napi_status napi_get_value_external(napi_env env, v8::Local external_value = val.As(); *result = external_value->Value(); - return GET_RETURN_STATUS(env); + return napi_clear_last_error(env); } // Set initial_refcount to 0 for a weak reference, >0 for a strong reference. @@ -2481,12 +2481,12 @@ napi_status napi_create_buffer_copy(napi_env env, } napi_status napi_is_buffer(napi_env env, napi_value value, bool* result) { - NAPI_PREAMBLE(env); + CHECK_ENV(env); CHECK_ARG(env, value); CHECK_ARG(env, result); *result = node::Buffer::HasInstance(v8impl::V8LocalValueFromJsValue(value)); - return GET_RETURN_STATUS(env); + return napi_clear_last_error(env); } napi_status napi_get_buffer_info(napi_env env, @@ -2510,14 +2510,14 @@ napi_status napi_get_buffer_info(napi_env env, } napi_status napi_is_arraybuffer(napi_env env, napi_value value, bool* result) { - NAPI_PREAMBLE(env); + CHECK_ENV(env); CHECK_ARG(env, value); CHECK_ARG(env, result); v8::Local val = v8impl::V8LocalValueFromJsValue(value); *result = val->IsArrayBuffer(); - return GET_RETURN_STATUS(env); + return napi_clear_last_error(env); } napi_status napi_create_arraybuffer(napi_env env, @@ -2574,7 +2574,7 @@ napi_status napi_get_arraybuffer_info(napi_env env, napi_value arraybuffer, void** data, size_t* byte_length) { - NAPI_PREAMBLE(env); + CHECK_ENV(env); CHECK_ARG(env, arraybuffer); v8::Local value = v8impl::V8LocalValueFromJsValue(arraybuffer); @@ -2591,18 +2591,18 @@ napi_status napi_get_arraybuffer_info(napi_env env, *byte_length = contents.ByteLength(); } - return GET_RETURN_STATUS(env); + return napi_clear_last_error(env); } napi_status napi_is_typedarray(napi_env env, napi_value value, bool* result) { - NAPI_PREAMBLE(env); + CHECK_ENV(env); CHECK_ARG(env, value); CHECK_ARG(env, result); v8::Local val = v8impl::V8LocalValueFromJsValue(value); *result = val->IsTypedArray(); - return GET_RETURN_STATUS(env); + return napi_clear_last_error(env); } napi_status napi_create_typedarray(napi_env env, @@ -2664,7 +2664,7 @@ napi_status napi_get_typedarray_info(napi_env env, void** data, napi_value* arraybuffer, size_t* byte_offset) { - NAPI_PREAMBLE(env); + CHECK_ENV(env); CHECK_ARG(env, typedarray); v8::Local value = v8impl::V8LocalValueFromJsValue(typedarray); @@ -2712,7 +2712,7 @@ napi_status napi_get_typedarray_info(napi_env env, *byte_offset = array->ByteOffset(); } - return GET_RETURN_STATUS(env); + return napi_clear_last_error(env); } namespace uvimpl { From 1785f3cf447d9c0db1f71a5a5e5c5007f3f3d7d1 Mon Sep 17 00:00:00 2001 From: Michael Dawson Date: Fri, 28 Apr 2017 15:56:21 -0400 Subject: [PATCH 072/227] test: fix warning in n-api reference test Add cast to avoid warning during build of 
addon. Backport-PR-URL: https://github.com/nodejs/node/pull/19447 PR-URL: https://github.com/nodejs/node/pull/12730 Reviewed-By: James M Snell Reviewed-By: Colin Ihrig Reviewed-By: Anna Henningsen --- test/addons-napi/test_reference/test_reference.c | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/test/addons-napi/test_reference/test_reference.c b/test/addons-napi/test_reference/test_reference.c index 1a238a560aac53..9be9fd135fb0e9 100644 --- a/test/addons-napi/test_reference/test_reference.c +++ b/test/addons-napi/test_reference/test_reference.c @@ -60,10 +60,10 @@ napi_value CheckExternal(napi_env env, napi_callback_info info) { NAPI_ASSERT(env, argtype == napi_external, "Expected an external value.") - int* data; + void* data; NAPI_CALL(env, napi_get_value_external(env, arg, &data)); - NAPI_ASSERT(env, data != NULL && *data == test_value, + NAPI_ASSERT(env, data != NULL && *(int*)data == test_value, "An external data value of 1 was expected.") return NULL; From f09677fdba9e40545205cfffd0a4232e84145e99 Mon Sep 17 00:00:00 2001 From: Michael Dawson Date: Fri, 28 Apr 2017 15:28:34 -0400 Subject: [PATCH 073/227] test: add coverage for error apis Add coverage for N-API functions related to throwing and creating errors. A number of these are currently showing as not having any coverage in the nightly code coverage reports. Backport-PR-URL: https://github.com/nodejs/node/pull/19447 PR-URL: https://github.com/nodejs/node/pull/12729 Reviewed-By: Colin Ihrig Reviewed-By: James M Snell --- test/addons-napi/test_error/test.js | 34 ++++++++++++++ test/addons-napi/test_error/test_error.cc | 55 +++++++++++++++++++++++ 2 files changed, 89 insertions(+) diff --git a/test/addons-napi/test_error/test.js b/test/addons-napi/test_error/test.js index 521c29250d5ffa..f7479f2c9a64d8 100644 --- a/test/addons-napi/test_error/test.js +++ b/test/addons-napi/test_error/test.js @@ -55,3 +55,37 @@ assert.strictEqual(test_error.checkError({}), false, // Test that non-error primitive is correctly classed assert.strictEqual(test_error.checkError('non-object'), false, 'Non-error primitive correctly classed by napi_is_error'); + +assert.throws(() => { + test_error.throwExistingError(); +}, /^Error: existing error$/); + +assert.throws(() => { + test_error.throwError(); +}, /^Error: error$/); + +assert.throws(() => { + test_error.throwRangeError(); +}, /^RangeError: range error$/); + +assert.throws(() => { + test_error.throwTypeError(); +}, /^TypeError: type error$/); + +let error = test_error.createError(); +assert.ok(error instanceof Error, 'expected error to be an instance of Error'); +assert.strictEqual(error.message, 'error', 'expected message to be "error"'); + +error = test_error.createRangeError(); +assert.ok(error instanceof RangeError, + 'expected error to be an instance of RangeError'); +assert.strictEqual(error.message, + 'range error', + 'expected message to be "range error"'); + +error = test_error.createTypeError(); +assert.ok(error instanceof TypeError, + 'expected error to be an instance of TypeError'); +assert.strictEqual(error.message, + 'type error', + 'expected message to be "type error"'); diff --git a/test/addons-napi/test_error/test_error.cc b/test/addons-napi/test_error/test_error.cc index eb616cac371b0c..ddba2059f23be6 100644 --- a/test/addons-napi/test_error/test_error.cc +++ b/test/addons-napi/test_error/test_error.cc @@ -15,9 +15,64 @@ napi_value checkError(napi_env env, napi_callback_info info) { return result; 
} +napi_value throwExistingError(napi_env env, napi_callback_info info) { + napi_value message; + napi_value error; + NAPI_CALL(env, napi_create_string_utf8(env, "existing error", -1, &message)); + NAPI_CALL(env, napi_create_error(env, message, &error)); + NAPI_CALL(env, napi_throw(env, error)); + return nullptr; +} + +napi_value throwError(napi_env env, napi_callback_info info) { + NAPI_CALL(env, napi_throw_error(env, "error")); + return nullptr; +} + +napi_value throwRangeError(napi_env env, napi_callback_info info) { + NAPI_CALL(env, napi_throw_range_error(env, "range error")); + return nullptr; +} + +napi_value throwTypeError(napi_env env, napi_callback_info info) { + NAPI_CALL(env, napi_throw_type_error(env, "type error")); + return nullptr; +} + +napi_value createError(napi_env env, napi_callback_info info) { + napi_value result; + napi_value message; + NAPI_CALL(env, napi_create_string_utf8(env, "error", -1, &message)); + NAPI_CALL(env, napi_create_error(env, message, &result)); + return result; +} + +napi_value createRangeError(napi_env env, napi_callback_info info) { + napi_value result; + napi_value message; + NAPI_CALL(env, napi_create_string_utf8(env, "range error", -1, &message)); + NAPI_CALL(env, napi_create_range_error(env, message, &result)); + return result; +} + +napi_value createTypeError(napi_env env, napi_callback_info info) { + napi_value result; + napi_value message; + NAPI_CALL(env, napi_create_string_utf8(env, "type error", -1, &message)); + NAPI_CALL(env, napi_create_type_error(env, message, &result)); + return result; +} + void Init(napi_env env, napi_value exports, napi_value module, void* priv) { napi_property_descriptor descriptors[] = { DECLARE_NAPI_PROPERTY("checkError", checkError), + DECLARE_NAPI_PROPERTY("throwExistingError", throwExistingError), + DECLARE_NAPI_PROPERTY("throwError", throwError), + DECLARE_NAPI_PROPERTY("throwRangeError", throwRangeError), + DECLARE_NAPI_PROPERTY("throwTypeError", throwTypeError), + DECLARE_NAPI_PROPERTY("createError", createError), + DECLARE_NAPI_PROPERTY("createRangeError", createRangeError), + DECLARE_NAPI_PROPERTY("createTypeError", createTypeError), }; NAPI_CALL_RETURN_VOID(env, napi_define_properties( From dd7665a68eabf4d10e8fb33e8a1cca1db27dbfd5 Mon Sep 17 00:00:00 2001 From: Hitesh Kanwathirtha Date: Thu, 13 Apr 2017 20:21:52 -0700 Subject: [PATCH 074/227] test: port test for make_callback to n-api Improved test coverage for napi_make_callback by porting the existing addons/make_callback test to n-api Backport-PR-URL: https://github.com/nodejs/node/pull/19447 PR-URL: https://github.com/nodejs/node/pull/12409 Reviewed-By: Anna Henningsen Reviewed-By: Benjamin Gruenbaum Reviewed-By: James M Snell Reviewed-By: Refael Ackermann --- .../addons-napi/test_make_callback/binding.cc | 48 ++++++ .../test_make_callback/binding.gyp | 9 ++ test/addons-napi/test_make_callback/test.js | 83 ++++++++++ .../test_make_callback_recurse/binding.cc | 32 ++++ .../test_make_callback_recurse/binding.gyp | 9 ++ .../test_make_callback_recurse/test.js | 151 ++++++++++++++++++ 6 files changed, 332 insertions(+) create mode 100644 test/addons-napi/test_make_callback/binding.cc create mode 100644 test/addons-napi/test_make_callback/binding.gyp create mode 100644 test/addons-napi/test_make_callback/test.js create mode 100644 test/addons-napi/test_make_callback_recurse/binding.cc create mode 100644 test/addons-napi/test_make_callback_recurse/binding.gyp create mode 100644 
test/addons-napi/test_make_callback_recurse/test.js diff --git a/test/addons-napi/test_make_callback/binding.cc b/test/addons-napi/test_make_callback/binding.cc new file mode 100644 index 00000000000000..987b024098598c --- /dev/null +++ b/test/addons-napi/test_make_callback/binding.cc @@ -0,0 +1,48 @@ +#include +#include "../common.h" +#include + +namespace { + +napi_value MakeCallback(napi_env env, napi_callback_info info) { + const int kMaxArgs = 10; + size_t argc = kMaxArgs; + napi_value args[kMaxArgs]; + NAPI_CALL(env, napi_get_cb_info(env, info, &argc, args, NULL, NULL)); + + NAPI_ASSERT(env, argc > 0, "Wrong number of arguments"); + + napi_value recv = args[0]; + napi_value func = args[1]; + + std::vector argv; + for (size_t n = 2; n < argc; n += 1) { + argv.push_back(args[n]); + } + + napi_valuetype func_type; + + NAPI_CALL(env, napi_typeof(env, func, &func_type)); + + napi_value result; + if (func_type == napi_function) { + NAPI_CALL(env, + napi_make_callback(env, recv, func, argv.size(), argv.data(), &result)); + } else { + NAPI_ASSERT(env, false, "Unexpected argument type"); + } + + return result; +} + +void Init(napi_env env, napi_value exports, napi_value module, void* priv) { + napi_value fn; + NAPI_CALL_RETURN_VOID(env, + napi_create_function(env, NULL, MakeCallback, NULL, &fn)); + NAPI_CALL_RETURN_VOID(env, + napi_set_named_property(env, exports, "makeCallback", fn)); +} + +} // namespace + +NAPI_MODULE(binding, Init) diff --git a/test/addons-napi/test_make_callback/binding.gyp b/test/addons-napi/test_make_callback/binding.gyp new file mode 100644 index 00000000000000..7ede63d94a0d77 --- /dev/null +++ b/test/addons-napi/test_make_callback/binding.gyp @@ -0,0 +1,9 @@ +{ + 'targets': [ + { + 'target_name': 'binding', + 'defines': [ 'V8_DEPRECATION_WARNINGS=1' ], + 'sources': [ 'binding.cc' ] + } + ] +} diff --git a/test/addons-napi/test_make_callback/test.js b/test/addons-napi/test_make_callback/test.js new file mode 100644 index 00000000000000..c4f24872bdb78e --- /dev/null +++ b/test/addons-napi/test_make_callback/test.js @@ -0,0 +1,83 @@ +'use strict'; + +const common = require('../../common'); +const assert = require('assert'); +const vm = require('vm'); +const binding = require(`./build/${common.buildType}/binding`); +const makeCallback = binding.makeCallback; + +function myMultiArgFunc(arg1, arg2, arg3) { + console.log(`MyFunc was called with ${arguments.length} arguments`); + assert.strictEqual(arg1, 1); + assert.strictEqual(arg2, 2); + assert.strictEqual(arg3, 3); + return 42; +} + +assert.strictEqual(42, makeCallback(process, common.mustCall(function() { + assert.strictEqual(0, arguments.length); + assert.strictEqual(this, process); + return 42; +}))); + +assert.strictEqual(42, makeCallback(process, common.mustCall(function(x) { + assert.strictEqual(1, arguments.length); + assert.strictEqual(this, process); + assert.strictEqual(x, 1337); + return 42; +}), 1337)); + +assert.strictEqual(42, + makeCallback(this, + common.mustCall(myMultiArgFunc), 1, 2, 3)); + +// TODO(node-api): napi_make_callback needs to support +// strings passed for the func argument +/* +const recv = { + one: common.mustCall(function() { + assert.strictEqual(0, arguments.length); + assert.strictEqual(this, recv); + return 42; + }), + two: common.mustCall(function(x) { + assert.strictEqual(1, arguments.length); + assert.strictEqual(this, recv); + assert.strictEqual(x, 1337); + return 42; + }), +}; + +assert.strictEqual(42, makeCallback(recv, 'one')); +assert.strictEqual(42, makeCallback(recv, 'two', 
1337)); + +// Check that callbacks on a receiver from a different context works. +const foreignObject = vm.runInNewContext('({ fortytwo() { return 42; } })'); +assert.strictEqual(42, makeCallback(foreignObject, 'fortytwo')); +*/ + +// Check that the callback is made in the context of the receiver. +const target = vm.runInNewContext(` + (function($Object) { + if (Object === $Object) + throw new Error('bad'); + return Object; + }) +`); +assert.notStrictEqual(Object, makeCallback(process, target, Object)); + +// Runs in inner context. +const forward = vm.runInNewContext(` + (function(forward) { + return forward(Object); + }) +`); + +// Runs in outer context. +function endpoint($Object) { + if (Object === $Object) + throw new Error('bad'); + return Object; +} + +assert.strictEqual(Object, makeCallback(process, forward, endpoint)); diff --git a/test/addons-napi/test_make_callback_recurse/binding.cc b/test/addons-napi/test_make_callback_recurse/binding.cc new file mode 100644 index 00000000000000..3f5a4c28b43524 --- /dev/null +++ b/test/addons-napi/test_make_callback_recurse/binding.cc @@ -0,0 +1,32 @@ +#include +#include "../common.h" +#include + +namespace { + +napi_value MakeCallback(napi_env env, napi_callback_info info) { + size_t argc = 2; + napi_value args[2]; + NAPI_CALL(env, napi_get_cb_info(env, info, &argc, args, NULL, NULL)); + + napi_value recv = args[0]; + napi_value func = args[1]; + + napi_make_callback(env, + recv, func, 0 /* argc */, nullptr /* argv */, nullptr /* result */); + + return recv; +} + +void Init(napi_env env, napi_value exports, napi_value module, void* priv) { + napi_value fn; + NAPI_CALL_RETURN_VOID(env, + napi_create_function(env, NULL, MakeCallback, NULL, &fn)); + NAPI_CALL_RETURN_VOID(env, + napi_set_named_property(env, exports, "makeCallback", fn)); +} + + +} // namespace + +NAPI_MODULE(binding, Init) diff --git a/test/addons-napi/test_make_callback_recurse/binding.gyp b/test/addons-napi/test_make_callback_recurse/binding.gyp new file mode 100644 index 00000000000000..7ede63d94a0d77 --- /dev/null +++ b/test/addons-napi/test_make_callback_recurse/binding.gyp @@ -0,0 +1,9 @@ +{ + 'targets': [ + { + 'target_name': 'binding', + 'defines': [ 'V8_DEPRECATION_WARNINGS=1' ], + 'sources': [ 'binding.cc' ] + } + ] +} diff --git a/test/addons-napi/test_make_callback_recurse/test.js b/test/addons-napi/test_make_callback_recurse/test.js new file mode 100644 index 00000000000000..895769bc33029a --- /dev/null +++ b/test/addons-napi/test_make_callback_recurse/test.js @@ -0,0 +1,151 @@ +'use strict'; + +const common = require('../../common'); +const assert = require('assert'); +const domain = require('domain'); +const binding = require(`./build/${common.buildType}/binding`); +const makeCallback = binding.makeCallback; + +// Make sure this is run in the future. +const mustCallCheckDomains = common.mustCall(checkDomains); + + +// Make sure that using MakeCallback allows the error to propagate. +assert.throws(function() { + makeCallback({}, function() { + throw new Error('hi from domain error'); + }); +}, /^Error: hi from domain error$/); + + +// Check the execution order of the nextTickQueue and MicrotaskQueue in +// relation to running multiple MakeCallback's from bootstrap, +// node::MakeCallback() and node::AsyncWrap::MakeCallback(). +// TODO(trevnorris): Is there a way to verify this is being run during +// bootstrap? +(function verifyExecutionOrder(arg) { + const results = []; + + // Processing of the MicrotaskQueue is manually handled by node. 
They are not + // processed until after the nextTickQueue has been processed. + Promise.resolve(1).then(common.mustCall(function() { + results.push(7); + })); + + // The nextTick should run after all immediately invoked calls. + process.nextTick(common.mustCall(function() { + results.push(3); + + // Run same test again but while processing the nextTickQueue to make sure + // the following MakeCallback call breaks in the middle of processing the + // queue and allows the script to run normally. + process.nextTick(common.mustCall(function() { + results.push(6); + })); + + makeCallback({}, common.mustCall(function() { + results.push(4); + })); + + results.push(5); + })); + + results.push(0); + + // MakeCallback is calling the function immediately, but should then detect + // that a script is already in the middle of execution and return before + // either the nextTickQueue or MicrotaskQueue are processed. + makeCallback({}, common.mustCall(function() { + results.push(1); + })); + + // This should run before either the nextTickQueue or MicrotaskQueue are + // processed. Previously MakeCallback would not detect this circumstance + // and process them immediately. + results.push(2); + + setImmediate(common.mustCall(function() { + for (let i = 0; i < results.length; i++) { + assert.strictEqual(results[i], i, + `verifyExecutionOrder(${arg}) results: ${results}`); + } + if (arg === 1) { + // The tests are first run on bootstrap during LoadEnvironment() in + // src/node.cc. Now run the tests through node::MakeCallback(). + setImmediate(function() { + makeCallback({}, common.mustCall(function() { + verifyExecutionOrder(2); + })); + }); + } else if (arg === 2) { + // setTimeout runs via the TimerWrap, which runs through + // AsyncWrap::MakeCallback(). Make sure there are no conflicts using + // node::MakeCallback() within it. + setTimeout(common.mustCall(function() { + verifyExecutionOrder(3); + }), 10); + } else if (arg === 3) { + mustCallCheckDomains(); + } else { + throw new Error('UNREACHABLE'); + } + })); +}(1)); + + +function checkDomains() { + // Check that domains are properly entered/exited when called in multiple + // levels from both node::MakeCallback() and AsyncWrap::MakeCallback + setImmediate(common.mustCall(function() { + const d1 = domain.create(); + const d2 = domain.create(); + const d3 = domain.create(); + + makeCallback({domain: d1}, common.mustCall(function() { + assert.strictEqual(d1, process.domain); + makeCallback({domain: d2}, common.mustCall(function() { + assert.strictEqual(d2, process.domain); + makeCallback({domain: d3}, common.mustCall(function() { + assert.strictEqual(d3, process.domain); + })); + assert.strictEqual(d2, process.domain); + })); + assert.strictEqual(d1, process.domain); + })); + })); + + setTimeout(common.mustCall(function() { + const d1 = domain.create(); + const d2 = domain.create(); + const d3 = domain.create(); + + makeCallback({domain: d1}, common.mustCall(function() { + assert.strictEqual(d1, process.domain); + makeCallback({domain: d2}, common.mustCall(function() { + assert.strictEqual(d2, process.domain); + makeCallback({domain: d3}, common.mustCall(function() { + assert.strictEqual(d3, process.domain); + })); + assert.strictEqual(d2, process.domain); + })); + assert.strictEqual(d1, process.domain); + })); + }), 1); + + function testTimer(id) { + // Make sure nextTick, setImmediate and setTimeout can all recover properly + // after a thrown makeCallback call. 
+ const d = domain.create(); + d.on('error', common.mustCall(function(e) { + assert.strictEqual(e.message, `throw from domain ${id}`); + })); + makeCallback({domain: d}, function() { + throw new Error(`throw from domain ${id}`); + }); + throw new Error('UNREACHABLE'); + } + + process.nextTick(common.mustCall(testTimer), 3); + setImmediate(common.mustCall(testTimer), 2); + setTimeout(common.mustCall(testTimer), 1, 1); +} From ce03977f30b8959817cd4d160e63572d5ea32a58 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C3=ABl=20Zasso?= Date: Fri, 5 May 2017 14:43:54 +0200 Subject: [PATCH 075/227] test: fix napi test_reference for recent V8 Backport-PR-URL: https://github.com/nodejs/node/pull/19447 PR-URL: https://github.com/nodejs/node/pull/12864 Ref: https://github.com/nodejs/node/pull/12551#issuecomment-297949361 Reviewed-By: James M Snell Reviewed-By: Michael Dawson --- test/addons-napi/test_reference/test.js | 29 ++++++++++++++----------- 1 file changed, 16 insertions(+), 13 deletions(-) diff --git a/test/addons-napi/test_reference/test.js b/test/addons-napi/test_reference/test.js index ddfec58f1f9d7d..30effe7eec0922 100644 --- a/test/addons-napi/test_reference/test.js +++ b/test/addons-napi/test_reference/test.js @@ -33,19 +33,6 @@ assert.strictEqual(test_reference.finalizeCount, 0); assert.strictEqual(test_reference.finalizeCount, 1); } -{ - // Weak reference - let value = test_reference.createExternalWithFinalize(); - assert.strictEqual(test_reference.finalizeCount, 0); - test_reference.createReference(value, 0); - assert.strictEqual(test_reference.referenceValue, value); - value = null; - global.gc(); // Value should be GC'd because there is only a weak ref - assert.strictEqual(test_reference.referenceValue, undefined); - assert.strictEqual(test_reference.finalizeCount, 1); - test_reference.deleteReference(); -} - { // Strong reference let value = test_reference.createExternalWithFinalize(); @@ -85,3 +72,19 @@ assert.strictEqual(test_reference.finalizeCount, 0); global.gc(); // Value was already GC'd assert.strictEqual(test_reference.finalizeCount, 1); } + +{ + // Weak reference + let value = test_reference.createExternalWithFinalize(); + assert.strictEqual(test_reference.finalizeCount, 0); + test_reference.createReference(value, 0); + assert.strictEqual(test_reference.referenceValue, value); + value = null; + setImmediate(common.mustCall(() => { + // This test only works if gc() is called from an immediate callback. + global.gc(); // Value should be GC'd because there is only a weak ref + assert.strictEqual(test_reference.referenceValue, undefined); + assert.strictEqual(test_reference.finalizeCount, 1); + test_reference.deleteReference(); + })); +} From 2e36365d569b15f2859c88a488305577fe0fc907 Mon Sep 17 00:00:00 2001 From: Jason Ginchereau Date: Fri, 5 May 2017 11:38:21 -0700 Subject: [PATCH 076/227] n-api: napi_get_cb_info should fill array When the number of args requested is greater than the actual number of args supplied to the function call, the remainder of the args array should be filled in with `undefined` values. Because of this bug, the remainder of the array was left uninitialized, which could cause a crash. 
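To make the fixed behaviour concrete, here is a minimal sketch (not part of the patch itself; the function name `TakesUpToThree` is hypothetical and error handling is omitted) of a native callback that asks for more arguments than the caller may have supplied:

```C
#include <node_api.h>

static napi_value TakesUpToThree(napi_env env, napi_callback_info info) {
  size_t argc = 3;      // Capacity of the args buffer being offered.
  napi_value args[3];
  napi_get_cb_info(env, info, &argc, args, NULL, NULL);

  // If JavaScript passed a single argument, argc is now 1, and with this fix
  // args[1] and args[2] hold the JS `undefined` value instead of garbage.
  napi_valuetype type;
  napi_typeof(env, args[2], &type);  // type == napi_undefined
  return args[0];
}
```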
Refer to the documentation for the `argv` parameter at https://github.com/nodejs/node/blob/master/doc/api/n-api.md#napi_get_cb_info Backport-PR-URL: https://github.com/nodejs/node/pull/19447 PR-URL: https://github.com/nodejs/node/pull/12863 Reviewed-By: Anna Henningsen Reviewed-By: James M Snell Reviewed-By: Michael Dawson --- src/node_api.cc | 2 +- test/addons-napi/3_callbacks/binding.c | 17 +++++++++++++++-- test/addons-napi/test_function/test_function.c | 2 +- 3 files changed, 17 insertions(+), 4 deletions(-) diff --git a/src/node_api.cc b/src/node_api.cc index 4113124b093419..2f8409fc81627c 100644 --- a/src/node_api.cc +++ b/src/node_api.cc @@ -1526,7 +1526,7 @@ napi_status napi_get_cb_info( if (argv != nullptr) { CHECK_ARG(env, argc); - info->Args(argv, std::min(*argc, info->ArgsLength())); + info->Args(argv, *argc); } if (argc != nullptr) { *argc = info->ArgsLength(); diff --git a/test/addons-napi/3_callbacks/binding.c b/test/addons-napi/3_callbacks/binding.c index 47360bd979ffc4..8640a936107d47 100644 --- a/test/addons-napi/3_callbacks/binding.c +++ b/test/addons-napi/3_callbacks/binding.c @@ -3,10 +3,23 @@ #include napi_value RunCallback(napi_env env, napi_callback_info info) { - size_t argc = 1; - napi_value args[1]; + size_t argc = 2; + napi_value args[2]; NAPI_CALL(env, napi_get_cb_info(env, info, &argc, args, NULL, NULL)); + NAPI_ASSERT(env, argc == 1, + "Wrong number of arguments. Expects a single argument."); + + napi_valuetype valuetype0; + NAPI_CALL(env, napi_typeof(env, args[0], &valuetype0)); + NAPI_ASSERT(env, valuetype0 == napi_function, + "Wrong type of arguments. Expects a function as first argument."); + + napi_valuetype valuetype1; + NAPI_CALL(env, napi_typeof(env, args[1], &valuetype1)); + NAPI_ASSERT(env, valuetype1 == napi_undefined, + "Additional arguments should be undefined."); + napi_value argv[1]; const char* str = "hello world"; size_t str_len = strlen(str); diff --git a/test/addons-napi/test_function/test_function.c b/test/addons-napi/test_function/test_function.c index 928f99c184cb57..4ce0203e7232dd 100644 --- a/test/addons-napi/test_function/test_function.c +++ b/test/addons-napi/test_function/test_function.c @@ -12,7 +12,7 @@ napi_value Test(napi_env env, napi_callback_info info) { NAPI_CALL(env, napi_typeof(env, args[0], &valuetype0)); NAPI_ASSERT(env, valuetype0 == napi_function, - "Wrong type of arguments. Expects a number as first argument."); + "Wrong type of arguments. Expects a function as first argument."); napi_value* argv = args + 1; argc = argc - 1; From a128219a484f74e4e4a5d3db23824fb9b5efb5cd Mon Sep 17 00:00:00 2001 From: Jason Ginchereau Date: Thu, 4 May 2017 15:24:44 -0700 Subject: [PATCH 077/227] n-api: Handle fatal exception in async callback - Create a handle scope before invoking the async completion callback, because it is basically always needed, easy for user code to forget, and this makes it more consistent with ordinary N-API function callbacks. - Check for an unhandled JS exception after invoking an async completion callback, and report it via `node::FatalException()`. - Add a corresponding test case for an exception in async callback. Previously, any unhandled JS exception thrown from a `napi_async_complete_callback` would be silently ignored. Among other things this meant assertions in some test cases could be undetected. 
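A minimal sketch of what this change means for an async completion callback (illustrative only, not part of the patch; `ExampleComplete` and the error text are made-up names):

```C
#include <node_api.h>

static void ExampleComplete(napi_env env, napi_status status, void* data) {
  if (status == napi_cancelled)
    return;  // The work was cancelled before it started executing.

  // A handle scope is now opened by N-API before this callback runs, so
  // napi_value handles can be created here without extra bookkeeping.
  napi_value message;
  napi_create_string_utf8(env, "work finished", -1, &message);
  (void)message;  // Normally this would be passed to a JavaScript callback.

  if (status != napi_ok) {
    // An exception that is still pending when this callback returns (for
    // example from a failing assertion in a test) is now reported through
    // node::FatalException() rather than being silently ignored.
    napi_throw_error(env, "async work failed");
  }
}
```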
Backport-PR-URL: https://github.com/nodejs/node/pull/19447 PR-URL: https://github.com/nodejs/node/pull/12838 Reviewed-By: Anna Henningsen Reviewed-By: Benjamin Gruenbaum Reviewed-By: Michael Dawson Reviewed-By: James M Snell --- doc/api/n-api.md | 7 ------- src/node_api.cc | 21 ++++++++++++++++++++- test/addons-napi/test_async/test.js | 18 ++++++++++++++++++ test/addons-napi/test_async/test_async.cc | 19 ------------------- 4 files changed, 38 insertions(+), 27 deletions(-) diff --git a/doc/api/n-api.md b/doc/api/n-api.md index f9e47a7b8d32a0..bc0499e4d09447 100644 --- a/doc/api/n-api.md +++ b/doc/api/n-api.md @@ -2855,13 +2855,6 @@ will be invoked with a status value of `napi_cancelled`. The work should not be deleted before the `complete` callback invocation, even when it was cancelled. -**Note:** As mentioned in the section on memory management, if -the code to be run in the callbacks will create N-API values, then -N-API handle scope functions must be used to create/destroy a -`napi_handle_scope` such that the scope is active when -objects can be created. - - ### napi_create_async_work -Node.js Addons are dynamically-linked shared objects, written in C or C++, that +Node.js Addons are dynamically-linked shared objects, written in C++, that can be loaded into Node.js using the [`require()`][require] function, and used just as if they were an ordinary Node.js module. They are used primarily to provide an interface between JavaScript running in Node.js and C/C++ libraries. @@ -28,7 +28,7 @@ involving knowledge of several components and APIs : off-loading work via libuv to non-blocking system operations, worker threads or a custom use of libuv's threads. - - Internal Node.js libraries. Node.js itself exports a number of C/C++ APIs + - Internal Node.js libraries. Node.js itself exports a number of C++ APIs that Addons can use — the most important of which is the `node::ObjectWrap` class. diff --git a/doc/api/n-api.md b/doc/api/n-api.md index 7530c0b1b4f4d2..348f2ad04466bd 100644 --- a/doc/api/n-api.md +++ b/doc/api/n-api.md @@ -12,7 +12,7 @@ compiled for one version to run on later versions of Node.js without recompilation. Addons are built/packaged with the same approach/tools -outlined in the section titled [C/C++ Addons](addons.html). +outlined in the section titled [C++ Addons](addons.html). The only difference is the set of APIs that are used by the native code. Instead of using the V8 or [Native Abstractions for Node.js][] APIs, the functions available in the N-API are used. From 263a633d5eef0dbf9724ece9861de8123000a56b Mon Sep 17 00:00:00 2001 From: Rich Trott Date: Wed, 10 May 2017 19:37:25 -0700 Subject: [PATCH 081/227] test: add common.mustCall() to NAPI exception test Use `common.mustCall()` to confirm that function is invoked. 
Backport-PR-URL: https://github.com/nodejs/node/pull/19447 PR-URL: https://github.com/nodejs/node/pull/12959 Reviewed-By: Colin Ihrig Reviewed-By: Sakthipriyan Vairamani Reviewed-By: Gibson Fahnestock Reviewed-By: Anna Henningsen Reviewed-By: James M Snell --- test/addons-napi/test_exception/test.js | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/test/addons-napi/test_exception/test.js b/test/addons-napi/test_exception/test.js index 94f9566a4b341f..ddd1195fa7234a 100644 --- a/test/addons-napi/test_exception/test.js +++ b/test/addons-napi/test_exception/test.js @@ -9,8 +9,6 @@ function throwTheError() { } let caughtError; -const throwNoError = common.noop; - // Test that the native side successfully captures the exception let returnedError = test_exception.returnException(throwTheError); assert.strictEqual(theError, returnedError, @@ -34,13 +32,13 @@ assert.strictEqual(test_exception.wasPending(), true, ' when it was allowed through'); // Test that the native side does not capture a non-existing exception -returnedError = test_exception.returnException(throwNoError); +returnedError = test_exception.returnException(common.mustCall()); assert.strictEqual(undefined, returnedError, 'Returned error is undefined when no exception is thrown'); // Test that no exception appears that was not thrown by us try { - test_exception.allowException(throwNoError); + test_exception.allowException(common.mustCall()); } catch (anError) { caughtError = anError; } From 0f74ee5cbf1faffcb986da37791fbbece28a9b7b Mon Sep 17 00:00:00 2001 From: Michael Dawson Date: Thu, 11 May 2017 09:53:35 -0400 Subject: [PATCH 082/227] doc: clarify operation of napi_cancel_async_work Backport-PR-URL: https://github.com/nodejs/node/pull/19447 PR-URL: https://github.com/nodejs/node/pull/12974 Reviewed-By: Gibson Fahnestock Reviewed-By: Sakthipriyan Vairamani Reviewed-By: Colin Ihrig Reviewed-By: James M Snell Reviewed-By: Jason Ginchereau --- doc/api/n-api.md | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/doc/api/n-api.md b/doc/api/n-api.md index 348f2ad04466bd..423e71859dea94 100644 --- a/doc/api/n-api.md +++ b/doc/api/n-api.md @@ -2931,11 +2931,12 @@ NAPI_EXTERN napi_status napi_cancel_async_work(napi_env env, Returns `napi_ok` if the API succeeded. -This API cancels a previously allocated work, provided -it has not yet been queued for execution. After this function is called +This API cancels queued work if it has not yet +been started. If it has already started executing, it cannot be +cancelled and `napi_generic_failure` will be returned. If successful, the `complete` callback will be invoked with a status value of `napi_cancelled`. The work should not be deleted before the `complete` -callback invocation, even when it was cancelled. +callback invocation, even if it has been successfully cancelled. 
[Aynchronous Operations]: #n_api_asynchronous_operations From 314f22dcf4e5dc864cddd98f08274080601f0a88 Mon Sep 17 00:00:00 2001 From: Michael Dawson Date: Fri, 12 May 2017 17:26:38 -0400 Subject: [PATCH 083/227] test: improve N-API test coverage Add tests to cover functions that return globals Backport-PR-URL: https://github.com/nodejs/node/pull/19447 PR-URL: https://github.com/nodejs/node/pull/13006 Reviewed-By: Colin Ihrig Reviewed-By: Anna Henningsen Reviewed-By: Jason Ginchereau --- test/addons-napi/test_globals/binding.gyp | 8 ++++++ test/addons-napi/test_globals/test.js | 8 ++++++ test/addons-napi/test_globals/test_globals.c | 26 ++++++++++++++++++++ 3 files changed, 42 insertions(+) create mode 100644 test/addons-napi/test_globals/binding.gyp create mode 100644 test/addons-napi/test_globals/test.js create mode 100644 test/addons-napi/test_globals/test_globals.c diff --git a/test/addons-napi/test_globals/binding.gyp b/test/addons-napi/test_globals/binding.gyp new file mode 100644 index 00000000000000..0160dc72e18017 --- /dev/null +++ b/test/addons-napi/test_globals/binding.gyp @@ -0,0 +1,8 @@ +{ + "targets": [ + { + "target_name": "test_globals", + "sources": [ "test_globals.c" ] + } + ] +} diff --git a/test/addons-napi/test_globals/test.js b/test/addons-napi/test_globals/test.js new file mode 100644 index 00000000000000..a6e5f722cb9379 --- /dev/null +++ b/test/addons-napi/test_globals/test.js @@ -0,0 +1,8 @@ +'use strict'; +const common = require('../../common'); +const assert = require('assert'); + +const test_globals = require(`./build/${common.buildType}/test_globals`); + +assert.strictEqual(test_globals.getUndefined(), undefined); +assert.strictEqual(test_globals.getNull(), null); diff --git a/test/addons-napi/test_globals/test_globals.c b/test/addons-napi/test_globals/test_globals.c new file mode 100644 index 00000000000000..709e42a2e0e6c8 --- /dev/null +++ b/test/addons-napi/test_globals/test_globals.c @@ -0,0 +1,26 @@ +#include +#include "../common.h" + +napi_value getNull(napi_env env, napi_callback_info info) { + napi_value result; + NAPI_CALL(env, napi_get_null(env, &result)); + return result; +} + +napi_value getUndefined(napi_env env, napi_callback_info info) { + napi_value result; + NAPI_CALL(env, napi_get_undefined(env, &result)); + return result; +} + +void Init(napi_env env, napi_value exports, napi_value module, void* priv) { + napi_property_descriptor descriptors[] = { + DECLARE_NAPI_PROPERTY("getUndefined", getUndefined), + DECLARE_NAPI_PROPERTY("getNull", getNull), + }; + + NAPI_CALL_RETURN_VOID(env, napi_define_properties( + env, exports, sizeof(descriptors) / sizeof(*descriptors), descriptors)); +} + +NAPI_MODULE(addon, Init) From 8d3162d9e68b6ae30addde5d0c5e1566e21c7ce6 Mon Sep 17 00:00:00 2001 From: Anna Henningsen Date: Sat, 13 May 2017 17:36:23 +0200 Subject: [PATCH 084/227] n-api: remove compiler warning `TryCatch` without an `Isolate*` argument is deprecated, so add one. Backport-PR-URL: https://github.com/nodejs/node/pull/19447 PR-URL: https://github.com/nodejs/node/pull/13014 Reviewed-By: Timothy Gu Reviewed-By: Gibson Fahnestock Reviewed-By: Colin Ihrig Reviewed-By: Michael Dawson Reviewed-By: Jason Ginchereau --- src/node_api.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/node_api.cc b/src/node_api.cc index 8600783523c7a5..b0900d1acd12bb 100644 --- a/src/node_api.cc +++ b/src/node_api.cc @@ -2783,7 +2783,7 @@ class Work { // report it as a fatal exception. 
(There is no JavaScript on the // callstack that can possibly handle it.) if (!env->last_exception.IsEmpty()) { - v8::TryCatch try_catch; + v8::TryCatch try_catch(env->isolate); env->isolate->ThrowException( v8::Local::New(env->isolate, env->last_exception)); node::FatalException(env->isolate, try_catch); From 71aa251671dfb78d82315b9fc11b32a12b66dea8 Mon Sep 17 00:00:00 2001 From: Michael Dawson Date: Mon, 15 May 2017 20:18:50 -0400 Subject: [PATCH 085/227] test: Improve N-API test coverage - add coverage for napi_get_prototype - add coverage for napi_strict_equals Backport-PR-URL: https://github.com/nodejs/node/pull/19447 PR-URL: https://github.com/nodejs/node/pull/13044 Reviewed-By: Colin Ihrig Reviewed-By: Jason Ginchereau --- test/addons-napi/test_general/binding.gyp | 8 +++++ test/addons-napi/test_general/test.js | 32 +++++++++++++++++ test/addons-napi/test_general/test_general.c | 38 ++++++++++++++++++++ 3 files changed, 78 insertions(+) create mode 100644 test/addons-napi/test_general/binding.gyp create mode 100644 test/addons-napi/test_general/test.js create mode 100644 test/addons-napi/test_general/test_general.c diff --git a/test/addons-napi/test_general/binding.gyp b/test/addons-napi/test_general/binding.gyp new file mode 100644 index 00000000000000..f8ef9f59613355 --- /dev/null +++ b/test/addons-napi/test_general/binding.gyp @@ -0,0 +1,8 @@ +{ + "targets": [ + { + "target_name": "test_general", + "sources": [ "test_general.c" ] + } + ] +} diff --git a/test/addons-napi/test_general/test.js b/test/addons-napi/test_general/test.js new file mode 100644 index 00000000000000..a2e57126ce8c22 --- /dev/null +++ b/test/addons-napi/test_general/test.js @@ -0,0 +1,32 @@ +'use strict'; + +const common = require('../../common'); +const test_general = require(`./build/${common.buildType}/test_general`); +const assert = require('assert'); + +const val1 = '1'; +const val2 = 1; +const val3 = 1; + +class BaseClass { +} + +class ExtendedClass extends BaseClass { +} + +const baseObject = new BaseClass(); +const extendedObject = new ExtendedClass(); + +// test napi_strict_equals +assert.ok(test_general.testStrictEquals(val1, val1)); +assert.strictEqual(test_general.testStrictEquals(val1, val2), false); +assert.ok(test_general.testStrictEquals(val2, val3)); + +// test napi_get_prototype +assert.strictEqual(test_general.testGetPrototype(baseObject), + Object.getPrototypeOf(baseObject)); +assert.strictEqual(test_general.testGetPrototype(extendedObject), + Object.getPrototypeOf(extendedObject)); +assert.ok(test_general.testGetPrototype(baseObject) !== + test_general.testGetPrototype(extendedObject), + 'Prototypes for base and extended should be different'); diff --git a/test/addons-napi/test_general/test_general.c b/test/addons-napi/test_general/test_general.c new file mode 100644 index 00000000000000..17911b36253df1 --- /dev/null +++ b/test/addons-napi/test_general/test_general.c @@ -0,0 +1,38 @@ +#include +#include "../common.h" + +napi_value testStrictEquals(napi_env env, napi_callback_info info) { + size_t argc = 2; + napi_value args[2]; + NAPI_CALL(env, napi_get_cb_info(env, info, &argc, args, NULL, NULL)); + + bool bool_result; + napi_value result; + NAPI_CALL(env, napi_strict_equals(env, args[0], args[1], &bool_result)); + NAPI_CALL(env, napi_get_boolean(env, bool_result, &result)); + + return result; +} + +napi_value testGetPrototype(napi_env env, napi_callback_info info) { + size_t argc = 1; + napi_value args[1]; + NAPI_CALL(env, napi_get_cb_info(env, info, 
&argc, args, NULL, NULL)); + + napi_value result; + NAPI_CALL(env, napi_get_prototype(env, args[0], &result)); + + return result; +} + +void Init(napi_env env, napi_value exports, napi_value module, void* priv) { + napi_property_descriptor descriptors[] = { + DECLARE_NAPI_PROPERTY("testStrictEquals", testStrictEquals), + DECLARE_NAPI_PROPERTY("testGetPrototype", testGetPrototype), + }; + + NAPI_CALL_RETURN_VOID(env, napi_define_properties( + env, exports, sizeof(descriptors) / sizeof(*descriptors), descriptors)); +} + +NAPI_MODULE(addon, Init) From 70281ba1be8d4bc067ac9ec6b5a50f0936eb06fc Mon Sep 17 00:00:00 2001 From: Jason Ginchereau Date: Wed, 17 May 2017 16:56:37 -0700 Subject: [PATCH 086/227] n-api: Retain last code when getting error info Unlike most N-API functions, `napi_get_last_error_info()` should not clear the last error code when successful, because a pointer to (not a copy of) the error info structure is returned via an out parameter. Backport-PR-URL: https://github.com/nodejs/node/pull/19447 PR-URL: https://github.com/nodejs/node/pull/13087 Reviewed-By: Anna Henningsen Reviewed-By: Colin Ihrig Reviewed-By: Michael Dawson --- src/node_api.cc | 2 +- test/addons-napi/test_napi_status/test_napi_status.cc | 8 ++++++++ 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/src/node_api.cc b/src/node_api.cc index b0900d1acd12bb..976bf15f3ca423 100644 --- a/src/node_api.cc +++ b/src/node_api.cc @@ -757,7 +757,7 @@ napi_status napi_get_last_error_info(napi_env env, error_messages[env->last_error.error_code]; *result = &(env->last_error); - return napi_clear_last_error(env); + return napi_ok; } napi_status napi_create_function(napi_env env, diff --git a/test/addons-napi/test_napi_status/test_napi_status.cc b/test/addons-napi/test_napi_status/test_napi_status.cc index 9046feffd4a15d..9e340aa46e106c 100644 --- a/test/addons-napi/test_napi_status/test_napi_status.cc +++ b/test/addons-napi/test_napi_status/test_napi_status.cc @@ -10,6 +10,14 @@ napi_value createNapiError(napi_env env, napi_callback_info info) { NAPI_ASSERT(env, status != napi_ok, "Failed to produce error condition"); + const napi_extended_error_info *error_info = 0; + NAPI_CALL(env, napi_get_last_error_info(env, &error_info)); + + NAPI_ASSERT(env, error_info->error_code == status, + "Last error info code should match last status"); + NAPI_ASSERT(env, error_info->error_message, + "Last error info message should not be null"); + return nullptr; } From 75e91fe5c8c386ced5addab9cb55326a4e743ba9 Mon Sep 17 00:00:00 2001 From: Michael Dawson Date: Wed, 17 May 2017 17:16:37 -0400 Subject: [PATCH 087/227] doc: add reference to node_api.h in docs Realized that we don't actually point people to the file to include in order to access N-API functions. Add that. Backport-PR-URL: https://github.com/nodejs/node/pull/19447 PR-URL: https://github.com/nodejs/node/pull/13084 Reviewed-By: Colin Ihrig Reviewed-By: Gibson Fahnestock Reviewed-By: Luigi Pinca Reviewed-By: Anna Henningsen --- doc/api/n-api.md | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/doc/api/n-api.md b/doc/api/n-api.md index 423e71859dea94..de841331aba0ad 100644 --- a/doc/api/n-api.md +++ b/doc/api/n-api.md @@ -52,6 +52,14 @@ for the N-API C based functions exported by Node.js. These wrappers are not part of N-API, nor will they be maintained as part of Node.js. One such example is: [node-api](https://github.com/nodejs/node-api). 
+In order to use the N-API functions, include the file +[node_api.h](https://github.com/nodejs/node/blob/master/src/node_api.h) +which is located in the src directory in the node development tree. +For example: +```C +#include <node_api.h> +``` + ## Basic N-API Data Types N-API exposes the following fundamental datatypes as abstractions that are
From 2e2905266e5f6c5a63c521f8cad473b9d82f6b1c Mon Sep 17 00:00:00 2001 From: Michael Dawson Date: Fri, 19 May 2017 17:34:11 -0400 Subject: [PATCH 088/227] doc: fix title/function name mismatch Fix mismatch in title for napi_get_value_string_utf16 Fixes: https://github.com/nodejs/abi-stable-node/issues/243 Backport-PR-URL: https://github.com/nodejs/node/pull/19447 PR-URL: https://github.com/nodejs/node/pull/13123 Reviewed-By: Anna Henningsen Reviewed-By: Refael Ackermann Reviewed-By: James M Snell Reviewed-By: Luigi Pinca Reviewed-By: Gibson Fahnestock Reviewed-By: Colin Ihrig --- doc/api/n-api.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/doc/api/n-api.md b/doc/api/n-api.md index de841331aba0ad..d3efd09ab15ec0 100644 --- a/doc/api/n-api.md +++ b/doc/api/n-api.md @@ -1590,7 +1590,7 @@ x is passed in it returns `napi_string_expected`. This API returns the UTF8-encoded string corresponding the value passed in. -#### *napi_get_value_string_utf16_length* +#### *napi_get_value_string_utf16*
From d89afe868593ffe75c4e7f8974ecd779bb28af06 Mon Sep 17 00:00:00 2001 From: Michael Dawson Date: Fri, 19 May 2017 18:18:54 -0400 Subject: [PATCH 089/227] test: increase n-api constructor coverage Add tests to validate that properties marked as static are available through the class as opposed to instances Backport-PR-URL: https://github.com/nodejs/node/pull/19447 PR-URL: https://github.com/nodejs/node/pull/13124 Reviewed-By: Jason Ginchereau Reviewed-By: James M Snell Reviewed-By: Anna Henningsen Reviewed-By: Colin Ihrig Reviewed-By: Hitesh Kanwathirtha --- test/addons-napi/test_constructor/test.js | 5 +++++ .../test_constructor/test_constructor.c | 16 ++++++++++++++++ 2 files changed, 21 insertions(+)
diff --git a/test/addons-napi/test_constructor/test.js b/test/addons-napi/test_constructor/test.js index 26083db7a28a21..75c7b3678306e4 100644 --- a/test/addons-napi/test_constructor/test.js +++ b/test/addons-napi/test_constructor/test.js @@ -40,3 +40,8 @@ test_object.readwriteAccessor2 = 2; assert.strictEqual(test_object.readwriteAccessor2, 2); assert.strictEqual(test_object.readonlyAccessor2, 2); assert.throws(() => { test_object.readonlyAccessor2 = 3; }, TypeError); + +// validate that static properties are on the class as opposed +// to the instance +assert.strictEqual(TestConstructor.staticReadonlyAccessor1, 10); +assert.strictEqual(test_object.staticReadonlyAccessor1, undefined);
diff --git a/test/addons-napi/test_constructor/test_constructor.c b/test/addons-napi/test_constructor/test_constructor.c index 0a73010d72f115..220d564753ca10 100644 --- a/test/addons-napi/test_constructor/test_constructor.c +++ b/test/addons-napi/test_constructor/test_constructor.c @@ -2,6 +2,7 @@ #include "../common.h" static double value_ = 1; +static double static_value_ = 10; napi_ref constructor_; napi_value GetValue(napi_env env, napi_callback_info info) { @@ -45,6 +46,19 @@ napi_value New(napi_env env, napi_callback_info info) { return _this; } +napi_value GetStaticValue(napi_env env, napi_callback_info info) { + size_t argc = 0; + NAPI_CALL(env, napi_get_cb_info(env, info, 
&argc, NULL, NULL, NULL)); + + NAPI_ASSERT(env, argc == 0, "Wrong number of arguments"); + + napi_value number; + NAPI_CALL(env, napi_create_number(env, static_value_, &number)); + + return number; +} + + void Init(napi_env env, napi_value exports, napi_value module, void* priv) { napi_value number; NAPI_CALL_RETURN_VOID(env, napi_create_number(env, value_, &number)); @@ -58,6 +72,8 @@ void Init(napi_env env, napi_value exports, napi_value module, void* priv) { { "readwriteAccessor2", 0, 0, GetValue, SetValue, 0, napi_writable, 0}, { "readonlyAccessor1", 0, 0, GetValue, NULL, 0, napi_default, 0}, { "readonlyAccessor2", 0, 0, GetValue, NULL, 0, napi_writable, 0}, + { "staticReadonlyAccessor1", 0, 0, GetStaticValue, NULL, 0, + napi_default | napi_static, 0}, }; napi_value cons; From a8c73748dbe5ec15cbf27743c4fe310baaa30ed9 Mon Sep 17 00:00:00 2001 From: Daniel Bevenius Date: Wed, 24 May 2017 12:49:07 +0200 Subject: [PATCH 090/227] src: correct endif comment SRC_NODE_API_H__ Really minor but I could not find an open PR for anything n-api where this could be changed, so creating this so that it is not forgotten. Backport-PR-URL: https://github.com/nodejs/node/pull/19447 PR-URL: https://github.com/nodejs/node/pull/13190 Reviewed-By: Anna Henningsen Reviewed-By: Colin Ihrig Reviewed-By: Michael Dawson Reviewed-By: James M Snell --- src/node_api.h | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/node_api.h b/src/node_api.h index 29c5c513f72f59..5a95c030a35272 100644 --- a/src/node_api.h +++ b/src/node_api.h @@ -480,4 +480,4 @@ NAPI_EXTERN napi_status napi_cancel_async_work(napi_env env, EXTERN_C_END -#endif // SRC_NODE_API_H__ +#endif // SRC_NODE_API_H_ From 1829d259079ba640b2323124c17141be9b3b1a60 Mon Sep 17 00:00:00 2001 From: Michael Dawson Date: Tue, 23 May 2017 17:26:37 -0400 Subject: [PATCH 091/227] test: add coverage for napi_has_named_property Add test to cover napi_has_named_property Backport-PR-URL: https://github.com/nodejs/node/pull/19447 PR-URL: https://github.com/nodejs/node/pull/13178 Reviewed-By: Anna Henningsen Reviewed-By: Colin Ihrig Reviewed-By: Jason Ginchereau --- test/addons-napi/test_properties/test.js | 6 +++++ .../test_properties/test_properties.c | 23 +++++++++++++++++++ 2 files changed, 29 insertions(+) diff --git a/test/addons-napi/test_properties/test.js b/test/addons-napi/test_properties/test.js index a8127a27860eb3..0f37af81b7ffad 100644 --- a/test/addons-napi/test_properties/test.js +++ b/test/addons-napi/test_properties/test.js @@ -39,3 +39,9 @@ test_object.readwriteAccessor2 = 2; assert.strictEqual(test_object.readwriteAccessor2, 2); assert.strictEqual(test_object.readonlyAccessor2, 2); assert.throws(() => { test_object.readonlyAccessor2 = 3; }, TypeError); + +assert.strictEqual(test_object.hasNamedProperty(test_object, 'echo'), true); +assert.strictEqual(test_object.hasNamedProperty(test_object, 'hiddenValue'), + true); +assert.strictEqual(test_object.hasNamedProperty(test_object, 'doesnotexist'), + false); diff --git a/test/addons-napi/test_properties/test_properties.c b/test/addons-napi/test_properties/test_properties.c index 67e6b1a0d5ed0a..575d750a7d936b 100644 --- a/test/addons-napi/test_properties/test_properties.c +++ b/test/addons-napi/test_properties/test_properties.c @@ -37,6 +37,28 @@ napi_value Echo(napi_env env, napi_callback_info info) { return args[0]; } +napi_value HasNamedProperty(napi_env env, napi_callback_info info) { + size_t argc = 2; + napi_value args[2]; 
+ NAPI_CALL(env, napi_get_cb_info(env, info, &argc, args, NULL, NULL)); + + NAPI_ASSERT(env, argc == 2, "Wrong number of arguments"); + + // Extract the name of the property to check + char buffer[128]; + size_t copied; + NAPI_CALL(env, + napi_get_value_string_utf8(env, args[1], buffer, sizeof(buffer), &copied)); + + // do the check and create the boolean return value + bool value; + napi_value result; + NAPI_CALL(env, napi_has_named_property(env, args[0], buffer, &value)); + NAPI_CALL(env, napi_get_boolean(env, value, &result)); + + return result; +} + void Init(napi_env env, napi_value exports, napi_value module, void* priv) { napi_value number; NAPI_CALL_RETURN_VOID(env, napi_create_number(env, value_, &number)); @@ -50,6 +72,7 @@ void Init(napi_env env, napi_value exports, napi_value module, void* priv) { { "readwriteAccessor2", 0, 0, GetValue, SetValue, 0, napi_writable, 0}, { "readonlyAccessor1", 0, 0, GetValue, NULL, 0, napi_default, 0}, { "readonlyAccessor2", 0, 0, GetValue, NULL, 0, napi_writable, 0}, + { "hasNamedProperty", 0, HasNamedProperty, 0, 0, 0, napi_default, 0 }, }; NAPI_CALL_RETURN_VOID(env, napi_define_properties( From 86c0ebf4e27988e386b3b198abe8603d1b4211e9 Mon Sep 17 00:00:00 2001 From: Michael Dawson Date: Wed, 24 May 2017 16:58:03 -0400 Subject: [PATCH 092/227] n-api: add napi_get_version Add napi_get_version function so that addons can query the level of N-API supported. Backport-PR-URL: https://github.com/nodejs/node/pull/19447 PR-URL: https://github.com/nodejs/node/pull/13207 Fixes: https://github.com/nodejs/abi-stable-node/issues/231 Reviewed-By: Anna Henningsen Reviewed-By: Colin Ihrig Reviewed-By: James M Snell Reviewed-By: Jason Ginchereau --- doc/api/n-api.md | 29 ++++++++++++++++++++ src/node_api.cc | 9 ++++++ src/node_api.h | 4 +++ test/addons-napi/test_general/test.js | 4 +++ test/addons-napi/test_general/test_general.c | 9 ++++++ 5 files changed, 55 insertions(+) diff --git a/doc/api/n-api.md b/doc/api/n-api.md index d3efd09ab15ec0..e151a7357fbb58 100644 --- a/doc/api/n-api.md +++ b/doc/api/n-api.md @@ -2946,6 +2946,35 @@ the `complete` callback will be invoked with a status value of `napi_cancelled`. The work should not be deleted before the `complete` callback invocation, even if it has been successfully cancelled. +## Version Management + +### napi_get_version + +```C +NAPI_EXTERN napi_status napi_get_version(napi_env env, + uint32_t* result); +``` + +- `[in] env`: The environment that the API is invoked under. +- `[out] result`: The highest version of N-API supported. + +Returns `napi_ok` if the API succeeded. + +This API returns the highest N-API version supported by the +Node.js runtime. N-API is planned to be additive such that +newer releases of Node.js may support additional API functions. +In order to allow an addon to use a newer function when running with +versions of Node.js that support it, while providing +fallback behavior when running with Node.js versions that don't +support it: + +* Call `napi_get_version()` to determine if the API is available. +* If available, dynamically load a pointer to the function using `uv_dlsym()`. +* Use the dynamically loaded pointer to invoke the function. +* If the function is not available, provide an alternate implementation + that does not use the function. 
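A sketch of the detection-and-fallback pattern described in the list above (assumptions: the future function `napi_example_fn`, the required version `2`, and the `uv_dlopen(NULL, ...)` lookup of the running binary are illustrative only; a real addon would add error handling and platform checks):

```C
#include <node_api.h>
#include <uv.h>

typedef napi_status (*example_fn_t)(napi_env env, napi_value* result);

static example_fn_t LoadExampleFn(napi_env env) {
  uint32_t version = 0;
  if (napi_get_version(env, &version) != napi_ok || version < 2)
    return NULL;  // Not available: the caller should take its fallback path.

  // Resolve the symbol from the running node binary instead of linking to
  // it, so the addon still loads on runtimes that lack the function.
  static uv_lib_t lib;
  example_fn_t fn = NULL;
  if (uv_dlopen(NULL, &lib) == 0)
    uv_dlsym(&lib, "napi_example_fn", (void**) &fn);
  return fn;
}
```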
[Aynchronous Operations]: #n_api_asynchronous_operations [Basic N-API Data Types]: #n_api_basic_n_api_data_types diff --git a/src/node_api.cc b/src/node_api.cc index 976bf15f3ca423..7a485283bbb13d 100644 --- a/src/node_api.cc +++ b/src/node_api.cc @@ -20,6 +20,8 @@ #include "env-inl.h" #include "node_api_backport.h" +#define NAPI_VERSION 1 + static napi_status napi_set_last_error(napi_env env, napi_status error_code, uint32_t engine_error_code = 0, @@ -2715,6 +2717,13 @@ napi_status napi_get_typedarray_info(napi_env env, return napi_clear_last_error(env); } +napi_status napi_get_version(napi_env env, uint32_t* result) { + CHECK_ENV(env); + CHECK_ARG(env, result); + *result = NAPI_VERSION; + return napi_clear_last_error(env); +} + namespace uvimpl { static napi_status ConvertUVErrorCode(int code) { diff --git a/src/node_api.h b/src/node_api.h index 5a95c030a35272..a1840943602ce3 100644 --- a/src/node_api.h +++ b/src/node_api.h @@ -478,6 +478,10 @@ NAPI_EXTERN napi_status napi_queue_async_work(napi_env env, NAPI_EXTERN napi_status napi_cancel_async_work(napi_env env, napi_async_work work); + +// version management +NAPI_EXTERN napi_status napi_get_version(napi_env env, uint32_t* result); + EXTERN_C_END #endif // SRC_NODE_API_H_ diff --git a/test/addons-napi/test_general/test.js b/test/addons-napi/test_general/test.js index a2e57126ce8c22..614cf0f4f0e7c4 100644 --- a/test/addons-napi/test_general/test.js +++ b/test/addons-napi/test_general/test.js @@ -30,3 +30,7 @@ assert.strictEqual(test_general.testGetPrototype(extendedObject), assert.ok(test_general.testGetPrototype(baseObject) !== test_general.testGetPrototype(extendedObject), 'Prototypes for base and extended should be different'); + +// test version management funcitons +// expected version is currently 1 +assert.strictEqual(test_general.testGetVersion(), 1); diff --git a/test/addons-napi/test_general/test_general.c b/test/addons-napi/test_general/test_general.c index 17911b36253df1..585a3bf2b3ff0d 100644 --- a/test/addons-napi/test_general/test_general.c +++ b/test/addons-napi/test_general/test_general.c @@ -25,10 +25,19 @@ napi_value testGetPrototype(napi_env env, napi_callback_info info) { return result; } +napi_value testGetVersion(napi_env env, napi_callback_info info) { + uint32_t version; + napi_value result; + NAPI_CALL(env, napi_get_version(env, &version)); + NAPI_CALL(env ,napi_create_number(env, version, &result)); + return result; +} + void Init(napi_env env, napi_value exports, napi_value module, void* priv) { napi_property_descriptor descriptors[] = { DECLARE_NAPI_PROPERTY("testStrictEquals", testStrictEquals), DECLARE_NAPI_PROPERTY("testGetPrototype", testGetPrototype), + DECLARE_NAPI_PROPERTY("testGetVersion", testGetVersion), }; NAPI_CALL_RETURN_VOID(env, napi_define_properties( From 0dac33d4f239563fee845157f88588b08b1f35c6 Mon Sep 17 00:00:00 2001 From: Michael Dawson Date: Fri, 26 May 2017 13:24:23 -0400 Subject: [PATCH 093/227] test: improve n-api coverage for typed arrays Add testing for all types of typed arrays. Add testing for napi_is_arraybuffer. 
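For reference, a small sketch of the two APIs these tests exercise (not part of the patch; `WrapAsUint8` is a made-up name and error handling is omitted):

```C
#include <node_api.h>

static napi_value WrapAsUint8(napi_env env, napi_value arraybuffer) {
  bool is_arraybuffer = false;
  napi_is_arraybuffer(env, arraybuffer, &is_arraybuffer);
  if (!is_arraybuffer)
    return NULL;  // A real addon would throw a TypeError here.

  void* data = NULL;
  size_t byte_length = 0;
  napi_get_arraybuffer_info(env, arraybuffer, &data, &byte_length);

  // View the whole buffer as a Uint8Array at byte offset 0; for one-byte
  // elements the element count equals the byte length.
  napi_value result;
  napi_create_typedarray(env, napi_uint8_array, byte_length,
                         arraybuffer, 0, &result);
  return result;
}
```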
Backport-PR-URL: https://github.com/nodejs/node/pull/19447 PR-URL: https://github.com/nodejs/node/pull/13244 Reviewed-By: Colin Ihrig Reviewed-By: Jason Ginchereau Reviewed-By: James M Snell Reviewed-By: Kunal Pathak Reviewed-By: Hitesh Kanwathirtha --- test/addons-napi/test_typedarray/test.js | 20 ++++++++ .../test_typedarray/test_typedarray.c | 48 +++++++++++++++++++ 2 files changed, 68 insertions(+) diff --git a/test/addons-napi/test_typedarray/test.js b/test/addons-napi/test_typedarray/test.js index cc1fcbe3566da3..dcb7d76442f608 100644 --- a/test/addons-napi/test_typedarray/test.js +++ b/test/addons-napi/test_typedarray/test.js @@ -37,3 +37,23 @@ assert.strictEqual(externalResult.length, 3); assert.strictEqual(externalResult[0], 0); assert.strictEqual(externalResult[1], 1); assert.strictEqual(externalResult[2], 2); + +// validate creation of all kinds of TypedArrays +const buffer = new ArrayBuffer(128); +const arrayTypes = [ Int8Array, Uint8Array, Uint8ClampedArray, Int16Array, + Uint16Array, Int32Array, Uint32Array, Float32Array, + Float64Array ]; + +arrayTypes.forEach((currentType, key) => { + const template = Reflect.construct(currentType, buffer); + const theArray = test_typedarray.CreateTypedArray(template, buffer); + + assert.ok(theArray instanceof currentType, + 'Type of new array should match that of the template'); + assert.notStrictEqual(theArray, + template, + 'the new array should not be a copy of the template'); + assert.strictEqual(theArray.buffer, + buffer, + 'Buffer for array should match the one passed in'); +}); diff --git a/test/addons-napi/test_typedarray/test_typedarray.c b/test/addons-napi/test_typedarray/test_typedarray.c index d77945486518a2..4194492cae8b3a 100644 --- a/test/addons-napi/test_typedarray/test_typedarray.c +++ b/test/addons-napi/test_typedarray/test_typedarray.c @@ -95,10 +95,58 @@ napi_value External(napi_env env, napi_callback_info info) { return output_array; } +napi_value CreateTypedArray(napi_env env, napi_callback_info info) { + size_t argc = 2; + napi_value args[2]; + NAPI_CALL(env, napi_get_cb_info(env, info, &argc, args, NULL, NULL)); + + NAPI_ASSERT(env, argc == 2, "Wrong number of arguments"); + + napi_value input_array = args[0]; + napi_valuetype valuetype0; + NAPI_CALL(env, napi_typeof(env, input_array, &valuetype0)); + + NAPI_ASSERT(env, valuetype0 == napi_object, + "Wrong type of argments. Expects a typed array as first argument."); + + bool is_typedarray; + NAPI_CALL(env, napi_is_typedarray(env, input_array, &is_typedarray)); + + NAPI_ASSERT(env, is_typedarray, + "Wrong type of argments. Expects a typed array as first argument."); + + napi_valuetype valuetype1; + napi_value input_buffer = args[1]; + NAPI_CALL(env, napi_typeof(env, input_buffer, &valuetype1)); + + NAPI_ASSERT(env, valuetype1 == napi_object, + "Wrong type of argments. Expects an array buffer as second argument."); + + bool is_arraybuffer; + NAPI_CALL(env, napi_is_arraybuffer(env, input_buffer, &is_arraybuffer)); + + NAPI_ASSERT(env, is_arraybuffer, + "Wrong type of argments. 
Expects an array buffer as second argument."); + + napi_typedarray_type type; + napi_value in_array_buffer; + size_t byte_offset; + size_t length; + NAPI_CALL(env, napi_get_typedarray_info( + env, input_array, &type, &length, NULL, &in_array_buffer, &byte_offset)); + + napi_value output_array; + NAPI_CALL(env, napi_create_typedarray( + env, type, length, input_buffer, byte_offset, &output_array)); + + return output_array; +} + void Init(napi_env env, napi_value exports, napi_value module, void* priv) { napi_property_descriptor descriptors[] = { DECLARE_NAPI_PROPERTY("Multiply", Multiply), DECLARE_NAPI_PROPERTY("External", External), + DECLARE_NAPI_PROPERTY("CreateTypedArray", CreateTypedArray), }; NAPI_CALL_RETURN_VOID(env, napi_define_properties( From 652d3218fee63147c91e225c7db94e6d71f6e26c Mon Sep 17 00:00:00 2001 From: Jason Ginchereau Date: Thu, 18 May 2017 15:29:49 -0700 Subject: [PATCH 094/227] test: Make N-API weak-ref GC tests asynchronous One of the N-API weak-reference test cases already had to be made asynchronous to handle different behavior in a newer V8 version: https://github.com/nodejs/node/pull/12864 When porting N-API to Node-ChakraCore, we found more of the test cases needed similar treatment: https://github.com/nodejs/node-chakracore/issues/246 So to make thes tests more robust (and avoid having differences in the test code for Node-ChakraCore), I am refactoring the tests in this file to insert a `setImmedate()` callback before every call to `gc()` and assertions about the effects of the GC. Backport-PR-URL: https://github.com/nodejs/node/pull/19447 PR-URL: https://github.com/nodejs/node/pull/13121 Reviewed-By: Michael Dawson --- test/addons-napi/test_reference/test.js | 168 ++++++++++++++---------- 1 file changed, 99 insertions(+), 69 deletions(-) diff --git a/test/addons-napi/test_reference/test.js b/test/addons-napi/test_reference/test.js index 30effe7eec0922..aaf5531f39e1f3 100644 --- a/test/addons-napi/test_reference/test.js +++ b/test/addons-napi/test_reference/test.js @@ -11,80 +11,110 @@ const test_reference = require(`./build/${common.buildType}/test_reference`); // of a finalizer callback increments the finalizeCount property. assert.strictEqual(test_reference.finalizeCount, 0); -{ - // External value without a finalizer - let value = test_reference.createExternal(); - assert.strictEqual(test_reference.finalizeCount, 0); - assert.strictEqual(typeof value, 'object'); - test_reference.checkExternal(value); - value = null; - global.gc(); - assert.strictEqual(test_reference.finalizeCount, 0); +// Run each test function in sequence, +// with an async delay and GC call between each. 
+function runTests(i, title, tests) { + if (tests[i]) { + if (typeof tests[i] === 'string') { + title = tests[i]; + runTests(i + 1, title, tests); + } else { + try { + tests[i](); + } catch (e) { + console.error('Test failed: ' + title); + throw e; + } + setImmediate(() => { + global.gc(); + runTests(i + 1, title, tests); + }); + } + } } +runTests(0, undefined, [ -{ - // External value with a finalizer - let value = test_reference.createExternalWithFinalize(); - assert.strictEqual(test_reference.finalizeCount, 0); - assert.strictEqual(typeof value, 'object'); - test_reference.checkExternal(value); - value = null; - global.gc(); - assert.strictEqual(test_reference.finalizeCount, 1); -} - -{ - // Strong reference - let value = test_reference.createExternalWithFinalize(); - assert.strictEqual(test_reference.finalizeCount, 0); - test_reference.createReference(value, 1); - assert.strictEqual(test_reference.referenceValue, value); - value = null; - global.gc(); // Value should NOT be GC'd because there is a strong ref - assert.strictEqual(test_reference.finalizeCount, 0); - test_reference.deleteReference(); - global.gc(); // Value should be GC'd because the strong ref was deleted - assert.strictEqual(test_reference.finalizeCount, 1); -} - -{ - // Strong reference, increment then decrement to weak reference - let value = test_reference.createExternalWithFinalize(); - assert.strictEqual(test_reference.finalizeCount, 0); - test_reference.createReference(value, 1); - value = null; - global.gc(); // Value should NOT be GC'd because there is a strong ref - assert.strictEqual(test_reference.finalizeCount, 0); + 'External value without a finalizer', + () => { + const value = test_reference.createExternal(); + assert.strictEqual(test_reference.finalizeCount, 0); + assert.strictEqual(typeof value, 'object'); + test_reference.checkExternal(value); + }, + () => { + assert.strictEqual(test_reference.finalizeCount, 0); + }, - assert.strictEqual(test_reference.incrementRefcount(), 2); - global.gc(); // Value should NOT be GC'd because there is a strong ref - assert.strictEqual(test_reference.finalizeCount, 0); - - assert.strictEqual(test_reference.decrementRefcount(), 1); - global.gc(); // Value should NOT be GC'd because there is a strong ref - assert.strictEqual(test_reference.finalizeCount, 0); + 'External value with a finalizer', + () => { + const value = test_reference.createExternalWithFinalize(); + assert.strictEqual(test_reference.finalizeCount, 0); + assert.strictEqual(typeof value, 'object'); + test_reference.checkExternal(value); + }, + () => { + assert.strictEqual(test_reference.finalizeCount, 1); + }, - assert.strictEqual(test_reference.decrementRefcount(), 0); - global.gc(); // Value should be GC'd because the ref is now weak! 
- assert.strictEqual(test_reference.finalizeCount, 1); + 'Weak reference', + () => { + const value = test_reference.createExternalWithFinalize(); + assert.strictEqual(test_reference.finalizeCount, 0); + test_reference.createReference(value, 0); + assert.strictEqual(test_reference.referenceValue, value); + }, + () => { + // Value should be GC'd because there is only a weak ref + assert.strictEqual(test_reference.referenceValue, undefined); + assert.strictEqual(test_reference.finalizeCount, 1); + test_reference.deleteReference(); + }, - test_reference.deleteReference(); - global.gc(); // Value was already GC'd - assert.strictEqual(test_reference.finalizeCount, 1); -} + 'Strong reference', + () => { + const value = test_reference.createExternalWithFinalize(); + assert.strictEqual(test_reference.finalizeCount, 0); + test_reference.createReference(value, 1); + assert.strictEqual(test_reference.referenceValue, value); + }, + () => { + // Value should NOT be GC'd because there is a strong ref + assert.strictEqual(test_reference.finalizeCount, 0); + test_reference.deleteReference(); + }, + () => { + // Value should be GC'd because the strong ref was deleted + assert.strictEqual(test_reference.finalizeCount, 1); + }, -{ - // Weak reference - let value = test_reference.createExternalWithFinalize(); - assert.strictEqual(test_reference.finalizeCount, 0); - test_reference.createReference(value, 0); - assert.strictEqual(test_reference.referenceValue, value); - value = null; - setImmediate(common.mustCall(() => { - // This test only works if gc() is called from an immediate callback. - global.gc(); // Value should be GC'd because there is only a weak ref - assert.strictEqual(test_reference.referenceValue, undefined); + 'Strong reference, increment then decrement to weak reference', + () => { + const value = test_reference.createExternalWithFinalize(); + assert.strictEqual(test_reference.finalizeCount, 0); + test_reference.createReference(value, 1); + }, + () => { + // Value should NOT be GC'd because there is a strong ref + assert.strictEqual(test_reference.finalizeCount, 0); + assert.strictEqual(test_reference.incrementRefcount(), 2); + }, + () => { + // Value should NOT be GC'd because there is a strong ref + assert.strictEqual(test_reference.finalizeCount, 0); + assert.strictEqual(test_reference.decrementRefcount(), 1); + }, + () => { + // Value should NOT be GC'd because there is a strong ref + assert.strictEqual(test_reference.finalizeCount, 0); + assert.strictEqual(test_reference.decrementRefcount(), 0); + }, + () => { + // Value should be GC'd because the ref is now weak! 
assert.strictEqual(test_reference.finalizeCount, 1); test_reference.deleteReference(); - })); -} + }, + () => { + // Value was already GC'd + assert.strictEqual(test_reference.finalizeCount, 1); + }, +]); From 17fe21e83dcc241a4634f88fef436f30199fc4fc Mon Sep 17 00:00:00 2001 From: JongChan Choi Date: Wed, 31 May 2017 14:04:14 +0900 Subject: [PATCH 095/227] doc: fix typo in n-api.md Backport-PR-URL: https://github.com/nodejs/node/pull/19447 PR-URL: https://github.com/nodejs/node/pull/13323 Reviewed-By: Luigi Pinca --- doc/api/n-api.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/api/n-api.md b/doc/api/n-api.md index e151a7357fbb58..7da9e5171664a9 100644 --- a/doc/api/n-api.md +++ b/doc/api/n-api.md @@ -530,7 +530,7 @@ Taking the earlier example, adding calls to [`napi_open_handle_scope`][] and is valid throughout the execution of the loop: ```C -for (int i = 0; i < 1000000; i++) {napi_ +for (int i = 0; i < 1000000; i++) { napi_handle_scope scope; napi_status status = napi_open_handle_scope(env, &scope); if (status != napi_ok) { From 919556f27a34e171984b235b01be3444faf4a540 Mon Sep 17 00:00:00 2001 From: Jason Ginchereau Date: Thu, 25 May 2017 17:05:21 -0700 Subject: [PATCH 096/227] n-api: enable napi_wrap() to work with any object Previously, napi_wrap() would only work with objects created from a constructor returned by napi_define_class(). While the N-API team was aware of this limitation, it was not clearly documented and is likely to cause confusion anyway. It's much simpler if addons are allowed to use any JS object. Also, the specific behavior of the limitation is difficult to reimplement on other VMs that work differently from V8. V8 requires object internal fields to be declared on the object prototype (which napi_define_class() used to do). Since it's too late to modify the object prototype by the time napi_wrap() is called, napi_wrap() now inserts a new object (with the internal field) into the supplied object's prototype chain. Then it can be retrieved from there later by napi_unwrap(). This change also includes improvements to the documentation for napi_create_external(), partly to explain how it is different from napi_wrap(). Backport-PR-URL: https://github.com/nodejs/node/pull/19447 PR-URL: https://github.com/nodejs/node/pull/13250 Reviewed-By: Michael Dawson --- doc/api/n-api.md | 57 ++++++++----- src/node_api.cc | 42 ++++++---- test/addons-napi/test_object/test.js | 98 +++++++++++++++------- test/addons-napi/test_object/test_object.c | 27 ++++++ 4 files changed, 159 insertions(+), 65 deletions(-) diff --git a/doc/api/n-api.md b/doc/api/n-api.md index 7da9e5171664a9..ab90cf05f4750d 100644 --- a/doc/api/n-api.md +++ b/doc/api/n-api.md @@ -1059,20 +1059,24 @@ napi_status napi_create_external(napi_env env, ``` - `[in] env`: The environment that the API is invoked under. -- `[in] data`: Raw pointer to the external data being wrapped. -- `[in] finalize_cb`: Optional callback to call when the wrapped object +- `[in] data`: Raw pointer to the external data. +- `[in] finalize_cb`: Optional callback to call when the external value is being collected. - `[in] finalize_hint`: Optional hint to pass to the finalize callback during collection. -- `[out] result`: A `napi_value` representing an external object. +- `[out] result`: A `napi_value` representing an external value. Returns `napi_ok` if the API succeeded. -This API allocates a JavaScript object with external data attached to it. 
-This is used to wrap native objects and project them into JavaScript. -The API allows the caller to pass in a finalize callback, in case the -underlying native resource needs to be cleaned up when the wrapper -JavaScript object gets collected. +This API allocates a JavaScript value with external data attached to it. This +is used to pass external data through JavaScript code, so it can be retrieved +later by native code. The API allows the caller to pass in a finalize callback, +in case the underlying native resource needs to be cleaned up when the external +JavaScript value gets collected. + +*Note*: The created value is not an object, and therefore does not support +additional properties. It is considered a distinct value type: calling +`napi_typeof()` with an external value yields `napi_external`. #### napi_create_external_arraybuffer -```C -napi_status napi_get_value_string_length(napi_env env, - napi_value value, - int* result) -``` - -- `[in] env`: The environment that the API is invoked under. -- `[in] value`: `napi_value` representing JavaScript string. -- `[out] result`: Number of characters in the given JavaScript string. - -Returns `napi_ok` if the API succeeded. If a non-String `napi_value` -is passed in it returns `napi_string_expected`. - -This API returns the number of characters in the given JavaScript string. - #### *napi_get_value_string_utf8* ```C -NODE_EXTERN napi_status napi_create_error(napi_env env, const char* msg); +NODE_EXTERN napi_status napi_create_error(napi_env env, + const char* msg, + napi_value* result); ``` - `[in] env`: The environment that the API is invoked under. - `[in] msg`: C string representing the text to be associated with. @@ -422,7 +424,9 @@ This API returns a JavaScript Error with the text provided. added: v8.0.0 --> ```C -NODE_EXTERN napi_status napi_create_type_error(napi_env env, const char* msg); +NODE_EXTERN napi_status napi_create_type_error(napi_env env, + const char* msg, + napi_value* result); ``` - `[in] env`: The environment that the API is invoked under. - `[in] msg`: C string representing the text to be associated with. @@ -438,7 +442,9 @@ This API returns a JavaScript TypeError with the text provided. added: v8.0.0 --> ```C -NODE_EXTERN napi_status napi_create_range_error(napi_env env, const char* msg); +NODE_EXTERN napi_status napi_create_range_error(napi_env env, + const char* msg, + napi_value* result); ``` - `[in] env`: The environment that the API is invoked under. - `[in] msg`: C string representing the text to be associated with. From 1e91d5804df53f0fdf8212ddbf0780111cae922b Mon Sep 17 00:00:00 2001 From: XadillaX Date: Fri, 9 Jun 2017 18:14:22 +0800 Subject: [PATCH 105/227] doc: fix out of date napi_callback doc The earlier version `napi_callback` returns `void` but now is `napi_value`. The document of this section hasn't been modified. Backport-PR-URL: https://github.com/nodejs/node/pull/19447 PR-URL: https://github.com/nodejs/node/pull/13570 Fixes: https://github.com/nodejs/node/issues/12248 Fixes: https://github.com/nodejs/node/issues/13562 Reviewed-By: Gibson Fahnestock Reviewed-By: Colin Ihrig Reviewed-By: Jason Ginchereau Reviewed-By: James M Snell --- doc/api/n-api.md | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/doc/api/n-api.md b/doc/api/n-api.md index 72b35f3810bbbd..da93f7c9d67c65 100644 --- a/doc/api/n-api.md +++ b/doc/api/n-api.md @@ -161,18 +161,16 @@ For more details, review the [Object Lifetime Management][]. 
### N-API Callback types #### *napi_callback_info* -Opaque datatype that is passed to a callback function. It can be used for two -purposes: -- Get additional information about the context in which the callback was - invoked. -- Set the return value of the callback. +Opaque datatype that is passed to a callback function. It can be used for +getting additional information about the context in which the callback was +invoked. #### *napi_callback* Function pointer type for user-provided native functions which are to be exposed to JavaScript via N-API. Callback functions should satisfy the following signature: ```C -typedef void (*napi_callback)(napi_env, napi_callback_info); +typedef napi_value (*napi_callback)(napi_env, napi_callback_info); ``` #### *napi_finalize* @@ -2516,8 +2514,9 @@ In order to expose a function as part of the add-on's module exports, set the newly created function on the exports object. A sample module might look as follows: ```C -void SayHello(napi_env env, napi_callback_info info) { +napi_value SayHello(napi_env env, napi_callback_info info) { printf("Hello\n"); + return nullptr; } void Init(napi_env env, napi_value exports, napi_value module, void* priv) { From cc3a4af7c89350a49b9d24c5472349a972880573 Mon Sep 17 00:00:00 2001 From: Michael Dawson Date: Tue, 13 Jun 2017 01:02:52 -0400 Subject: [PATCH 106/227] doc: fix a few n-api doc issues - Add doc for napi_create_string_latin1(). - Fix signatures where c string was specified instead of napi_value. - Fix return type of napi_callback. - Update to specify that napi_escape_handle() can only be called once for a given scope. Backport-PR-URL: https://github.com/nodejs/node/pull/19447 PR-URL: https://github.com/nodejs/node/pull/13650 Fixes: https://github.com/nodejs/node/issues/13555 Fixes: https://github.com/nodejs/node/issues/13556 Fixes: https://github.com/nodejs/node/issues/13562 Reviewed-By: Colin Ihrig Reviewed-By: James M Snell Reviewed-By: Jason Ginchereau Reviewed-By: Ingvar Stepanyan --- doc/api/n-api.md | 69 ++++++++++++++++++++++++++++++++++-------------- 1 file changed, 49 insertions(+), 20 deletions(-) diff --git a/doc/api/n-api.md b/doc/api/n-api.md index da93f7c9d67c65..f8154d2cb490ec 100644 --- a/doc/api/n-api.md +++ b/doc/api/n-api.md @@ -406,11 +406,12 @@ added: v8.0.0 --> ```C NODE_EXTERN napi_status napi_create_error(napi_env env, - const char* msg, + napi_value msg, napi_value* result); ``` - `[in] env`: The environment that the API is invoked under. -- `[in] msg`: C string representing the text to be associated with. +- `[in] msg`: napi_value that references a JavaScript String to be +used as the message for the Error. - `[out] result`: `napi_value` representing the error created. Returns `napi_ok` if the API succeeded. @@ -423,11 +424,12 @@ added: v8.0.0 --> ```C NODE_EXTERN napi_status napi_create_type_error(napi_env env, - const char* msg, + napi_value msg, napi_value* result); ``` - `[in] env`: The environment that the API is invoked under. -- `[in] msg`: C string representing the text to be associated with. +- `[in] msg`: napi_value that references a JavaScript String to be +used as the message for the Error. - `[out] result`: `napi_value` representing the error created. Returns `napi_ok` if the API succeeded. @@ -445,7 +447,8 @@ NODE_EXTERN napi_status napi_create_range_error(napi_env env, napi_value* result); ``` - `[in] env`: The environment that the API is invoked under. 
-- `[in] msg`: C string representing the text to be associated with. +- `[in] msg`: napi_value that references a JavaScript String to be +used as the message for the Error. - `[out] result`: `napi_value` representing the error created. Returns `napi_ok` if the API succeeded. @@ -562,16 +565,17 @@ for (int i = 0; i < 1000000; i++) { ``` When nesting scopes, there are cases where a handle from an -inner scope needs to live beyond the lifespan of that scope. N-API supports an +inner scope needs to live beyond the lifespan of that scope. N-API supports an 'escapable scope' in order to support this case. An escapable scope -allows one or more handles to be 'promoted' so that they 'escape' the -current scope and the lifespan of the handle(s) changes from the current +allows one handle to be 'promoted' so that it 'escapes' the +current scope and the lifespan of the handle changes from the current scope to that of the outer scope. The methods available to open/close escapable scopes are [`napi_open_escapable_handle_scope`][] and [`napi_close_escapable_handle_scope`][]. -The request to promote a handle is made through the [`napi_escape_handle`][]. +The request to promote a handle is made through [`napi_escape_handle`][] which +can only be called once. #### napi_open_handle_scope ```C napi_status napi_create_symbol(napi_env env, - const char* description, + napi_value description, napi_value* result) ``` - `[in] env`: The environment that the API is invoked under. -- `[in] description`: Null-terminated character buffer representing a -UTF8-encoded string to describe the symbol. +- `[in] description`: Optional napi_value which refers to a JavaScript +String to be set as the description for the symbol. - `[out] result`: A `napi_value` representing a JavaScript Symbol. Returns `napi_ok` if the API succeeded. @@ -1299,8 +1303,8 @@ napi_status napi_create_string_utf16(napi_env env, - `[in] env`: The environment that the API is invoked under. - `[in] str`: Character buffer representing a UTF16-LE-encoded string. -- `[in] length`: The length of the string in characters, or -1 if it is -null-terminated. +- `[in] length`: The length of the string in two-byte code units, or -1 if +it is null-terminated. - `[out] result`: A `napi_value` representing a JavaScript String. Returns `napi_ok` if the API succeeded. @@ -1311,6 +1315,31 @@ The JavaScript String type is described in [Section 6.1.4](https://tc39.github.io/ecma262/#sec-ecmascript-language-types-string-type) of the ECMAScript Language Specification. +#### *napi_create_string_latin1* + +```C +NAPI_EXTERN napi_status napi_create_string_latin1(napi_env env, + const char* str, + size_t length, + napi_value* result); +``` + +- `[in] env`: The environment that the API is invoked under. +- `[in] str`: Character buffer representing a latin1-encoded string. +- `[in] length`: The length of the string in bytes, or -1 if it is +null-terminated. +- `[out] result`: A `napi_value` representing a JavaScript String. + +Returns `napi_ok` if the API succeeded. + +This API creates a JavaScript String object from a latin1-encoded C string. + +The JavaScript String type is described in +[Section 6.1.4](https://tc39.github.io/ecma262/#sec-ecmascript-language-types-string-type) +of the ECMAScript Language Specification. + #### *napi_create_string_utf8* +```C +napi_status napi_delete_element(napi_env env, + napi_value object, + uint32_t index, + bool* result); +``` + +- `[in] env`: The environment that the N-API call is invoked under. +- `[in] object`: The object to query. 
+- `[in] index`: The index of the property to delete. +- `[out] result`: Whether the element deletion succeeded or not. `result` can +optionally be ignored by passing `NULL`. + +Returns `napi_ok` if the API succeeded. + +This API attempts to delete the specified `index` from `object`. + #### *napi_define_properties* +```C +napi_status napi_delete_property(napi_env env, + napi_value object, + napi_value key, + bool* result); +``` + +- `[in] env`: The environment that the N-API call is invoked under. +- `[in] object`: The object to query. +- `[in] key`: The name of the property to delete. +- `[out] result`: Whether the property deletion succeeded or not. `result` can +optionally be ignored by passing `NULL`. + +Returns `napi_ok` if the API succeeded. + +This API attempts to delete the `key` own property from `object`. + + #### *napi_set_named_property* +```C +napi_status napi_has_own_property(napi_env env, + napi_value object, + napi_value key, + bool* result); +``` + +- `[in] env`: The environment that the N-API call is invoked under. +- `[in] object`: The object to query. +- `[in] key`: The name of the own property whose existence to check. +- `[out] result`: Whether the own property exists on the object or not. + +Returns `napi_ok` if the API succeeded. + +This API checks if the Object passed in has the named own property. `key` must +be a string or a Symbol, or an error will be thrown. N-API will not perform any +conversion between data types. + + #### *napi_set_named_property* ```C -NODE_EXTERN napi_status napi_throw_error(napi_env env, const char* msg); +NODE_EXTERN napi_status napi_throw_error(napi_env env, + const char* code, + const char* msg); ``` - `[in] env`: The environment that the API is invoked under. +- `[in] code`: Optional error code to be set on the error. - `[in] msg`: C string representing the text to be associated with the error. @@ -358,9 +386,12 @@ This API throws a JavaScript Error with the text provided. added: v8.0.0 --> ```C -NODE_EXTERN napi_status napi_throw_type_error(napi_env env, const char* msg); +NODE_EXTERN napi_status napi_throw_type_error(napi_env env, + const char* code, + const char* msg); ``` - `[in] env`: The environment that the API is invoked under. +- `[in] code`: Optional error code to be set on the error. - `[in] msg`: C string representing the text to be associated with the error. @@ -373,9 +404,12 @@ This API throws a JavaScript TypeError with the text provided. added: v8.0.0 --> ```C -NODE_EXTERN napi_status napi_throw_range_error(napi_env env, const char* msg); +NODE_EXTERN napi_status napi_throw_range_error(napi_env env, + const char* code, + const char* msg); ``` - `[in] env`: The environment that the API is invoked under. +- `[in] code`: Optional error code to be set on the error. - `[in] msg`: C string representing the text to be associated with the error. @@ -409,10 +443,13 @@ added: v8.0.0 --> ```C NODE_EXTERN napi_status napi_create_error(napi_env env, + napi_value code, napi_value msg, napi_value* result); ``` - `[in] env`: The environment that the API is invoked under. +- `[in] code`: Optional `napi_value` with the string for the error code to + be associated with the error. - `[in] msg`: napi_value that references a JavaScript String to be used as the message for the Error. - `[out] result`: `napi_value` representing the error created. 
@@ -427,10 +464,13 @@ added: v8.0.0 --> ```C NODE_EXTERN napi_status napi_create_type_error(napi_env env, + napi_value code, napi_value msg, napi_value* result); ``` - `[in] env`: The environment that the API is invoked under. +- `[in] code`: Optional `napi_value` with the string for the error code to + be associated with the error. - `[in] msg`: napi_value that references a JavaScript String to be used as the message for the Error. - `[out] result`: `napi_value` representing the error created. @@ -446,10 +486,13 @@ added: v8.0.0 --> ```C NODE_EXTERN napi_status napi_create_range_error(napi_env env, + napi_value code, const char* msg, napi_value* result); ``` - `[in] env`: The environment that the API is invoked under. +- `[in] code`: Optional `napi_value` with the string for the error code to + be associated with the error. - `[in] msg`: napi_value that references a JavaScript String to be used as the message for the Error. - `[out] result`: `napi_value` representing the error created. diff --git a/src/node_api.cc b/src/node_api.cc index fee00dabe9a14d..25e8c6385572e7 100644 --- a/src/node_api.cc +++ b/src/node_api.cc @@ -21,6 +21,7 @@ #include "node_internals.h" #include "env-inl.h" #include "node_api_backport.h" +#include "util.h" #define NAPI_VERSION 1 @@ -1525,7 +1526,61 @@ napi_status napi_create_symbol(napi_env env, return GET_RETURN_STATUS(env); } +static napi_status set_error_code(napi_env env, + v8::Local error, + napi_value code, + const char* code_cstring) { + if ((code != nullptr) || (code_cstring != nullptr)) { + v8::Isolate* isolate = env->isolate; + v8::Local context = isolate->GetCurrentContext(); + v8::Local err_object = error.As(); + + v8::Local code_value = v8impl::V8LocalValueFromJsValue(code); + if (code != nullptr) { + code_value = v8impl::V8LocalValueFromJsValue(code); + RETURN_STATUS_IF_FALSE(env, code_value->IsString(), napi_string_expected); + } else { + CHECK_NEW_FROM_UTF8(env, code_value, code_cstring); + } + + v8::Local code_key; + CHECK_NEW_FROM_UTF8(env, code_key, "code"); + + v8::Maybe set_maybe = err_object->Set(context, code_key, code_value); + RETURN_STATUS_IF_FALSE(env, + set_maybe.FromMaybe(false), + napi_generic_failure); + + // now update the name to be "name [code]" where name is the + // original name and code is the code associated with the Error + v8::Local name_string; + CHECK_NEW_FROM_UTF8(env, name_string, ""); + v8::Local name_key; + CHECK_NEW_FROM_UTF8(env, name_key, "name"); + + auto maybe_name = err_object->Get(context, name_key); + if (!maybe_name.IsEmpty()) { + v8::Local name = maybe_name.ToLocalChecked(); + if (name->IsString()) { + name_string = v8::String::Concat(name_string, name.As()); + } + } + name_string = v8::String::Concat(name_string, + FIXED_ONE_BYTE_STRING(isolate, " [")); + name_string = v8::String::Concat(name_string, code_value.As()); + name_string = v8::String::Concat(name_string, + FIXED_ONE_BYTE_STRING(isolate, "]")); + + set_maybe = err_object->Set(context, name_key, name_string); + RETURN_STATUS_IF_FALSE(env, + set_maybe.FromMaybe(false), + napi_generic_failure); + } + return napi_ok; +} + napi_status napi_create_error(napi_env env, + napi_value code, napi_value msg, napi_value* result) { NAPI_PREAMBLE(env); @@ -1535,13 +1590,18 @@ napi_status napi_create_error(napi_env env, v8::Local message_value = v8impl::V8LocalValueFromJsValue(msg); RETURN_STATUS_IF_FALSE(env, message_value->IsString(), napi_string_expected); - *result = v8impl::JsValueFromV8LocalValue(v8::Exception::Error( - message_value.As())); + v8::Local 
error_obj = + v8::Exception::Error(message_value.As()); + napi_status status = set_error_code(env, error_obj, code, nullptr); + if (status != napi_ok) return status; + + *result = v8impl::JsValueFromV8LocalValue(error_obj); return GET_RETURN_STATUS(env); } napi_status napi_create_type_error(napi_env env, + napi_value code, napi_value msg, napi_value* result) { NAPI_PREAMBLE(env); @@ -1551,13 +1611,18 @@ napi_status napi_create_type_error(napi_env env, v8::Local message_value = v8impl::V8LocalValueFromJsValue(msg); RETURN_STATUS_IF_FALSE(env, message_value->IsString(), napi_string_expected); - *result = v8impl::JsValueFromV8LocalValue(v8::Exception::TypeError( - message_value.As())); + v8::Local error_obj = + v8::Exception::TypeError(message_value.As()); + napi_status status = set_error_code(env, error_obj, code, nullptr); + if (status != napi_ok) return status; + + *result = v8impl::JsValueFromV8LocalValue(error_obj); return GET_RETURN_STATUS(env); } napi_status napi_create_range_error(napi_env env, + napi_value code, napi_value msg, napi_value* result) { NAPI_PREAMBLE(env); @@ -1567,8 +1632,12 @@ napi_status napi_create_range_error(napi_env env, v8::Local message_value = v8impl::V8LocalValueFromJsValue(msg); RETURN_STATUS_IF_FALSE(env, message_value->IsString(), napi_string_expected); - *result = v8impl::JsValueFromV8LocalValue(v8::Exception::RangeError( - message_value.As())); + v8::Local error_obj = + v8::Exception::RangeError(message_value.As()); + napi_status status = set_error_code(env, error_obj, code, nullptr); + if (status != napi_ok) return status; + + *result = v8impl::JsValueFromV8LocalValue(error_obj); return GET_RETURN_STATUS(env); } @@ -1741,40 +1810,58 @@ napi_status napi_throw(napi_env env, napi_value error) { return napi_clear_last_error(env); } -napi_status napi_throw_error(napi_env env, const char* msg) { +napi_status napi_throw_error(napi_env env, + const char* code, + const char* msg) { NAPI_PREAMBLE(env); v8::Isolate* isolate = env->isolate; v8::Local str; CHECK_NEW_FROM_UTF8(env, str, msg); - isolate->ThrowException(v8::Exception::Error(str)); + v8::Local error_obj = v8::Exception::Error(str); + napi_status status = set_error_code(env, error_obj, nullptr, code); + if (status != napi_ok) return status; + + isolate->ThrowException(error_obj); // any VM calls after this point and before returning // to the javascript invoker will fail return napi_clear_last_error(env); } -napi_status napi_throw_type_error(napi_env env, const char* msg) { +napi_status napi_throw_type_error(napi_env env, + const char* code, + const char* msg) { NAPI_PREAMBLE(env); v8::Isolate* isolate = env->isolate; v8::Local str; CHECK_NEW_FROM_UTF8(env, str, msg); - isolate->ThrowException(v8::Exception::TypeError(str)); + v8::Local error_obj = v8::Exception::TypeError(str); + napi_status status = set_error_code(env, error_obj, nullptr, code); + if (status != napi_ok) return status; + + isolate->ThrowException(error_obj); // any VM calls after this point and before returning // to the javascript invoker will fail return napi_clear_last_error(env); } -napi_status napi_throw_range_error(napi_env env, const char* msg) { +napi_status napi_throw_range_error(napi_env env, + const char* code, + const char* msg) { NAPI_PREAMBLE(env); v8::Isolate* isolate = env->isolate; v8::Local str; CHECK_NEW_FROM_UTF8(env, str, msg); - isolate->ThrowException(v8::Exception::RangeError(str)); + v8::Local error_obj = v8::Exception::RangeError(str); + napi_status status = set_error_code(env, error_obj, nullptr, code); + if 
(status != napi_ok) return status; + + isolate->ThrowException(error_obj); // any VM calls after this point and before returning // to the javascript invoker will fail return napi_clear_last_error(env); @@ -2394,7 +2481,9 @@ napi_status napi_instanceof(napi_env env, CHECK_TO_OBJECT(env, context, ctor, constructor); if (!ctor->IsFunction()) { - napi_throw_type_error(env, "constructor must be a function"); + napi_throw_type_error(env, + "ERR_NAPI_CONS_FUNCTION", + "Constructor must be a function"); return napi_set_last_error(env, napi_function_expected); } @@ -2462,7 +2551,10 @@ napi_status napi_instanceof(napi_env env, v8::Local prototype_property = maybe_prototype.ToLocalChecked(); if (!prototype_property->IsObject()) { - napi_throw_type_error(env, "constructor.prototype must be an object"); + napi_throw_type_error( + env, + "ERR_NAPI_CONS_PROTOTYPE_OBJECT", + "Constructor.prototype must be an object"); return napi_set_last_error(env, napi_object_expected); } diff --git a/src/node_api.h b/src/node_api.h index e346b762dd2702..af98789d418210 100644 --- a/src/node_api.h +++ b/src/node_api.h @@ -142,12 +142,15 @@ NAPI_EXTERN napi_status napi_create_function(napi_env env, void* data, napi_value* result); NAPI_EXTERN napi_status napi_create_error(napi_env env, + napi_value code, napi_value msg, napi_value* result); NAPI_EXTERN napi_status napi_create_type_error(napi_env env, + napi_value code, napi_value msg, napi_value* result); NAPI_EXTERN napi_status napi_create_range_error(napi_env env, + napi_value code, napi_value msg, napi_value* result); @@ -404,9 +407,15 @@ NAPI_EXTERN napi_status napi_escape_handle(napi_env env, // Methods to support error handling NAPI_EXTERN napi_status napi_throw(napi_env env, napi_value error); -NAPI_EXTERN napi_status napi_throw_error(napi_env env, const char* msg); -NAPI_EXTERN napi_status napi_throw_type_error(napi_env env, const char* msg); -NAPI_EXTERN napi_status napi_throw_range_error(napi_env env, const char* msg); +NAPI_EXTERN napi_status napi_throw_error(napi_env env, + const char* code, + const char* msg); +NAPI_EXTERN napi_status napi_throw_type_error(napi_env env, + const char* code, + const char* msg); +NAPI_EXTERN napi_status napi_throw_range_error(napi_env env, + const char* code, + const char* msg); NAPI_EXTERN napi_status napi_is_error(napi_env env, napi_value value, bool* result); diff --git a/test/addons-napi/common.h b/test/addons-napi/common.h index e9640935d4154b..422418ced49a39 100644 --- a/test/addons-napi/common.h +++ b/test/addons-napi/common.h @@ -12,7 +12,7 @@ const char* error_message = error_info->error_message != NULL ? 
\ error_info->error_message : \ "empty error message"; \ - napi_throw_error((env), error_message); \ + napi_throw_error((env), NULL, error_message); \ } \ } while (0) @@ -21,6 +21,7 @@ if (!(assertion)) { \ napi_throw_error( \ (env), \ + NULL, \ "assertion (" #assertion ") failed: " message); \ return ret_val; \ } \ diff --git a/test/addons-napi/test_async/test_async.cc b/test/addons-napi/test_async/test_async.cc index cc7bc334304323..f257b268b93159 100644 --- a/test/addons-napi/test_async/test_async.cc +++ b/test/addons-napi/test_async/test_async.cc @@ -29,7 +29,7 @@ void Execute(napi_env env, void* data) { carrier* c = static_cast(data); if (c != &the_carrier) { - napi_throw_type_error(env, "Wrong data parameter to Execute."); + napi_throw_type_error(env, nullptr, "Wrong data parameter to Execute."); return; } @@ -40,12 +40,12 @@ void Complete(napi_env env, napi_status status, void* data) { carrier* c = static_cast(data); if (c != &the_carrier) { - napi_throw_type_error(env, "Wrong data parameter to Complete."); + napi_throw_type_error(env, nullptr, "Wrong data parameter to Complete."); return; } if (status != napi_ok) { - napi_throw_type_error(env, "Execute callback failed."); + napi_throw_type_error(env, nullptr, "Execute callback failed."); return; } diff --git a/test/addons-napi/test_error/test.js b/test/addons-napi/test_error/test.js index f7479f2c9a64d8..80f99f48ba79f3 100644 --- a/test/addons-napi/test_error/test.js +++ b/test/addons-napi/test_error/test.js @@ -72,6 +72,30 @@ assert.throws(() => { test_error.throwTypeError(); }, /^TypeError: type error$/); +assert.throws( + () => test_error.throwErrorCode(), + common.expectsError({ + code: 'ERR_TEST_CODE', + message: 'Error [error]' + }) +); + +assert.throws( + () => test_error.throwRangeErrorCode(), + common.expectsError({ + code: 'ERR_TEST_CODE', + message: 'RangeError [range error]' + }) +); + +assert.throws( + () => test_error.throwTypeErrorCode(), + common.expectsError({ + code: 'ERR_TEST_CODE', + message: 'TypeError [type error]' + }) +); + let error = test_error.createError(); assert.ok(error instanceof Error, 'expected error to be an instance of Error'); assert.strictEqual(error.message, 'error', 'expected message to be "error"'); @@ -89,3 +113,41 @@ assert.ok(error instanceof TypeError, assert.strictEqual(error.message, 'type error', 'expected message to be "type error"'); + +error = test_error.createErrorCode(); +assert.ok(error instanceof Error, 'expected error to be an instance of Error'); +assert.strictEqual(error.code, + 'ERR_TEST_CODE', + 'expected code to be "ERR_TEST_CODE"'); +assert.strictEqual(error.message, + 'Error [error]', + 'expected message to be "Error [error]"'); +assert.strictEqual(error.name, + 'Error [ERR_TEST_CODE]', + 'expected name to be "Error [ERR_TEST_CODE]"'); + +error = test_error.createRangeErrorCode(); +assert.ok(error instanceof RangeError, + 'expected error to be an instance of RangeError'); +assert.strictEqual(error.message, + 'RangeError [range error]', + 'expected message to be "RangeError [range error]"'); +assert.strictEqual(error.code, + 'ERR_TEST_CODE', + 'expected code to be "ERR_TEST_CODE"'); +assert.strictEqual(error.name, + 'RangeError [ERR_TEST_CODE]', + 'expected name to be "RangeError[ERR_TEST_CODE]"'); + +error = test_error.createTypeErrorCode(); +assert.ok(error instanceof TypeError, + 'expected error to be an instance of TypeError'); +assert.strictEqual(error.message, + 'TypeError [type error]', + 'expected message to be "TypeError [type error]"'); 
+assert.strictEqual(error.code, + 'ERR_TEST_CODE', + 'expected code to be "ERR_TEST_CODE"'); +assert.strictEqual(error.name, + 'TypeError [ERR_TEST_CODE]', + 'expected name to be "TypeError[ERR_TEST_CODE]"'); diff --git a/test/addons-napi/test_error/test_error.cc b/test/addons-napi/test_error/test_error.cc index ddba2059f23be6..29aba1f1ef62f6 100644 --- a/test/addons-napi/test_error/test_error.cc +++ b/test/addons-napi/test_error/test_error.cc @@ -19,31 +19,51 @@ napi_value throwExistingError(napi_env env, napi_callback_info info) { napi_value message; napi_value error; NAPI_CALL(env, napi_create_string_utf8(env, "existing error", -1, &message)); - NAPI_CALL(env, napi_create_error(env, message, &error)); + NAPI_CALL(env, napi_create_error(env, nullptr, message, &error)); NAPI_CALL(env, napi_throw(env, error)); return nullptr; } napi_value throwError(napi_env env, napi_callback_info info) { - NAPI_CALL(env, napi_throw_error(env, "error")); + NAPI_CALL(env, napi_throw_error(env, nullptr, "error")); return nullptr; } napi_value throwRangeError(napi_env env, napi_callback_info info) { - NAPI_CALL(env, napi_throw_range_error(env, "range error")); + NAPI_CALL(env, napi_throw_range_error(env, nullptr, "range error")); return nullptr; } napi_value throwTypeError(napi_env env, napi_callback_info info) { - NAPI_CALL(env, napi_throw_type_error(env, "type error")); + NAPI_CALL(env, napi_throw_type_error(env, nullptr, "type error")); return nullptr; } +napi_value throwErrorCode(napi_env env, napi_callback_info info) { + NAPI_CALL(env, napi_throw_error(env, "ERR_TEST_CODE", "Error [error]")); + return nullptr; +} + +napi_value throwRangeErrorCode(napi_env env, napi_callback_info info) { + NAPI_CALL(env, napi_throw_range_error(env, + "ERR_TEST_CODE", + "RangeError [range error]")); + return nullptr; +} + +napi_value throwTypeErrorCode(napi_env env, napi_callback_info info) { + NAPI_CALL(env, napi_throw_type_error(env, + "ERR_TEST_CODE", + "TypeError [type error]")); + return nullptr; +} + + napi_value createError(napi_env env, napi_callback_info info) { napi_value result; napi_value message; NAPI_CALL(env, napi_create_string_utf8(env, "error", -1, &message)); - NAPI_CALL(env, napi_create_error(env, message, &result)); + NAPI_CALL(env, napi_create_error(env, nullptr, message, &result)); return result; } @@ -51,7 +71,7 @@ napi_value createRangeError(napi_env env, napi_callback_info info) { napi_value result; napi_value message; NAPI_CALL(env, napi_create_string_utf8(env, "range error", -1, &message)); - NAPI_CALL(env, napi_create_range_error(env, message, &result)); + NAPI_CALL(env, napi_create_range_error(env, nullptr, message, &result)); return result; } @@ -59,7 +79,43 @@ napi_value createTypeError(napi_env env, napi_callback_info info) { napi_value result; napi_value message; NAPI_CALL(env, napi_create_string_utf8(env, "type error", -1, &message)); - NAPI_CALL(env, napi_create_type_error(env, message, &result)); + NAPI_CALL(env, napi_create_type_error(env, nullptr, message, &result)); + return result; +} + +napi_value createErrorCode(napi_env env, napi_callback_info info) { + napi_value result; + napi_value message; + napi_value code; + NAPI_CALL(env, napi_create_string_utf8(env, "Error [error]", -1, &message)); + NAPI_CALL(env, napi_create_string_utf8(env, "ERR_TEST_CODE", -1, &code)); + NAPI_CALL(env, napi_create_error(env, code, message, &result)); + return result; +} + +napi_value createRangeErrorCode(napi_env env, napi_callback_info info) { + napi_value result; + napi_value message; + napi_value 
code; + NAPI_CALL(env, napi_create_string_utf8(env, + "RangeError [range error]", + -1, + &message)); + NAPI_CALL(env, napi_create_string_utf8(env, "ERR_TEST_CODE", -1, &code)); + NAPI_CALL(env, napi_create_range_error(env, code, message, &result)); + return result; +} + +napi_value createTypeErrorCode(napi_env env, napi_callback_info info) { + napi_value result; + napi_value message; + napi_value code; + NAPI_CALL(env, napi_create_string_utf8(env, + "TypeError [type error]", + -1, + &message)); + NAPI_CALL(env, napi_create_string_utf8(env, "ERR_TEST_CODE", -1, &code)); + NAPI_CALL(env, napi_create_type_error(env, code, message, &result)); return result; } @@ -70,9 +126,15 @@ void Init(napi_env env, napi_value exports, napi_value module, void* priv) { DECLARE_NAPI_PROPERTY("throwError", throwError), DECLARE_NAPI_PROPERTY("throwRangeError", throwRangeError), DECLARE_NAPI_PROPERTY("throwTypeError", throwTypeError), + DECLARE_NAPI_PROPERTY("throwErrorCode", throwErrorCode), + DECLARE_NAPI_PROPERTY("throwRangeErrorCode", throwRangeErrorCode), + DECLARE_NAPI_PROPERTY("throwTypeErrorCode", throwTypeErrorCode), DECLARE_NAPI_PROPERTY("createError", createError), DECLARE_NAPI_PROPERTY("createRangeError", createRangeError), DECLARE_NAPI_PROPERTY("createTypeError", createTypeError), + DECLARE_NAPI_PROPERTY("createErrorCode", createErrorCode), + DECLARE_NAPI_PROPERTY("createRangeErrorCode", createRangeErrorCode), + DECLARE_NAPI_PROPERTY("createTypeErrorCode", createTypeErrorCode), }; NAPI_CALL_RETURN_VOID(env, napi_define_properties( diff --git a/test/addons-napi/test_typedarray/test_typedarray.c b/test/addons-napi/test_typedarray/test_typedarray.c index 4194492cae8b3a..ffc118681bad9e 100644 --- a/test/addons-napi/test_typedarray/test_typedarray.c +++ b/test/addons-napi/test_typedarray/test_typedarray.c @@ -65,7 +65,7 @@ napi_value Multiply(napi_env env, napi_callback_info info) { output_doubles[i] = input_doubles[i] * multiplier; } } else { - napi_throw_error(env, "Typed array was of a type not expected by test."); + napi_throw_error(env, NULL, "Typed array was of a type not expected by test."); return NULL; } diff --git a/test/common/index.js b/test/common/index.js index 315e35d04195f4..e6dcd995dc6195 100644 --- a/test/common/index.js +++ b/test/common/index.js @@ -626,6 +626,59 @@ Object.defineProperty(exports, 'hasIntl', { } }); +// Useful for testing expected internal/error objects +exports.expectsError = function expectsError(fn, settings, exact) { + if (typeof fn !== 'function') { + exact = settings; + settings = fn; + fn = undefined; + } + function innerFn(error) { + assert.strictEqual(error.code, settings.code); + if ('type' in settings) { + const type = settings.type; + if (type !== Error && !Error.isPrototypeOf(type)) { + throw new TypeError('`settings.type` must inherit from `Error`'); + } + assert(error instanceof type, + `${error.name} is not instance of ${type.name}`); + let typeName = error.constructor.name; + if (typeName === 'NodeError' && type.name !== 'NodeError') { + typeName = Object.getPrototypeOf(error.constructor).name; + } + assert.strictEqual(typeName, type.name); + } + if ('message' in settings) { + const message = settings.message; + if (typeof message === 'string') { + assert.strictEqual(error.message, message); + } else { + assert(message.test(error.message), + `${error.message} does not match ${message}`); + } + } + if ('name' in settings) { + assert.strictEqual(error.name, settings.name); + } + if (error.constructor.name === 'AssertionError') { + ['generatedMessage', 
'actual', 'expected', 'operator'].forEach((key) => { + if (key in settings) { + const actual = error[key]; + const expected = settings[key]; + assert.strictEqual(actual, expected, + `${key}: expected ${expected}, not ${actual}`); + } + }); + } + return true; + } + if (fn) { + assert.throws(fn, innerFn); + return; + } + return exports.mustCall(innerFn, exact); +}; + // Crash the process on unhandled rejections. exports.crashOnUnhandledRejection = function() { process.on('unhandledRejection', From aad36b2cd4d383dc36c16dd877abfae1af180cc8 Mon Sep 17 00:00:00 2001 From: Kyle Farnung Date: Tue, 27 Jun 2017 17:54:44 -0700 Subject: [PATCH 123/227] n-api: add napi_fatal_error API Backport-PR-URL: https://github.com/nodejs/node/pull/19447 PR-URL: https://github.com/nodejs/node/pull/13971 Reviewed-By: James M Snell Reviewed-By: Jason Ginchereau Reviewed-By: Ben Noordhuis Reviewed-By: Colin Ihrig Reviewed-By: Michael Dawson --- doc/api/n-api.md | 17 +++++++++++++++++ src/node_api.cc | 5 +++++ src/node_api.h | 9 +++++++++ test/addons-napi/test_fatal/binding.gyp | 8 ++++++++ test/addons-napi/test_fatal/test.js | 18 ++++++++++++++++++ test/addons-napi/test_fatal/test_fatal.c | 18 ++++++++++++++++++ 6 files changed, 75 insertions(+) create mode 100644 test/addons-napi/test_fatal/binding.gyp create mode 100644 test/addons-napi/test_fatal/test.js create mode 100644 test/addons-napi/test_fatal/test_fatal.c diff --git a/doc/api/n-api.md b/doc/api/n-api.md index 875ced3579ecf0..8aac594f8323cd 100644 --- a/doc/api/n-api.md +++ b/doc/api/n-api.md @@ -533,6 +533,23 @@ Returns `napi_ok` if the API succeeded. This API returns true if an exception is pending. +### Fatal Errors + +In the event of an unrecoverable error in a native module, a fatal error can be +thrown to immediately terminate the process. + +#### napi_fatal_error + +```C +NAPI_EXTERN NAPI_NO_RETURN void napi_fatal_error(const char* location, const char* message); +``` + +- `[in] location`: Optional location at which the error occurred. +- `[in] message`: The message associated with the error. + +The function call does not return, the process will be terminated. 
## Object Lifetime management diff --git a/src/node_api.cc b/src/node_api.cc index 25e8c6385572e7..58aeaf08979501 100644 --- a/src/node_api.cc +++ b/src/node_api.cc @@ -826,6 +826,11 @@ napi_status napi_get_last_error_info(napi_env env, return napi_ok; } +NAPI_NO_RETURN void napi_fatal_error(const char* location, + const char* message) { + node::FatalError(location, message); +} + napi_status napi_create_function(napi_env env, const char* utf8name, napi_callback cb, diff --git a/src/node_api.h b/src/node_api.h index af98789d418210..f4d097ad104cf9 100644 --- a/src/node_api.h +++ b/src/node_api.h @@ -37,6 +37,12 @@ # define NAPI_MODULE_EXPORT __attribute__((visibility("default"))) #endif +#ifdef __GNUC__ +#define NAPI_NO_RETURN __attribute__((noreturn)) +#else +#define NAPI_NO_RETURN +#endif + typedef void (*napi_addon_register_func)(napi_env env, napi_value exports, @@ -104,6 +110,9 @@ NAPI_EXTERN napi_status napi_get_last_error_info(napi_env env, const napi_extended_error_info** result); +NAPI_EXTERN NAPI_NO_RETURN void napi_fatal_error(const char* location, + const char* message); + // Getters for defined singletons NAPI_EXTERN napi_status napi_get_undefined(napi_env env, napi_value* result); NAPI_EXTERN napi_status napi_get_null(napi_env env, napi_value* result); diff --git a/test/addons-napi/test_fatal/binding.gyp b/test/addons-napi/test_fatal/binding.gyp new file mode 100644 index 00000000000000..ad661825f1fa9b --- /dev/null +++ b/test/addons-napi/test_fatal/binding.gyp @@ -0,0 +1,8 @@ +{ + "targets": [ + { + "target_name": "test_fatal", + "sources": [ "test_fatal.c" ] + } + ] +} diff --git a/test/addons-napi/test_fatal/test.js b/test/addons-napi/test_fatal/test.js new file mode 100644 index 00000000000000..aa37dc6803f0d5 --- /dev/null +++ b/test/addons-napi/test_fatal/test.js @@ -0,0 +1,18 @@ +'use strict'; +const common = require('../../common'); +const assert = require('assert'); +const child_process = require('child_process'); +const test_fatal = require(`./build/${common.buildType}/test_fatal`); + +// Test in a child process because the test code will trigger a fatal error +// that crashes the process. +if (process.argv[2] === 'child') { + test_fatal.Test(); + return; +} + +const p = child_process.spawnSync( + process.execPath, [ '--napi-modules', __filename, 'child' ]); +assert.ifError(p.error); +assert.ok(p.stderr.toString().includes( + 'FATAL ERROR: test_fatal::Test fatal message')); diff --git a/test/addons-napi/test_fatal/test_fatal.c b/test/addons-napi/test_fatal/test_fatal.c new file mode 100644 index 00000000000000..1e8a1cf7955d18 --- /dev/null +++ b/test/addons-napi/test_fatal/test_fatal.c @@ -0,0 +1,18 @@ +#include +#include "../common.h" + +napi_value Test(napi_env env, napi_callback_info info) { + napi_fatal_error("test_fatal::Test", "fatal message"); + return NULL; +} + +void Init(napi_env env, napi_value exports, napi_value module, void* priv) { + napi_property_descriptor properties[] = { + DECLARE_NAPI_PROPERTY("Test", Test), + }; + + NAPI_CALL_RETURN_VOID(env, napi_define_properties( + env, exports, sizeof(properties) / sizeof(*properties), properties)); +} + +NAPI_MODULE(addon, Init) From 3f3eaf996177190f9d71c68ec5aba3019fe430c4 Mon Sep 17 00:00:00 2001 From: "Song, Bintao Garfield" Date: Sun, 16 Jul 2017 15:08:33 +0800 Subject: [PATCH 124/227] test: replace string concat with template literal Replace the string concat at test/addons-napi/test_reference/test.js. 
Backport-PR-URL: https://github.com/nodejs/node/pull/19447 PR-URL: https://github.com/nodejs/node/pull/14269 Reviewed-By: Joyee Cheung Reviewed-By: Rich Trott Reviewed-By: Colin Ihrig --- test/addons-napi/test_reference/test.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/addons-napi/test_reference/test.js b/test/addons-napi/test_reference/test.js index aaf5531f39e1f3..14932a74ca70b0 100644 --- a/test/addons-napi/test_reference/test.js +++ b/test/addons-napi/test_reference/test.js @@ -22,7 +22,7 @@ function runTests(i, title, tests) { try { tests[i](); } catch (e) { - console.error('Test failed: ' + title); + console.error(`Test failed: ${title}`); throw e; } setImmediate(() => { From 4b9773effa36b440b95faf58c42cca1e7a8ccd1d Mon Sep 17 00:00:00 2001 From: Vse Mozhet Byt Date: Thu, 20 Jul 2017 21:01:51 +0300 Subject: [PATCH 125/227] doc: fix some links Backport-PR-URL: https://github.com/nodejs/node/pull/19447 PR-URL: https://github.com/nodejs/node/pull/14400 Reviewed-By: Colin Ihrig Reviewed-By: Luigi Pinca Reviewed-By: Yuta Hiroto Reviewed-By: Khaidi Chu Reviewed-By: James M Snell --- doc/api/dns.md | 4 ++-- doc/api/n-api.md | 1 + doc/guides/writing-tests.md | 11 +++++------ 3 files changed, 8 insertions(+), 8 deletions(-) diff --git a/doc/api/dns.md b/doc/api/dns.md index e4321b487340cf..c293e2bfce08c9 100644 --- a/doc/api/dns.md +++ b/doc/api/dns.md @@ -175,7 +175,7 @@ added: v0.1.27 - `rrtype` {string} Resource record type. Default: `'A'`. - `callback` {Function} - `err` {Error} - - `records` {string[] | Object[] | string[][] | Object} + - `records` {string[] | Object[] | Object} Uses the DNS protocol to resolve a hostname (e.g. `'nodejs.org'`) into an array of the resource records. The `callback` function has arguments @@ -362,7 +362,7 @@ added: v0.1.27 - `hostname` {string} - `callback` {Function} - `err` {Error} - - `records` {string[][]} + - `records` {string[]} Uses the DNS protocol to resolve text queries (`TXT` records) for the `hostname`. The `records` argument passed to the `callback` function is a diff --git a/doc/api/n-api.md b/doc/api/n-api.md index 8aac594f8323cd..91780846ff2e4a 100644 --- a/doc/api/n-api.md +++ b/doc/api/n-api.md @@ -3204,6 +3204,7 @@ support it: [`napi_queue_async_work`]: #n_api_napi_queue_async_work [`napi_reference_ref`]: #n_api_napi_reference_ref [`napi_reference_unref`]: #n_api_napi_reference_unref +[`napi_throw`]: #n_api_napi_throw [`napi_throw_error`]: #n_api_napi_throw_error [`napi_throw_range_error`]: #n_api_napi_throw_range_error [`napi_throw_type_error`]: #n_api_napi_throw_type_error diff --git a/doc/guides/writing-tests.md b/doc/guides/writing-tests.md index aac098640aec9e..cc87886656c06d 100644 --- a/doc/guides/writing-tests.md +++ b/doc/guides/writing-tests.md @@ -92,7 +92,7 @@ The test checks functionality in the `http` module. Most tests use the `assert` module to confirm expectations of the test. The require statements are sorted in -[ASCII](http://man7.org/linux/man-pages/man7/ascii.7.html) order (digits, upper +[ASCII][] order (digits, upper case, `_`, lower case). ### **Lines 10-21** @@ -252,9 +252,9 @@ assert.throws( For performance considerations, we only use a selected subset of ES.Next features in JavaScript code in the `lib` directory. 
However, when writing tests, for the ease of backporting, it is encouraged to use those ES.Next -features that can be used directly without a flag in [all maintained branches] -(https://github.com/nodejs/lts). [node.green](http://node.green/) lists -available features in each release. +features that can be used directly without a flag in +[all maintained branches][]. [node.green][] lists available features +in each release. For example: @@ -279,8 +279,7 @@ functions worked correctly with the `beforeExit` event, then it might be named ### Web Platform Tests Some of the tests for the WHATWG URL implementation (named -`test-whatwg-url-*.js`) are imported from the -[Web Platform Tests Project](https://github.com/w3c/web-platform-tests/tree/master/url). +`test-whatwg-url-*.js`) are imported from the [Web Platform Tests Project][]. These imported tests will be wrapped like this: ```js From f4d1cae6345cef45f6d8734f72c8f4ae74c57ce3 Mon Sep 17 00:00:00 2001 From: Anna Henningsen Date: Thu, 20 Jul 2017 17:03:11 +0200 Subject: [PATCH 126/227] n-api: add fast paths for integer getters Ref: https://github.com/nodejs/node/issues/14379 Backport-PR-URL: https://github.com/nodejs/node/pull/19447 PR-URL: https://github.com/nodejs/node/pull/14393 Reviewed-By: Ben Noordhuis Reviewed-By: James M Snell Reviewed-By: Timothy Gu Reviewed-By: Colin Ihrig Reviewed-By: Jason Ginchereau --- src/node_api.cc | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/src/node_api.cc b/src/node_api.cc index 58aeaf08979501..986a9facccac9d 100644 --- a/src/node_api.cc +++ b/src/node_api.cc @@ -1912,6 +1912,12 @@ napi_status napi_get_value_int32(napi_env env, CHECK_ARG(env, result); v8::Local val = v8impl::V8LocalValueFromJsValue(value); + + if (val->IsInt32()) { + *result = val.As()->Value(); + return napi_clear_last_error(env); + } + RETURN_STATUS_IF_FALSE(env, val->IsNumber(), napi_number_expected); v8::Isolate* isolate = env->isolate; @@ -1931,6 +1937,12 @@ napi_status napi_get_value_uint32(napi_env env, CHECK_ARG(env, result); v8::Local val = v8impl::V8LocalValueFromJsValue(value); + + if (val->IsUint32()) { + *result = val.As()->Value(); + return napi_clear_last_error(env); + } + RETURN_STATUS_IF_FALSE(env, val->IsNumber(), napi_number_expected); v8::Isolate* isolate = env->isolate; @@ -1950,6 +1962,13 @@ napi_status napi_get_value_int64(napi_env env, CHECK_ARG(env, result); v8::Local val = v8impl::V8LocalValueFromJsValue(value); + + // This is still a fast path very likely to be taken. + if (val->IsInt32()) { + *result = val.As()->Value(); + return napi_clear_last_error(env); + } + RETURN_STATUS_IF_FALSE(env, val->IsNumber(), napi_number_expected); // v8::Value::IntegerValue() converts NaN to INT64_MIN, inconsistent with From 1acab66df437c07cd655dc72e849f386162abb0c Mon Sep 17 00:00:00 2001 From: Kyle Farnung Date: Wed, 12 Jul 2017 17:14:00 -0700 Subject: [PATCH 127/227] n-api: directly create Local from Persistent The `v8::PersistentBase.Get` method didn't exist in node 4 and it's just a helper which creates a new `v8::Local` from the given object. 
Backport-PR-URL: https://github.com/nodejs/node/pull/19447 PR-URL: https://github.com/nodejs/node/pull/14211 Reviewed-By: Timothy Gu Reviewed-By: Colin Ihrig Reviewed-By: Anna Henningsen Reviewed-By: James M Snell Reviewed-By: Michael Dawson Reviewed-By: Jason Ginchereau --- src/node_api.cc | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/node_api.cc b/src/node_api.cc index 986a9facccac9d..06f18562e7d3c0 100644 --- a/src/node_api.cc +++ b/src/node_api.cc @@ -58,7 +58,8 @@ struct napi_env__ { (destination)->SetInternalFieldCount((field_count)); \ (env)->prefix ## _template.Reset(isolate, (destination)); \ } else { \ - (destination) = env->prefix ## _template.Get(isolate); \ + (destination) = v8::Local::New( \ + isolate, env->prefix ## _template); \ } \ } while (0) From 06b127346451bbbb34872a63c8b04d190db4c660 Mon Sep 17 00:00:00 2001 From: Pratik Jain Date: Mon, 24 Jul 2017 11:37:31 +0000 Subject: [PATCH 128/227] test: changed error message validator Replaced TypeError with RegEx to match the exact error message in file test/addons-napi/test_properties/test.js. The RegEx will check the validity of the error being thrown. Backport-PR-URL: https://github.com/nodejs/node/pull/19447 PR-URL: https://github.com/nodejs/node/pull/14443 Reviewed-By: James M Snell Reviewed-By: Luigi Pinca Reviewed-By: Yuta Hiroto Reviewed-By: Vse Mozhet Byt Reviewed-By: Rich Trott Reviewed-By: Anna Henningsen --- test/addons-napi/test_properties/test.js | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/test/addons-napi/test_properties/test.js b/test/addons-napi/test_properties/test.js index 8d11a669d46daf..0a3bbee853610f 100644 --- a/test/addons-napi/test_properties/test.js +++ b/test/addons-napi/test_properties/test.js @@ -1,6 +1,7 @@ 'use strict'; const common = require('../../common'); const assert = require('assert'); +const readonlyErrorRE = /^TypeError: Cannot assign to read only property '.*' of object '#'$/; // Testing api calls for defining properties const test_object = require(`./build/${common.buildType}/test_properties`); @@ -12,7 +13,7 @@ assert.strictEqual(test_object.readwriteValue, 1); test_object.readwriteValue = 2; assert.strictEqual(test_object.readwriteValue, 2); -assert.throws(() => { test_object.readonlyValue = 3; }, TypeError); +assert.throws(() => { test_object.readonlyValue = 3; }, readonlyErrorRE); assert.ok(test_object.hiddenValue); @@ -42,11 +43,11 @@ assert.strictEqual(symbolDescription, 'NameKeySymbol'); test_object.readwriteAccessor1 = 1; assert.strictEqual(test_object.readwriteAccessor1, 1); assert.strictEqual(test_object.readonlyAccessor1, 1); -assert.throws(() => { test_object.readonlyAccessor1 = 3; }, TypeError); +assert.throws(() => { test_object.readonlyAccessor1 = 3; }, readonlyErrorRE); test_object.readwriteAccessor2 = 2; assert.strictEqual(test_object.readwriteAccessor2, 2); assert.strictEqual(test_object.readonlyAccessor2, 2); -assert.throws(() => { test_object.readonlyAccessor2 = 3; }, TypeError); +assert.throws(() => { test_object.readonlyAccessor2 = 3; }, readonlyErrorRE); assert.strictEqual(test_object.hasNamedProperty(test_object, 'echo'), true); assert.strictEqual(test_object.hasNamedProperty(test_object, 'hiddenValue'), From 86b101cb60c83398a88329f95ea943c3fe6819f7 Mon Sep 17 00:00:00 2001 From: Gabriel Schulhof Date: Thu, 13 Jul 2017 11:35:55 +0300 Subject: [PATCH 129/227] n-api: re-use napi_env between modules Store the `napi_env` on the global object at a 
private key. This gives us one `napi_env` per context. Refs: https://github.com/nodejs/node/issues/14367 Backport-PR-URL: https://github.com/nodejs/node/pull/19447 PR-URL: https://github.com/nodejs/node/pull/14217 Reviewed-By: Ben Noordhuis Reviewed-By: Timothy Gu --- src/node_api.cc | 45 +++++++++++++++++-- test/addons-napi/test_env_sharing/binding.gyp | 12 +++++ .../test_env_sharing/compare_env.c | 22 +++++++++ test/addons-napi/test_env_sharing/store_env.c | 12 +++++ test/addons-napi/test_env_sharing/test.js | 10 +++++ 5 files changed, 98 insertions(+), 3 deletions(-) create mode 100644 test/addons-napi/test_env_sharing/binding.gyp create mode 100644 test/addons-napi/test_env_sharing/compare_env.c create mode 100644 test/addons-napi/test_env_sharing/store_env.c create mode 100644 test/addons-napi/test_env_sharing/test.js diff --git a/src/node_api.cc b/src/node_api.cc index 06f18562e7d3c0..3cdd4b8a970f28 100644 --- a/src/node_api.cc +++ b/src/node_api.cc @@ -712,6 +712,45 @@ bool FindWrapper(v8::Local obj, return true; } +static void DeleteEnv(napi_env env, void* data, void* hint) { + delete env; +} + +napi_env GetEnv(v8::Local context) { + napi_env result; + + auto isolate = context->GetIsolate(); + auto global = context->Global(); + + // In the case of the string for which we grab the private and the value of + // the private on the global object we can call .ToLocalChecked() directly + // because we need to stop hard if either of them is empty. + // + // Re https://github.com/nodejs/node/pull/14217#discussion_r128775149 + auto key = v8::Private::ForApi(isolate, + v8::String::NewFromOneByte(isolate, + reinterpret_cast("N-API Environment"), + v8::NewStringType::kInternalized).ToLocalChecked()); + auto value = global->GetPrivate(context, key).ToLocalChecked(); + + if (value->IsExternal()) { + result = static_cast(value.As()->Value()); + } else { + result = new napi_env__(isolate); + auto external = v8::External::New(isolate, result); + + // We must also stop hard if the result of assigning the env to the global + // is either nothing or false. + CHECK(global->SetPrivate(context, key, external).FromJust()); + + // Create a self-destructing reference to external that will get rid of the + // napi_env when external goes out of scope. + Reference::New(result, external, 0, true, DeleteEnv, nullptr, nullptr); + } + + return result; +} + } // end of namespace v8impl // Intercepts the Node-V8 module registration callback. Converts parameters @@ -723,9 +762,9 @@ void napi_module_register_cb(v8::Local exports, void* priv) { napi_module* mod = static_cast(priv); - // Create a new napi_env for this module. Once module unloading is supported - // we shall have to call delete on this object from there. - napi_env env = new napi_env__(context->GetIsolate()); + // Create a new napi_env for this module or reference one if a pre-existing + // one is found. 
+ napi_env env = v8impl::GetEnv(context); mod->nm_register_func( env, diff --git a/test/addons-napi/test_env_sharing/binding.gyp b/test/addons-napi/test_env_sharing/binding.gyp new file mode 100644 index 00000000000000..5699a8391dd347 --- /dev/null +++ b/test/addons-napi/test_env_sharing/binding.gyp @@ -0,0 +1,12 @@ +{ + "targets": [ + { + "target_name": "store_env", + "sources": [ "store_env.c" ] + }, + { + "target_name": "compare_env", + "sources": [ "compare_env.c" ] + } + ] +} diff --git a/test/addons-napi/test_env_sharing/compare_env.c b/test/addons-napi/test_env_sharing/compare_env.c new file mode 100644 index 00000000000000..b209c8f6d8e968 --- /dev/null +++ b/test/addons-napi/test_env_sharing/compare_env.c @@ -0,0 +1,22 @@ +#include +#include "../common.h" + +napi_value compare(napi_env env, napi_callback_info info) { + napi_value external; + size_t argc = 1; + void* data; + napi_value return_value; + + NAPI_CALL(env, napi_get_cb_info(env, info, &argc, &external, NULL, NULL)); + NAPI_CALL(env, napi_get_value_external(env, external, &data)); + NAPI_CALL(env, napi_get_boolean(env, ((napi_env)data) == env, &return_value)); + + return return_value; +} + +void Init(napi_env env, napi_value exports, napi_value module, void* context) { + napi_property_descriptor prop = DECLARE_NAPI_PROPERTY("exports", compare); + NAPI_CALL_RETURN_VOID(env, napi_define_properties(env, module, 1, &prop)); +} + +NAPI_MODULE(compare_env, Init) diff --git a/test/addons-napi/test_env_sharing/store_env.c b/test/addons-napi/test_env_sharing/store_env.c new file mode 100644 index 00000000000000..391f6faa27648e --- /dev/null +++ b/test/addons-napi/test_env_sharing/store_env.c @@ -0,0 +1,12 @@ +#include +#include "../common.h" + +void Init(napi_env env, napi_value exports, napi_value module, void* context) { + napi_value external; + NAPI_CALL_RETURN_VOID(env, + napi_create_external(env, env, NULL, NULL, &external)); + NAPI_CALL_RETURN_VOID(env, + napi_set_named_property(env, module, "exports", external)); +} + +NAPI_MODULE(store_env, Init) diff --git a/test/addons-napi/test_env_sharing/test.js b/test/addons-napi/test_env_sharing/test.js new file mode 100644 index 00000000000000..6e21bf4c638b80 --- /dev/null +++ b/test/addons-napi/test_env_sharing/test.js @@ -0,0 +1,10 @@ +'use strict'; + +const common = require('../../common'); +const storeEnv = require(`./build/${common.buildType}/store_env`); +const compareEnv = require(`./build/${common.buildType}/compare_env`); +const assert = require('assert'); + +assert.strictEqual(compareEnv(storeEnv), true, + 'N-API environment pointers in two different modules have ' + + 'the same value'); From bd032a158aa5be0ecd5e1c1e6b1a535d6977bc83 Mon Sep 17 00:00:00 2001 From: Shivanth MP Date: Thu, 20 Jul 2017 01:50:43 +0530 Subject: [PATCH 130/227] n-api: add support for DataView Basic support for Dataview is added in this commit. This is achieved by using three functions, napi_create_dataview(), napi_is_dataview() and napi_get_dataview_info(). 
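
As a rough illustration of how the three calls fit together, here is a hedged
sketch of an addon callback that projects a DataView over a caller-supplied
ArrayBuffer; the `CreateView` name, the fixed 8-byte length, and the reuse of
the `NAPI_CALL` helper from test/addons-napi/common.h are assumptions made for
the example, not part of this patch:

```cpp
#include <node_api.h>
#include "../common.h"  // assumed: provides NAPI_CALL, as in the other addon tests

napi_value CreateView(napi_env env, napi_callback_info info) {
  size_t argc = 1;
  napi_value arraybuffer;
  NAPI_CALL(env, napi_get_cb_info(env, info, &argc, &arraybuffer, nullptr, nullptr));

  // Project an 8-byte DataView over the start of the ArrayBuffer.
  napi_value dataview;
  NAPI_CALL(env, napi_create_dataview(env, 8, arraybuffer, 0, &dataview));

  // napi_is_dataview() distinguishes DataViews from other values.
  bool is_dataview = false;
  NAPI_CALL(env, napi_is_dataview(env, dataview, &is_dataview));

  // napi_get_dataview_info() reads back the length, offset, backing
  // ArrayBuffer and raw data pointer of the view.
  size_t byte_length = 0;
  size_t byte_offset = 0;
  void* data = nullptr;
  napi_value backing;
  NAPI_CALL(env, napi_get_dataview_info(
      env, dataview, &byte_length, &data, &backing, &byte_offset));

  return dataview;
}
```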
Backport-PR-URL: https://github.com/nodejs/node/pull/19447 PR-URL: https://github.com/nodejs/node/pull/14382 Fixes: https://github.com/nodejs/node/issues/13926 Reviewed-By: Anna Henningsen Reviewed-By: Timothy Gu Reviewed-By: Colin Ihrig --- doc/api/n-api.md | 84 +++++++++++++++++++ src/node_api.cc | 66 +++++++++++++++ src/node_api.h | 15 ++++ test/addons-napi/test_dataview/binding.gyp | 8 ++ test/addons-napi/test_dataview/test.js | 14 ++++ .../addons-napi/test_dataview/test_dataview.c | 49 +++++++++++ 6 files changed, 236 insertions(+) create mode 100644 test/addons-napi/test_dataview/binding.gyp create mode 100644 test/addons-napi/test_dataview/test.js create mode 100644 test/addons-napi/test_dataview/test_dataview.c diff --git a/doc/api/n-api.md b/doc/api/n-api.md index 91780846ff2e4a..8f8aa76f83ac7b 100644 --- a/doc/api/n-api.md +++ b/doc/api/n-api.md @@ -1331,6 +1331,40 @@ JavaScript TypedArray Objects are described in [Section 22.2](https://tc39.github.io/ecma262/#sec-typedarray-objects) of the ECMAScript Language Specification. + +#### *napi_create_dataview* + + +```C +napi_status napi_create_dataview(napi_env env, + size_t byte_length, + napi_value arraybuffer, + size_t byte_offset, + napi_value* result) + +``` + +- `[in] env`: The environment that the API is invoked under. +- `[in] length`: Number of elements in the DataView. +- `[in] arraybuffer`: ArrayBuffer underlying the DataView. +- `[in] byte_offset`: The byte offset within the ArrayBuffer from which to + start projecting the DataView. +- `[out] result`: A `napi_value` representing a JavaScript DataView. + +Returns `napi_ok` if the API succeeded. + +This API creates a JavaScript DataView object over an existing ArrayBuffer. +DataView objects provide an array-like view over an underlying data buffer, +but one which allows items of different size and type in the ArrayBuffer. + +It is required that `byte_length + byte_offset` is less than or equal to the +size in bytes of the array passed in. If not, a RangeError exception is raised. + +JavaScript DataView Objects are described in +[Section 24.3][] of the ECMAScript Language Specification. + ### Functions to convert from C types to N-API #### *napi_create_number* + +```C +napi_status napi_get_dataview_info(napi_env env, + napi_value dataview, + size_t* byte_length, + void** data, + napi_value* arraybuffer, + size_t* byte_offset) +``` + +- `[in] env`: The environment that the API is invoked under. +- `[in] dataview`: `napi_value` representing the DataView whose + properties to query. +- `[out] byte_length`: Number of bytes in the DataView. +- `[out] data`: The data buffer underlying the DataView. +- `[out] arraybuffer`: ArrayBuffer underlying the DataView. +- `[out] byte_offset`: The byte offset within the data buffer from which + to start projecting the DataView. + +Returns `napi_ok` if the API succeeded. + +This API returns various properties of a DataView. + + #### *napi_get_value_bool* + +```C +napi_status napi_is_dataview(napi_env env, napi_value value, bool* result) +``` + +- `[in] env`: The environment that the API is invoked under. +- `[in] value`: The JavaScript value to check. +- `[out] result`: Whether the given `napi_value` represents a DataView. + +Returns `napi_ok` if the API succeeded. + +This API checks if the Object passed in is a DataView. 
+ ### *napi_strict_equals* +```C +napi_status napi_create_int32(napi_env env, int32_t value, napi_value* result) +``` + +- `[in] env`: The environment that the API is invoked under. +- `[in] value`: Integer value to be represented in JavaScript. +- `[out] result`: A `napi_value` representing a JavaScript Number. + +Returns `napi_ok` if the API succeeded. + +This API is used to convert from the C `int32_t` type to the JavaScript +Number type. + +The JavaScript Number type is described in +[Section 6.1.6](https://tc39.github.io/ecma262/#sec-ecmascript-language-types-number-type) +of the ECMAScript Language Specification. + +#### *napi_create_uint32* + +```C +napi_status napi_create_uint32(napi_env env, uint32_t value, napi_value* result) +``` + +- `[in] env`: The environment that the API is invoked under. +- `[in] value`: Unsigned integer value to be represented in JavaScript. +- `[out] result`: A `napi_value` representing a JavaScript Number. + +Returns `napi_ok` if the API succeeded. + +This API is used to convert from the C `uint32_t` type to the JavaScript +Number type. + +The JavaScript Number type is described in +[Section 6.1.6](https://tc39.github.io/ecma262/#sec-ecmascript-language-types-number-type) +of the ECMAScript Language Specification. + +#### *napi_create_int64* + +```C +napi_status napi_create_int64(napi_env env, int64_t value, napi_value* result) +``` + +- `[in] env`: The environment that the API is invoked under. +- `[in] value`: Integer value to be represented in JavaScript. +- `[out] result`: A `napi_value` representing a JavaScript Number. + +Returns `napi_ok` if the API succeeded. + +This API is used to convert from the C `int64_t` type to the JavaScript +Number type. + +The JavaScript Number type is described in +[Section 6.1.6](https://tc39.github.io/ecma262/#sec-ecmascript-language-types-number-type) +of the ECMAScript Language Specification. Note the complete range of `int64_t` +cannot be represented with full precision in JavaScript. Integer values +outside the range of +[`Number.MIN_SAFE_INTEGER`](https://tc39.github.io/ecma262/#sec-number.min_safe_integer) +-(2^53 - 1) - +[`Number.MAX_SAFE_INTEGER`](https://tc39.github.io/ecma262/#sec-number.max_safe_integer) +(2^53 - 1) will lose precision. + +#### *napi_create_double* + ```C -napi_status napi_create_number(napi_env env, double value, napi_value* result) +napi_status napi_create_double(napi_env env, double value, napi_value* result) ``` - `[in] env`: The environment that the API is invoked under. @@ -1386,7 +1455,7 @@ napi_status napi_create_number(napi_env env, double value, napi_value* result) Returns `napi_ok` if the API succeeded. -This API is used to convert from the C double type to the JavaScript +This API is used to convert from the C `double` type to the JavaScript Number type. 
The JavaScript Number type is described in @@ -2170,7 +2239,7 @@ status = napi_create_object(env, &obj); if (status != napi_ok) return status; // Create a napi_value for 123 -status = napi_create_number(env, 123, &value); +status = napi_create_int32(env, 123, &value); if (status != napi_ok) return status; // obj.myProp = 123 @@ -2244,9 +2313,9 @@ if (status != napi_ok) return status; // Create napi_values for 123 and 456 napi_value fooValue, barValue; -status = napi_create_number(env, 123, &fooValue); +status = napi_create_int32(env, 123, &fooValue); if (status != napi_ok) return status; -status = napi_create_number(env, 456, &barValue); +status = napi_create_int32(env, 456, &barValue); if (status != napi_ok) return status; // Set the properties @@ -2707,7 +2776,7 @@ status = napi_get_named_property(env, global, "AddTwo", &add_two); if (status != napi_ok) return; // const arg = 1337 -status = napi_create_number(env, 1337, &arg); +status = napi_create_int32(env, 1337, &arg); if (status != napi_ok) return; napi_value* argv = &arg; diff --git a/src/node_api.cc b/src/node_api.cc index 932ec858963558..948f6afef55291 100644 --- a/src/node_api.cc +++ b/src/node_api.cc @@ -821,9 +821,7 @@ const char* error_messages[] = {nullptr, "The async work item was cancelled", "napi_escape_handle already called on scope"}; -static napi_status napi_clear_last_error(napi_env env) { - CHECK_ENV(env); - +static inline napi_status napi_clear_last_error(napi_env env) { env->last_error.error_code = napi_ok; // TODO(boingoing): Should this be a callback? @@ -832,13 +830,13 @@ static napi_status napi_clear_last_error(napi_env env) { return napi_ok; } -static napi_status napi_set_last_error(napi_env env, napi_status error_code, - uint32_t engine_error_code, - void* engine_reserved) { +static inline +napi_status napi_set_last_error(napi_env env, napi_status error_code, + uint32_t engine_error_code, + void* engine_reserved) { env->last_error.error_code = error_code; env->last_error.engine_error_code = engine_error_code; env->last_error.engine_reserved = engine_reserved; - return error_code; } @@ -1439,42 +1437,42 @@ napi_status napi_get_prototype(napi_env env, } napi_status napi_create_object(napi_env env, napi_value* result) { - NAPI_PREAMBLE(env); + CHECK_ENV(env); CHECK_ARG(env, result); *result = v8impl::JsValueFromV8LocalValue( v8::Object::New(env->isolate)); - return GET_RETURN_STATUS(env); + return napi_clear_last_error(env); } napi_status napi_create_array(napi_env env, napi_value* result) { - NAPI_PREAMBLE(env); + CHECK_ENV(env); CHECK_ARG(env, result); *result = v8impl::JsValueFromV8LocalValue( v8::Array::New(env->isolate)); - return GET_RETURN_STATUS(env); + return napi_clear_last_error(env); } napi_status napi_create_array_with_length(napi_env env, size_t length, napi_value* result) { - NAPI_PREAMBLE(env); + CHECK_ENV(env); CHECK_ARG(env, result); *result = v8impl::JsValueFromV8LocalValue( v8::Array::New(env->isolate, length)); - return GET_RETURN_STATUS(env); + return napi_clear_last_error(env); } napi_status napi_create_string_latin1(napi_env env, const char* str, size_t length, napi_value* result) { - NAPI_PREAMBLE(env); + CHECK_ENV(env); CHECK_ARG(env, result); auto isolate = env->isolate; @@ -1486,28 +1484,28 @@ napi_status napi_create_string_latin1(napi_env env, CHECK_MAYBE_EMPTY(env, str_maybe, napi_generic_failure); *result = v8impl::JsValueFromV8LocalValue(str_maybe.ToLocalChecked()); - return GET_RETURN_STATUS(env); + return napi_clear_last_error(env); } napi_status napi_create_string_utf8(napi_env 
env, const char* str, size_t length, napi_value* result) { - NAPI_PREAMBLE(env); + CHECK_ENV(env); CHECK_ARG(env, result); v8::Local s; CHECK_NEW_FROM_UTF8_LEN(env, s, str, length); *result = v8impl::JsValueFromV8LocalValue(s); - return GET_RETURN_STATUS(env); + return napi_clear_last_error(env); } napi_status napi_create_string_utf16(napi_env env, const char16_t* str, size_t length, napi_value* result) { - NAPI_PREAMBLE(env); + CHECK_ENV(env); CHECK_ARG(env, result); auto isolate = env->isolate; @@ -1519,19 +1517,55 @@ napi_status napi_create_string_utf16(napi_env env, CHECK_MAYBE_EMPTY(env, str_maybe, napi_generic_failure); *result = v8impl::JsValueFromV8LocalValue(str_maybe.ToLocalChecked()); - return GET_RETURN_STATUS(env); + return napi_clear_last_error(env); } -napi_status napi_create_number(napi_env env, +napi_status napi_create_double(napi_env env, double value, napi_value* result) { - NAPI_PREAMBLE(env); + CHECK_ENV(env); CHECK_ARG(env, result); *result = v8impl::JsValueFromV8LocalValue( v8::Number::New(env->isolate, value)); - return GET_RETURN_STATUS(env); + return napi_clear_last_error(env); +} + +napi_status napi_create_int32(napi_env env, + int32_t value, + napi_value* result) { + CHECK_ENV(env); + CHECK_ARG(env, result); + + *result = v8impl::JsValueFromV8LocalValue( + v8::Integer::New(env->isolate, value)); + + return napi_clear_last_error(env); +} + +napi_status napi_create_uint32(napi_env env, + uint32_t value, + napi_value* result) { + CHECK_ENV(env); + CHECK_ARG(env, result); + + *result = v8impl::JsValueFromV8LocalValue( + v8::Integer::NewFromUnsigned(env->isolate, value)); + + return napi_clear_last_error(env); +} + +napi_status napi_create_int64(napi_env env, + int64_t value, + napi_value* result) { + CHECK_ENV(env); + CHECK_ARG(env, result); + + *result = v8impl::JsValueFromV8LocalValue( + v8::Number::New(env->isolate, static_cast(value))); + + return napi_clear_last_error(env); } napi_status napi_get_boolean(napi_env env, bool value, napi_value* result) { @@ -1552,7 +1586,7 @@ napi_status napi_get_boolean(napi_env env, bool value, napi_value* result) { napi_status napi_create_symbol(napi_env env, napi_value description, napi_value* result) { - NAPI_PREAMBLE(env); + CHECK_ENV(env); CHECK_ARG(env, result); v8::Isolate* isolate = env->isolate; @@ -1567,7 +1601,7 @@ napi_status napi_create_symbol(napi_env env, v8::Symbol::New(isolate, desc.As())); } - return GET_RETURN_STATUS(env); + return napi_clear_last_error(env); } static napi_status set_error_code(napi_env env, @@ -1627,7 +1661,7 @@ napi_status napi_create_error(napi_env env, napi_value code, napi_value msg, napi_value* result) { - NAPI_PREAMBLE(env); + CHECK_ENV(env); CHECK_ARG(env, msg); CHECK_ARG(env, result); @@ -1641,14 +1675,14 @@ napi_status napi_create_error(napi_env env, *result = v8impl::JsValueFromV8LocalValue(error_obj); - return GET_RETURN_STATUS(env); + return napi_clear_last_error(env); } napi_status napi_create_type_error(napi_env env, napi_value code, napi_value msg, napi_value* result) { - NAPI_PREAMBLE(env); + CHECK_ENV(env); CHECK_ARG(env, msg); CHECK_ARG(env, result); @@ -1662,14 +1696,14 @@ napi_status napi_create_type_error(napi_env env, *result = v8impl::JsValueFromV8LocalValue(error_obj); - return GET_RETURN_STATUS(env); + return napi_clear_last_error(env); } napi_status napi_create_range_error(napi_env env, napi_value code, napi_value msg, napi_value* result) { - NAPI_PREAMBLE(env); + CHECK_ENV(env); CHECK_ARG(env, msg); CHECK_ARG(env, result); @@ -1683,7 +1717,7 @@ napi_status 
napi_create_range_error(napi_env env, *result = v8impl::JsValueFromV8LocalValue(error_obj); - return GET_RETURN_STATUS(env); + return napi_clear_last_error(env); } napi_status napi_typeof(napi_env env, @@ -1954,14 +1988,13 @@ napi_status napi_get_value_int32(napi_env env, if (val->IsInt32()) { *result = val.As()->Value(); - return napi_clear_last_error(env); - } - - RETURN_STATUS_IF_FALSE(env, val->IsNumber(), napi_number_expected); + } else { + RETURN_STATUS_IF_FALSE(env, val->IsNumber(), napi_number_expected); - v8::Isolate* isolate = env->isolate; - v8::Local context = isolate->GetCurrentContext(); - *result = val->Int32Value(context).FromJust(); + // Empty context: https://github.com/nodejs/node/issues/14379 + v8::Local context; + *result = val->Int32Value(context).FromJust(); + } return napi_clear_last_error(env); } @@ -1979,14 +2012,13 @@ napi_status napi_get_value_uint32(napi_env env, if (val->IsUint32()) { *result = val.As()->Value(); - return napi_clear_last_error(env); - } - - RETURN_STATUS_IF_FALSE(env, val->IsNumber(), napi_number_expected); + } else { + RETURN_STATUS_IF_FALSE(env, val->IsNumber(), napi_number_expected); - v8::Isolate* isolate = env->isolate; - v8::Local context = isolate->GetCurrentContext(); - *result = val->Uint32Value(context).FromJust(); + // Empty context: https://github.com/nodejs/node/issues/14379 + v8::Local context; + *result = val->Uint32Value(context).FromJust(); + } return napi_clear_last_error(env); } @@ -2016,8 +2048,8 @@ napi_status napi_get_value_int64(napi_env env, if (std::isnan(doubleValue)) { *result = 0; } else { - v8::Isolate* isolate = env->isolate; - v8::Local context = isolate->GetCurrentContext(); + // Empty context: https://github.com/nodejs/node/issues/14379 + v8::Local context; *result = val->IntegerValue(context).FromJust(); } @@ -2796,11 +2828,10 @@ napi_status napi_get_buffer_info(napi_env env, napi_value value, void** data, size_t* length) { - NAPI_PREAMBLE(env); + CHECK_ENV(env); CHECK_ARG(env, value); - v8::Local buffer = - v8impl::V8LocalValueFromJsValue(value).As(); + v8::Local buffer = v8impl::V8LocalValueFromJsValue(value); if (data != nullptr) { *data = node::Buffer::Data(buffer); @@ -2809,7 +2840,7 @@ napi_status napi_get_buffer_info(napi_env env, *length = node::Buffer::Length(buffer); } - return GET_RETURN_STATUS(env); + return napi_clear_last_error(env); } napi_status napi_is_arraybuffer(napi_env env, napi_value value, bool* result) { diff --git a/src/node_api.h b/src/node_api.h index 5a2ba946ec8114..cf4128be210fe7 100644 --- a/src/node_api.h +++ b/src/node_api.h @@ -127,9 +127,18 @@ NAPI_EXTERN napi_status napi_create_array(napi_env env, napi_value* result); NAPI_EXTERN napi_status napi_create_array_with_length(napi_env env, size_t length, napi_value* result); -NAPI_EXTERN napi_status napi_create_number(napi_env env, +NAPI_EXTERN napi_status napi_create_double(napi_env env, double value, napi_value* result); +NAPI_EXTERN napi_status napi_create_int32(napi_env env, + int32_t value, + napi_value* result); +NAPI_EXTERN napi_status napi_create_uint32(napi_env env, + uint32_t value, + napi_value* result); +NAPI_EXTERN napi_status napi_create_int64(napi_env env, + int64_t value, + napi_value* result); NAPI_EXTERN napi_status napi_create_string_latin1(napi_env env, const char* str, size_t length, diff --git a/test/addons-napi/2_function_arguments/binding.c b/test/addons-napi/2_function_arguments/binding.c index a5ccac7b61c458..92f89fd2ffa199 100644 --- 
a/test/addons-napi/2_function_arguments/binding.c +++ b/test/addons-napi/2_function_arguments/binding.c @@ -24,7 +24,7 @@ napi_value Add(napi_env env, napi_callback_info info) { NAPI_CALL(env, napi_get_value_double(env, args[1], &value1)); napi_value sum; - NAPI_CALL(env, napi_create_number(env, value0 + value1, &sum)); + NAPI_CALL(env, napi_create_double(env, value0 + value1, &sum)); return sum; } diff --git a/test/addons-napi/6_object_wrap/myobject.cc b/test/addons-napi/6_object_wrap/myobject.cc index c2557facc06382..56b00ddae49a32 100644 --- a/test/addons-napi/6_object_wrap/myobject.cc +++ b/test/addons-napi/6_object_wrap/myobject.cc @@ -86,7 +86,7 @@ napi_value MyObject::GetValue(napi_env env, napi_callback_info info) { NAPI_CALL(env, napi_unwrap(env, _this, reinterpret_cast(&obj))); napi_value num; - NAPI_CALL(env, napi_create_number(env, obj->value_, &num)); + NAPI_CALL(env, napi_create_double(env, obj->value_, &num)); return num; } @@ -116,7 +116,7 @@ napi_value MyObject::PlusOne(napi_env env, napi_callback_info info) { obj->value_ += 1; napi_value num; - NAPI_CALL(env, napi_create_number(env, obj->value_, &num)); + NAPI_CALL(env, napi_create_double(env, obj->value_, &num)); return num; } @@ -140,7 +140,7 @@ napi_value MyObject::Multiply(napi_env env, napi_callback_info info) { const int kArgCount = 1; napi_value argv[kArgCount]; - NAPI_CALL(env, napi_create_number(env, obj->value_ * multiple, argv)); + NAPI_CALL(env, napi_create_double(env, obj->value_ * multiple, argv)); napi_value instance; NAPI_CALL(env, napi_new_instance(env, cons, kArgCount, argv, &instance)); diff --git a/test/addons-napi/7_factory_wrap/myobject.cc b/test/addons-napi/7_factory_wrap/myobject.cc index d6b374d7bb7ff8..4a2b284439ddb9 100644 --- a/test/addons-napi/7_factory_wrap/myobject.cc +++ b/test/addons-napi/7_factory_wrap/myobject.cc @@ -45,7 +45,7 @@ napi_value MyObject::New(napi_env env, napi_callback_info info) { if (valuetype == napi_undefined) { obj->counter_ = 0; } else { - NAPI_CALL(env, napi_get_value_double(env, args[0], &obj->counter_)); + NAPI_CALL(env, napi_get_value_uint32(env, args[0], &obj->counter_)); } obj->env_ = env; @@ -88,7 +88,7 @@ napi_value MyObject::PlusOne(napi_env env, napi_callback_info info) { obj->counter_ += 1; napi_value num; - NAPI_CALL(env, napi_create_number(env, obj->counter_, &num)); + NAPI_CALL(env, napi_create_uint32(env, obj->counter_, &num)); return num; } diff --git a/test/addons-napi/7_factory_wrap/myobject.h b/test/addons-napi/7_factory_wrap/myobject.h index c0b8522c609c4c..28ca94d16e30dd 100644 --- a/test/addons-napi/7_factory_wrap/myobject.h +++ b/test/addons-napi/7_factory_wrap/myobject.h @@ -18,7 +18,7 @@ class MyObject { static napi_ref constructor; static napi_value New(napi_env env, napi_callback_info info); static napi_value PlusOne(napi_env env, napi_callback_info info); - double counter_; + uint32_t counter_; napi_env env_; napi_ref wrapper_; }; diff --git a/test/addons-napi/8_passing_wrapped/binding.cc b/test/addons-napi/8_passing_wrapped/binding.cc index 581638bdfa2b07..c22ac6442f6c4e 100644 --- a/test/addons-napi/8_passing_wrapped/binding.cc +++ b/test/addons-napi/8_passing_wrapped/binding.cc @@ -24,7 +24,7 @@ napi_value Add(napi_env env, napi_callback_info info) { NAPI_CALL(env, napi_unwrap(env, args[1], reinterpret_cast(&obj2))); napi_value sum; - NAPI_CALL(env, napi_create_number(env, obj1->Val() + obj2->Val(), &sum)); + NAPI_CALL(env, napi_create_double(env, obj1->Val() + obj2->Val(), &sum)); return sum; } diff --git 
a/test/addons-napi/test_async/test_async.cc b/test/addons-napi/test_async/test_async.cc index f257b268b93159..ca76fa2d33b132 100644 --- a/test/addons-napi/test_async/test_async.cc +++ b/test/addons-napi/test_async/test_async.cc @@ -52,7 +52,7 @@ void Complete(napi_env env, napi_status status, void* data) { napi_value argv[2]; NAPI_CALL_RETURN_VOID(env, napi_get_null(env, &argv[0])); - NAPI_CALL_RETURN_VOID(env, napi_create_number(env, c->_output, &argv[1])); + NAPI_CALL_RETURN_VOID(env, napi_create_int32(env, c->_output, &argv[1])); napi_value callback; NAPI_CALL_RETURN_VOID(env, napi_get_reference_value(env, c->_callback, &callback)); diff --git a/test/addons-napi/test_buffer/test_buffer.c b/test/addons-napi/test_buffer/test_buffer.c index 880149e1ff8561..0e12bedfb5602f 100644 --- a/test/addons-napi/test_buffer/test_buffer.c +++ b/test/addons-napi/test_buffer/test_buffer.c @@ -55,7 +55,7 @@ napi_value newExternalBuffer(napi_env env, napi_callback_info info) { napi_value getDeleterCallCount(napi_env env, napi_callback_info info) { napi_value callCount; - NAPI_CALL(env, napi_create_number(env, deleterCallCount, &callCount)); + NAPI_CALL(env, napi_create_int32(env, deleterCallCount, &callCount)); return callCount; } diff --git a/test/addons-napi/test_constructor/test_constructor.c b/test/addons-napi/test_constructor/test_constructor.c index 220d564753ca10..a991dab8533bf1 100644 --- a/test/addons-napi/test_constructor/test_constructor.c +++ b/test/addons-napi/test_constructor/test_constructor.c @@ -12,7 +12,7 @@ napi_value GetValue(napi_env env, napi_callback_info info) { NAPI_ASSERT(env, argc == 0, "Wrong number of arguments"); napi_value number; - NAPI_CALL(env, napi_create_number(env, value_, &number)); + NAPI_CALL(env, napi_create_double(env, value_, &number)); return number; } @@ -53,7 +53,7 @@ napi_value GetStaticValue(napi_env env, napi_callback_info info) { NAPI_ASSERT(env, argc == 0, "Wrong number of arguments"); napi_value number; - NAPI_CALL(env, napi_create_number(env, static_value_, &number)); + NAPI_CALL(env, napi_create_double(env, static_value_, &number)); return number; } @@ -61,7 +61,7 @@ napi_value GetStaticValue(napi_env env, napi_callback_info info) { void Init(napi_env env, napi_value exports, napi_value module, void* priv) { napi_value number; - NAPI_CALL_RETURN_VOID(env, napi_create_number(env, value_, &number)); + NAPI_CALL_RETURN_VOID(env, napi_create_double(env, value_, &number)); napi_property_descriptor properties[] = { { "echo", 0, Echo, 0, 0, 0, napi_enumerable, 0 }, diff --git a/test/addons-napi/test_conversions/test_conversions.c b/test/addons-napi/test_conversions/test_conversions.c index 637cff43b82c4d..a8d526c763c9a2 100644 --- a/test/addons-napi/test_conversions/test_conversions.c +++ b/test/addons-napi/test_conversions/test_conversions.c @@ -24,7 +24,7 @@ napi_value AsInt32(napi_env env, napi_callback_info info) { NAPI_CALL(env, napi_get_value_int32(env, args[0], &value)); napi_value output; - NAPI_CALL(env, napi_create_number(env, value, &output)); + NAPI_CALL(env, napi_create_int32(env, value, &output)); return output; } @@ -38,7 +38,7 @@ napi_value AsUInt32(napi_env env, napi_callback_info info) { NAPI_CALL(env, napi_get_value_uint32(env, args[0], &value)); napi_value output; - NAPI_CALL(env, napi_create_number(env, value, &output)); + NAPI_CALL(env, napi_create_uint32(env, value, &output)); return output; } @@ -52,7 +52,7 @@ napi_value AsInt64(napi_env env, napi_callback_info info) { NAPI_CALL(env, napi_get_value_int64(env, args[0], &value)); 
napi_value output; - NAPI_CALL(env, napi_create_number(env, (double)value, &output)); + NAPI_CALL(env, napi_create_int64(env, (double)value, &output)); return output; } @@ -66,7 +66,7 @@ napi_value AsDouble(napi_env env, napi_callback_info info) { NAPI_CALL(env, napi_get_value_double(env, args[0], &value)); napi_value output; - NAPI_CALL(env, napi_create_number(env, value, &output)); + NAPI_CALL(env, napi_create_double(env, value, &output)); return output; } diff --git a/test/addons-napi/test_general/test_general.c b/test/addons-napi/test_general/test_general.c index 2d2d13a3624e6e..858611939474b2 100644 --- a/test/addons-napi/test_general/test_general.c +++ b/test/addons-napi/test_general/test_general.c @@ -29,7 +29,7 @@ napi_value testGetVersion(napi_env env, napi_callback_info info) { uint32_t version; napi_value result; NAPI_CALL(env, napi_get_version(env, &version)); - NAPI_CALL(env, napi_create_number(env, version, &result)); + NAPI_CALL(env, napi_create_uint32(env, version, &result)); return result; } diff --git a/test/addons-napi/test_number/test_number.c b/test/addons-napi/test_number/test_number.c index 1054741d2de369..6b28afe18ff9e8 100644 --- a/test/addons-napi/test_number/test_number.c +++ b/test/addons-napi/test_number/test_number.c @@ -18,7 +18,7 @@ napi_value Test(napi_env env, napi_callback_info info) { NAPI_CALL(env, napi_get_value_double(env, args[0], &input)); napi_value output; - NAPI_CALL(env, napi_create_number(env, input, &output)); + NAPI_CALL(env, napi_create_double(env, input, &output)); return output; } @@ -40,7 +40,7 @@ napi_value TestInt32Truncation(napi_env env, napi_callback_info info) { NAPI_CALL(env, napi_get_value_int32(env, args[0], &input)); napi_value output; - NAPI_CALL(env, napi_create_number(env, input, &output)); + NAPI_CALL(env, napi_create_int32(env, input, &output)); return output; } diff --git a/test/addons-napi/test_object/test_object.c b/test/addons-napi/test_object/test_object.c index 663e561a35b197..88ac79c170f5ea 100644 --- a/test/addons-napi/test_object/test_object.c +++ b/test/addons-napi/test_object/test_object.c @@ -144,7 +144,7 @@ napi_value New(napi_env env, napi_callback_info info) { NAPI_CALL(env, napi_create_object(env, &ret)); napi_value num; - NAPI_CALL(env, napi_create_number(env, 987654321, &num)); + NAPI_CALL(env, napi_create_int32(env, 987654321, &num)); NAPI_CALL(env, napi_set_named_property(env, ret, "test_number", num)); @@ -187,7 +187,7 @@ napi_value Inflate(napi_env env, napi_callback_info info) { double double_val; NAPI_CALL(env, napi_get_value_double(env, value, &double_val)); - NAPI_CALL(env, napi_create_number(env, double_val + 1, &value)); + NAPI_CALL(env, napi_create_double(env, double_val + 1, &value)); NAPI_CALL(env, napi_set_property(env, obj, property_str, value)); } diff --git a/test/addons-napi/test_properties/test_properties.c b/test/addons-napi/test_properties/test_properties.c index 3053fda4864e8f..3f4f0a6bcbba96 100644 --- a/test/addons-napi/test_properties/test_properties.c +++ b/test/addons-napi/test_properties/test_properties.c @@ -10,7 +10,7 @@ napi_value GetValue(napi_env env, napi_callback_info info) { NAPI_ASSERT(env, argc == 0, "Wrong number of arguments"); napi_value number; - NAPI_CALL(env, napi_create_number(env, value_, &number)); + NAPI_CALL(env, napi_create_double(env, value_, &number)); return number; } @@ -61,7 +61,7 @@ napi_value HasNamedProperty(napi_env env, napi_callback_info info) { void Init(napi_env env, napi_value exports, napi_value module, void* priv) { napi_value number; - 
NAPI_CALL_RETURN_VOID(env, napi_create_number(env, value_, &number)); + NAPI_CALL_RETURN_VOID(env, napi_create_double(env, value_, &number)); napi_value name_value; NAPI_CALL_RETURN_VOID(env, napi_create_string_utf8(env, diff --git a/test/addons-napi/test_reference/test_reference.c b/test/addons-napi/test_reference/test_reference.c index 0d3925ee5d04bc..b16e10d70361b5 100644 --- a/test/addons-napi/test_reference/test_reference.c +++ b/test/addons-napi/test_reference/test_reference.c @@ -8,7 +8,7 @@ static napi_ref test_reference = NULL; napi_value GetFinalizeCount(napi_env env, napi_callback_info info) { napi_value result; - NAPI_CALL(env, napi_create_number(env, finalize_count, &result)); + NAPI_CALL(env, napi_create_int32(env, finalize_count, &result)); return result; } @@ -107,7 +107,7 @@ napi_value IncrementRefcount(napi_env env, napi_callback_info info) { NAPI_CALL(env, napi_reference_ref(env, test_reference, &refcount)); napi_value result; - NAPI_CALL(env, napi_create_number(env, refcount, &result)); + NAPI_CALL(env, napi_create_uint32(env, refcount, &result)); return result; } @@ -119,7 +119,7 @@ napi_value DecrementRefcount(napi_env env, napi_callback_info info) { NAPI_CALL(env, napi_reference_unref(env, test_reference, &refcount)); napi_value result; - NAPI_CALL(env, napi_create_number(env, refcount, &result)); + NAPI_CALL(env, napi_create_uint32(env, refcount, &result)); return result; } diff --git a/test/addons-napi/test_string/test_string.c b/test/addons-napi/test_string/test_string.c index 5cd6d413a679fb..ec80e2c7b57fa8 100644 --- a/test/addons-napi/test_string/test_string.c +++ b/test/addons-napi/test_string/test_string.c @@ -174,7 +174,7 @@ napi_value Utf16Length(napi_env env, napi_callback_info info) { NAPI_CALL(env, napi_get_value_string_utf16(env, args[0], NULL, 0, &length)); napi_value output; - NAPI_CALL(env, napi_create_number(env, (double)length, &output)); + NAPI_CALL(env, napi_create_uint32(env, (uint32_t)length, &output)); return output; } @@ -196,7 +196,7 @@ napi_value Utf8Length(napi_env env, napi_callback_info info) { NAPI_CALL(env, napi_get_value_string_utf8(env, args[0], NULL, 0, &length)); napi_value output; - NAPI_CALL(env, napi_create_number(env, (double)length, &output)); + NAPI_CALL(env, napi_create_uint32(env, (uint32_t)length, &output)); return output; } From 951adbef3d9d19e9bed450f3a9bdec6ce6219ec7 Mon Sep 17 00:00:00 2001 From: Anna Henningsen Date: Tue, 8 Aug 2017 19:09:28 +0200 Subject: [PATCH 134/227] n-api: add napi_get_node_version MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Add `napi_get_node_version`, to help with feature-detecting Node.js as an environment. Backport-PR-URL: https://github.com/nodejs/node/pull/19447 PR-URL: https://github.com/nodejs/node/pull/14696 Reviewed-By: Kyle Farnung Reviewed-By: James M Snell Reviewed-By: Colin Ihrig Reviewed-By: Tobias Nießen --- doc/api/n-api.md | 31 ++++++++++++++++++++ src/node.cc | 3 +- src/node_api.cc | 14 +++++++++ src/node_api.h | 4 +++ src/node_api_types.h | 7 +++++ src/node_version.h | 4 +++ test/addons-napi/test_general/test.js | 5 ++++ test/addons-napi/test_general/test_general.c | 20 +++++++++++++ 8 files changed, 87 insertions(+), 1 deletion(-) diff --git a/doc/api/n-api.md b/doc/api/n-api.md index 5b7c97d15af6a5..0009a279309daf 100644 --- a/doc/api/n-api.md +++ b/doc/api/n-api.md @@ -3285,6 +3285,35 @@ callback invocation, even if it has been successfully cancelled. 
## Version Management +### napi_get_node_version + + +```C +typedef struct { + uint32_t major; + uint32_t minor; + uint32_t patch; + const char* release; +} napi_node_version; + +NAPI_EXTERN +napi_status napi_get_node_version(napi_env env, + const napi_node_version** version); +``` + +- `[in] env`: The environment that the API is invoked under. +- `[out] version`: A pointer to version information for Node itself. + +Returns `napi_ok` if the API succeeded. + +This function fills the `version` struct with the major, minor and patch version +of Node that is currently running, and the `release` field with the +value of [`process.release.name`][`process.release`]. + +The returned buffer is statically allocated and does not need to be freed. + ### napi_get_version + [Asynchronous Operations]: #n_api_asynchronous_operations [Basic N-API Data Types]: #n_api_basic_n_api_data_types [ECMAScript Language Specification]: https://tc39.github.io/ecma262/ From b0f09a2ee6a7e54057a4ba9aed645bdb7f989b1e Mon Sep 17 00:00:00 2001 From: Kyle Farnung Date: Mon, 7 Aug 2017 14:07:19 -0700 Subject: [PATCH 137/227] doc: added napi_get_value_string_latin1 * Reordered string functions alphabetically * Fixed a typo in napi_get_value_string_utf8 Backport-PR-URL: https://github.com/nodejs/node/pull/19447 PR-URL: https://github.com/nodejs/node/pull/14678 Fixes: https://github.com/nodejs/node/issues/14397 Refs: https://github.com/nodejs/node/issues/14256 Reviewed-By: Refael Ackermann Reviewed-By: Colin Ihrig Reviewed-By: Anna Henningsen Reviewed-By: Michael Dawson Reviewed-By: James M Snell Reviewed-By: Timothy Gu --- doc/api/n-api.md | 73 +++++++++++++++++++++++++++++++++--------------- 1 file changed, 50 insertions(+), 23 deletions(-) diff --git a/doc/api/n-api.md b/doc/api/n-api.md index 43b5bddb5f634b..c26a1222e19be2 100644 --- a/doc/api/n-api.md +++ b/doc/api/n-api.md @@ -1462,51 +1462,51 @@ The JavaScript Number type is described in [Section 6.1.6](https://tc39.github.io/ecma262/#sec-ecmascript-language-types-number-type) of the ECMAScript Language Specification. -#### *napi_create_string_utf16* +#### *napi_create_string_latin1* ```C -napi_status napi_create_string_utf16(napi_env env, - const char16_t* str, - size_t length, - napi_value* result) +NAPI_EXTERN napi_status napi_create_string_latin1(napi_env env, + const char* str, + size_t length, + napi_value* result); ``` - `[in] env`: The environment that the API is invoked under. -- `[in] str`: Character buffer representing a UTF16-LE-encoded string. -- `[in] length`: The length of the string in two-byte code units, or -1 if -it is null-terminated. +- `[in] str`: Character buffer representing a ISO-8859-1-encoded string. +- `[in] length`: The length of the string in bytes, or -1 if it is +null-terminated. - `[out] result`: A `napi_value` representing a JavaScript String. Returns `napi_ok` if the API succeeded. -This API creates a JavaScript String object from a UTF16-LE-encoded C string +This API creates a JavaScript String object from a ISO-8859-1-encoded C string. The JavaScript String type is described in [Section 6.1.4](https://tc39.github.io/ecma262/#sec-ecmascript-language-types-string-type) of the ECMAScript Language Specification. 
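As a brief illustration (not part of the patch), a NUL-terminated ISO-8859-1 buffer can be passed with a length of `-1`, as described above. The `text` contents and the surrounding callback that returns `NULL` on failure are assumptions made for the example:

```C
// "caf\xE9" is "café" in ISO-8859-1; plain ASCII bytes are equally valid.
const char text[] = "caf\xE9";
napi_value str;
napi_status status = napi_create_string_latin1(env, text, -1, &str);
if (status != napi_ok) return NULL;
```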
-#### *napi_create_string_latin1* +#### *napi_create_string_utf16* ```C -NAPI_EXTERN napi_status napi_create_string_latin1(napi_env env, - const char* str, - size_t length, - napi_value* result); +napi_status napi_create_string_utf16(napi_env env, + const char16_t* str, + size_t length, + napi_value* result) ``` - `[in] env`: The environment that the API is invoked under. -- `[in] str`: Character buffer representing a latin1-encoded string. -- `[in] length`: The length of the string in bytes, or -1 if it is -null-terminated. +- `[in] str`: Character buffer representing a UTF16-LE-encoded string. +- `[in] length`: The length of the string in two-byte code units, or -1 if +it is null-terminated. - `[out] result`: A `napi_value` representing a JavaScript String. Returns `napi_ok` if the API succeeded. -This API creates a JavaScript String object from a latin1-encoded C string. +This API creates a JavaScript String object from a UTF16-LE-encoded C string The JavaScript String type is described in [Section 6.1.4](https://tc39.github.io/ecma262/#sec-ecmascript-language-types-string-type) @@ -1795,6 +1795,33 @@ is passed in it returns `napi_number_expected`. This API returns the C int64 primitive equivalent of the given JavaScript Number +#### *napi_get_value_string_latin1* + +```C +NAPI_EXTERN napi_status napi_get_value_string_latin1(napi_env env, + napi_value value, + char* buf, + size_t bufsize, + size_t* result) +``` + +- `[in] env`: The environment that the API is invoked under. +- `[in] value`: `napi_value` representing JavaScript string. +- `[in] buf`: Buffer to write the ISO-8859-1-encoded string into. If NULL is +passed in, the length of the string (in bytes) is returned. +- `[in] bufsize`: Size of the destination buffer. +- `[out] result`: Number of bytes copied into the buffer including the null +terminator. If the buffer size is insufficient, the string will be truncated +including a null terminator. + +Returns `napi_ok` if the API succeeded. If a non-String `napi_value` +is passed in it returns `napi_string_expected`. + +This API returns the ISO-8859-1-encoded string corresponding the value passed +in. + #### *napi_get_value_string_utf8* +```C +napi_status napi_remove_wrap(napi_env env, + napi_value js_object, + void** result); +``` + + - `[in] env`: The environment that the API is invoked under. + - `[in] js_object`: The object associated with the native instance. + - `[out] result`: Pointer to the wrapped native instance. + +Returns `napi_ok` if the API succeeded. + +Retrieves a native instance that was previously wrapped in the JavaScript +object `js_object` using `napi_wrap()` and removes the wrapping, thereby +restoring the JavaScript object's prototype chain. If a finalize callback was +associated with the wrapping, it will no longer be called when the JavaScript +object becomes garbage-collected. + ## Asynchronous Operations Addon modules often need to leverage async helpers from libuv as part of their diff --git a/src/node_api.cc b/src/node_api.cc index 2caa85aee5fc2c..db66ea0bab4550 100644 --- a/src/node_api.cc +++ b/src/node_api.cc @@ -679,6 +679,8 @@ v8::Local CreateAccessorCallbackData(napi_env env, return cbdata; } +int kWrapperFields = 3; + // Pointer used to identify items wrapped by N-API. Used by FindWrapper and // napi_wrap(). const char napi_wrap_name[] = "N-API Wrapper"; @@ -687,7 +689,8 @@ const char napi_wrap_name[] = "N-API Wrapper"; // wrapper would be the first in the chain, but it is OK for other objects to // be inserted in the prototype chain. 
bool FindWrapper(v8::Local obj, - v8::Local* result = nullptr) { + v8::Local* result = nullptr, + v8::Local* parent = nullptr) { v8::Local wrapper = obj; do { @@ -695,8 +698,11 @@ bool FindWrapper(v8::Local obj, if (proto.IsEmpty() || !proto->IsObject()) { return false; } + if (parent != nullptr) { + *parent = wrapper; + } wrapper = proto.As(); - if (wrapper->InternalFieldCount() == 2) { + if (wrapper->InternalFieldCount() == kWrapperFields) { v8::Local external = wrapper->GetInternalField(1); if (external->IsExternal() && external.As()->Value() == v8impl::napi_wrap_name) { @@ -750,6 +756,29 @@ napi_env GetEnv(v8::Local context) { return result; } +napi_status Unwrap(napi_env env, + napi_value js_object, + void** result, + v8::Local* wrapper, + v8::Local* parent = nullptr) { + CHECK_ARG(env, js_object); + CHECK_ARG(env, result); + + v8::Local value = v8impl::V8LocalValueFromJsValue(js_object); + RETURN_STATUS_IF_FALSE(env, value->IsObject(), napi_invalid_arg); + v8::Local obj = value.As(); + + RETURN_STATUS_IF_FALSE( + env, v8impl::FindWrapper(obj, wrapper, parent), napi_invalid_arg); + + v8::Local unwrappedValue = (*wrapper)->GetInternalField(0); + RETURN_STATUS_IF_FALSE(env, unwrappedValue->IsExternal(), napi_invalid_arg); + + *result = unwrappedValue.As()->Value(); + + return napi_ok; +} + } // end of namespace v8impl // Intercepts the Node-V8 module registration callback. Converts parameters @@ -2269,62 +2298,78 @@ napi_status napi_wrap(napi_env env, // Create a wrapper object with an internal field to hold the wrapped pointer // and a second internal field to identify the owner as N-API. v8::Local wrapper_template; - ENV_OBJECT_TEMPLATE(env, wrap, wrapper_template, 2); + ENV_OBJECT_TEMPLATE(env, wrap, wrapper_template, v8impl::kWrapperFields); auto maybe_object = wrapper_template->NewInstance(context); CHECK_MAYBE_EMPTY(env, maybe_object, napi_generic_failure); - v8::Local wrapper = maybe_object.ToLocalChecked(); - wrapper->SetInternalField(1, v8::External::New(isolate, - reinterpret_cast(const_cast(v8impl::napi_wrap_name)))); // Store the pointer as an external in the wrapper. wrapper->SetInternalField(0, v8::External::New(isolate, native_object)); + wrapper->SetInternalField(1, v8::External::New(isolate, + reinterpret_cast(const_cast(v8impl::napi_wrap_name)))); // Insert the wrapper into the object's prototype chain. v8::Local proto = obj->GetPrototype(); CHECK(wrapper->SetPrototype(context, proto).FromJust()); CHECK(obj->SetPrototype(context, wrapper).FromJust()); + v8impl::Reference* reference = nullptr; if (result != nullptr) { // The returned reference should be deleted via napi_delete_reference() // ONLY in response to the finalize callback invocation. (If it is deleted // before then, then the finalize callback will never be invoked.) // Therefore a finalize callback is required when returning a reference. CHECK_ARG(env, finalize_cb); - v8impl::Reference* reference = v8impl::Reference::New( + reference = v8impl::Reference::New( env, obj, 0, false, finalize_cb, native_object, finalize_hint); *result = reinterpret_cast(reference); } else if (finalize_cb != nullptr) { // Create a self-deleting reference just for the finalize callback. 
- v8impl::Reference::New( + reference = v8impl::Reference::New( env, obj, 0, true, finalize_cb, native_object, finalize_hint); } + if (reference != nullptr) { + wrapper->SetInternalField(2, v8::External::New(isolate, reference)); + } + return GET_RETURN_STATUS(env); } -napi_status napi_unwrap(napi_env env, napi_value js_object, void** result) { +napi_status napi_unwrap(napi_env env, napi_value obj, void** result) { // Omit NAPI_PREAMBLE and GET_RETURN_STATUS because V8 calls here cannot throw // JS exceptions. CHECK_ENV(env); - CHECK_ARG(env, js_object); - CHECK_ARG(env, result); - - v8::Local value = v8impl::V8LocalValueFromJsValue(js_object); - RETURN_STATUS_IF_FALSE(env, value->IsObject(), napi_invalid_arg); - v8::Local obj = value.As(); + v8::Local wrapper; + return napi_set_last_error(env, v8impl::Unwrap(env, obj, result, &wrapper)); +} +napi_status napi_remove_wrap(napi_env env, napi_value obj, void** result) { + NAPI_PREAMBLE(env); v8::Local wrapper; - RETURN_STATUS_IF_FALSE( - env, v8impl::FindWrapper(obj, &wrapper), napi_invalid_arg); + v8::Local parent; + napi_status status = v8impl::Unwrap(env, obj, result, &wrapper, &parent); + if (status != napi_ok) { + return napi_set_last_error(env, status); + } - v8::Local unwrappedValue = wrapper->GetInternalField(0); - RETURN_STATUS_IF_FALSE(env, unwrappedValue->IsExternal(), napi_invalid_arg); + v8::Local external = wrapper->GetInternalField(2); + if (external->IsExternal()) { + v8impl::Reference::Delete( + static_cast(external.As()->Value())); + } - *result = unwrappedValue.As()->Value(); + if (!parent.IsEmpty()) { + v8::Maybe maybe = parent->SetPrototype( + env->isolate->GetCurrentContext(), wrapper->GetPrototype()); + CHECK_MAYBE_NOTHING(env, maybe, napi_generic_failure); + if (!maybe.FromMaybe(false)) { + return napi_set_last_error(env, napi_generic_failure); + } + } - return napi_clear_last_error(env); + return GET_RETURN_STATUS(env); } napi_status napi_create_external(napi_env env, diff --git a/src/node_api.h b/src/node_api.h index 0cf0ba046997d3..e52e2016d733b9 100644 --- a/src/node_api.h +++ b/src/node_api.h @@ -362,6 +362,9 @@ NAPI_EXTERN napi_status napi_wrap(napi_env env, NAPI_EXTERN napi_status napi_unwrap(napi_env env, napi_value js_object, void** result); +NAPI_EXTERN napi_status napi_remove_wrap(napi_env env, + napi_value js_object, + void** result); NAPI_EXTERN napi_status napi_create_external(napi_env env, void* data, napi_finalize finalize_cb, diff --git a/test/addons-napi/test_general/test.js b/test/addons-napi/test_general/test.js index 5dfdf6e8539c5a..10cf8b6e40132b 100644 --- a/test/addons-napi/test_general/test.js +++ b/test/addons-napi/test_general/test.js @@ -1,4 +1,5 @@ 'use strict'; +// Flags: --expose-gc const common = require('../../common'); const test_general = require(`./build/${common.buildType}/test_general`); @@ -56,10 +57,37 @@ assert.strictEqual(release, process.release.name); // for null assert.strictEqual(test_general.testNapiTypeof(null), 'null'); -const x = {}; +// Ensure that garbage collecting an object with a wrapped native item results +// in the finalize callback being called. +let w = {}; +test_general.wrap(w, []); +w = null; +global.gc(); +assert.strictEqual(test_general.derefItemWasCalled(), true, + 'deref_item() was called upon garbage collecting a ' + + 'wrapped object'); // Assert that wrapping twice fails. 
+const x = {}; test_general.wrap(x, 25); assert.throws(function() { test_general.wrap(x, 'Blah'); }, Error); + +// Ensure that wrapping, removing the wrap, and then wrapping again works. +const y = {}; +test_general.wrap(y, -12); +test_general.removeWrap(y); +assert.doesNotThrow(function() { + test_general.wrap(y, 're-wrap!'); +}, Error, 'Wrapping twice succeeds if a remove_wrap() separates the instances'); + +// Ensure that removing a wrap and garbage collecting does not fire the +// finalize callback. +let z = {}; +test_general.testFinalizeWrap(z); +test_general.removeWrap(z); +z = null; +global.gc(); +assert.strictEqual(test_general.finalizeWasCalled(), false, + 'finalize callback was not called upon garbage collection'); diff --git a/test/addons-napi/test_general/test_general.c b/test/addons-napi/test_general/test_general.c index da347bd68bcc9a..ecec3e014ba0b1 100644 --- a/test/addons-napi/test_general/test_general.c +++ b/test/addons-napi/test_general/test_general.c @@ -138,17 +138,29 @@ napi_value testNapiTypeof(napi_env env, napi_callback_info info) { return result; } +static bool deref_item_called = false; static void deref_item(napi_env env, void* data, void* hint) { (void) hint; + deref_item_called = true; NAPI_CALL_RETURN_VOID(env, napi_delete_reference(env, (napi_ref)data)); } +napi_value deref_item_was_called(napi_env env, napi_callback_info info) { + napi_value it_was_called; + + NAPI_CALL(env, napi_get_boolean(env, deref_item_called, &it_was_called)); + + return it_was_called; +} + napi_value wrap(napi_env env, napi_callback_info info) { size_t argc = 2; napi_value argv[2]; napi_ref payload; + deref_item_called = false; + NAPI_CALL(env, napi_get_cb_info(env, info, &argc, argv, NULL, NULL)); NAPI_CALL(env, napi_create_reference(env, argv[1], 1, &payload)); NAPI_CALL(env, napi_wrap(env, argv[0], payload, deref_item, NULL, NULL)); @@ -156,6 +168,43 @@ napi_value wrap(napi_env env, napi_callback_info info) { return NULL; } +napi_value remove_wrap(napi_env env, napi_callback_info info) { + size_t argc = 1; + napi_value wrapped; + void* data; + + NAPI_CALL(env, napi_get_cb_info(env, info, &argc, &wrapped, NULL, NULL)); + NAPI_CALL(env, napi_remove_wrap(env, wrapped, &data)); + if (data != NULL) { + NAPI_CALL(env, napi_delete_reference(env, (napi_ref)data)); + } + + return NULL; +} + +static bool finalize_called = false; +static void test_finalize(napi_env env, void* data, void* hint) { + finalize_called = true; +} + +napi_value test_finalize_wrap(napi_env env, napi_callback_info info) { + size_t argc = 1; + napi_value js_object; + + NAPI_CALL(env, napi_get_cb_info(env, info, &argc, &js_object, NULL, NULL)); + NAPI_CALL(env, napi_wrap(env, js_object, NULL, test_finalize, NULL, NULL)); + + return NULL; +} + +napi_value finalize_was_called(napi_env env, napi_callback_info info) { + napi_value it_was_called; + + NAPI_CALL(env, napi_get_boolean(env, finalize_called, &it_was_called)); + + return it_was_called; +} + void Init(napi_env env, napi_value exports, napi_value module, void* priv) { napi_property_descriptor descriptors[] = { DECLARE_NAPI_PROPERTY("testStrictEquals", testStrictEquals), @@ -169,6 +218,10 @@ void Init(napi_env env, napi_value exports, napi_value module, void* priv) { DECLARE_NAPI_PROPERTY("testNapiErrorCleanup", testNapiErrorCleanup), DECLARE_NAPI_PROPERTY("testNapiTypeof", testNapiTypeof), DECLARE_NAPI_PROPERTY("wrap", wrap), + DECLARE_NAPI_PROPERTY("removeWrap", remove_wrap), + DECLARE_NAPI_PROPERTY("testFinalizeWrap", test_finalize_wrap), + 
DECLARE_NAPI_PROPERTY("finalizeWasCalled", finalize_was_called), + DECLARE_NAPI_PROPERTY("derefItemWasCalled", deref_item_was_called), }; NAPI_CALL_RETURN_VOID(env, napi_define_properties( From a766802bee21a1ae1160da67aa1758073ac27fba Mon Sep 17 00:00:00 2001 From: Daniel Taveras Date: Thu, 27 Jul 2017 21:05:01 -0400 Subject: [PATCH 139/227] doc: fix doc for napi_get_value_string_utf8 The API for napi_get_value_string_utf8() appears to have been previously changed. This improves the doc reflect the current design. Backport-PR-URL: https://github.com/nodejs/node/pull/19447 PR-URL: https://github.com/nodejs/node/pull/14529 Fixes: https://github.com/nodejs/node/issues/14398 Reviewed-By: Colin Ihrig Reviewed-By: James M Snell Reviewed-By: Michael Dawson --- doc/api/n-api.md | 26 +++++++++++++------------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/doc/api/n-api.md b/doc/api/n-api.md index be06a5ab3bed7a..89aa08a15198ce 100644 --- a/doc/api/n-api.md +++ b/doc/api/n-api.md @@ -1811,10 +1811,10 @@ NAPI_EXTERN napi_status napi_get_value_string_latin1(napi_env env, - `[in] value`: `napi_value` representing JavaScript string. - `[in] buf`: Buffer to write the ISO-8859-1-encoded string into. If NULL is passed in, the length of the string (in bytes) is returned. -- `[in] bufsize`: Size of the destination buffer. -- `[out] result`: Number of bytes copied into the buffer including the null -terminator. If the buffer size is insufficient, the string will be truncated -including a null terminator. +- `[in] bufsize`: Size of the destination buffer. When this value is +insufficient, the returned string will be truncated. +- `[out] result`: Number of bytes copied into the buffer, excluding the null +terminator. Returns `napi_ok` if the API succeeded. If a non-String `napi_value` is passed in it returns `napi_string_expected`. @@ -1837,11 +1837,11 @@ napi_status napi_get_value_string_utf8(napi_env env, - `[in] env`: The environment that the API is invoked under. - `[in] value`: `napi_value` representing JavaScript string. - `[in] buf`: Buffer to write the UTF8-encoded string into. If NULL is passed -in, the length of the string (in bytes) is returned. -- `[in] bufsize`: Size of the destination buffer. -- `[out] result`: Number of bytes copied into the buffer including the null -terminator. If the buffer size is insufficient, the string will be truncated -including a null terminator. + in, the length of the string (in bytes) is returned. +- `[in] bufsize`: Size of the destination buffer. When this value is +insufficient, the returned string will be truncated. +- `[out] result`: Number of bytes copied into the buffer, excluding the null +terminator. Returns `napi_ok` if the API succeeded. If a non-String `napi_value` is passed in it returns `napi_string_expected`. @@ -1864,10 +1864,10 @@ napi_status napi_get_value_string_utf16(napi_env env, - `[in] value`: `napi_value` representing JavaScript string. - `[in] buf`: Buffer to write the UTF16-LE-encoded string into. If NULL is passed in, the length of the string (in 2-byte code units) is returned. -- `[in] bufsize`: Size of the destination buffer. -- `[out] result`: Number of 2-byte code units copied into the buffer including -the null terminator. If the buffer size is insufficient, the string will be -truncated including a null terminator. +- `[in] bufsize`: Size of the destination buffer. When this value is +insufficient, the returned string will be truncated. 
+- `[out] result`: Number of 2-byte code units copied into the buffer, excluding the null +terminator. Returns `napi_ok` if the API succeeded. If a non-String `napi_value` is passed in it returns `napi_string_expected`. From 31c97178c142ddbb4f4a1b942f6ec31e01ae2b2f Mon Sep 17 00:00:00 2001 From: Daniil Shakir Date: Tue, 22 Aug 2017 00:36:32 +0300 Subject: [PATCH 140/227] test: remove unused parameters Backport-PR-URL: https://github.com/nodejs/node/pull/19447 PR-URL: https://github.com/nodejs/node/pull/14968 Reviewed-By: Rich Trott Reviewed-By: Kunal Pathak Reviewed-By: Colin Ihrig Reviewed-By: David Cai Reviewed-By: Yuta Hiroto Reviewed-By: Luigi Pinca Reviewed-By: Daniel Bevenius Reviewed-By: James M Snell --- test/addons-napi/test_async/test.js | 2 +- test/addons-napi/test_typedarray/test.js | 2 +- test/addons/repl-domain-abort/test.js | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/test/addons-napi/test_async/test.js b/test/addons-napi/test_async/test.js index eda6963743ff6f..cfd39a5b11ae2d 100644 --- a/test/addons-napi/test_async/test.js +++ b/test/addons-napi/test_async/test.js @@ -9,7 +9,7 @@ const testException = 'test_async_cb_exception'; // Exception thrown from async completion callback. // (Tested in a spawned process because the exception is fatal.) if (process.argv[2] === 'child') { - test_async.Test(1, common.mustCall(function(err, val) { + test_async.Test(1, common.mustCall(function() { throw new Error(testException); })); return; diff --git a/test/addons-napi/test_typedarray/test.js b/test/addons-napi/test_typedarray/test.js index dcb7d76442f608..b81295902069b7 100644 --- a/test/addons-napi/test_typedarray/test.js +++ b/test/addons-napi/test_typedarray/test.js @@ -44,7 +44,7 @@ const arrayTypes = [ Int8Array, Uint8Array, Uint8ClampedArray, Int16Array, Uint16Array, Int32Array, Uint32Array, Float32Array, Float64Array ]; -arrayTypes.forEach((currentType, key) => { +arrayTypes.forEach((currentType) => { const template = Reflect.construct(currentType, buffer); const theArray = test_typedarray.CreateTypedArray(template, buffer); diff --git a/test/addons/repl-domain-abort/test.js b/test/addons/repl-domain-abort/test.js index 8e5d8785164fb9..9b9a63abfb685c 100644 --- a/test/addons/repl-domain-abort/test.js +++ b/test/addons/repl-domain-abort/test.js @@ -25,7 +25,7 @@ const lines = [ const dInput = new stream.Readable(); const dOutput = new stream.Writable(); -dInput._read = function _read(size) { +dInput._read = function _read() { while (lines.length > 0 && this.push(lines.shift())); if (lines.length === 0) this.push(null); From 278a2d069f97e7fdff5892eefc98d85f38153c94 Mon Sep 17 00:00:00 2001 From: Gabriel Schulhof Date: Thu, 24 Aug 2017 13:33:26 +0300 Subject: [PATCH 141/227] n-api: implement promise Promise is implemented as a pair of objects. `napi_create_promise()` returns both a JavaScript promise and a newly allocated "deferred" in its out-params. The deferred is linked to the promise such that the deferred can be passed to `napi_resolve_deferred()` or `napi_reject_deferred()` to reject/resolve the promise. `napi_is_promise()` can be used to check if a `napi_value` is a native promise - that is, a promise created by the underlying engine, rather than a pure JS implementation of a promise. 
Backport-PR-URL: https://github.com/nodejs/node/pull/19447 PR-URL: https://github.com/nodejs/node/pull/14365 Fixes: https://github.com/nodejs/abi-stable-node/issues/242 Reviewed-By: Anna Henningsen Reviewed-By: James M Snell Reviewed-By: Benjamin Gruenbaum Reviewed-By: Timothy Gu --- doc/api/n-api.md | 138 +++++++++++++++++++ src/node_api.cc | 78 +++++++++++ src/node_api.h | 14 ++ src/node_api_types.h | 1 + test/addons-napi/test_promise/binding.gyp | 8 ++ test/addons-napi/test_promise/test.js | 60 ++++++++ test/addons-napi/test_promise/test_promise.c | 60 ++++++++ 7 files changed, 359 insertions(+) create mode 100644 test/addons-napi/test_promise/binding.gyp create mode 100644 test/addons-napi/test_promise/test.js create mode 100644 test/addons-napi/test_promise/test_promise.c diff --git a/doc/api/n-api.md b/doc/api/n-api.md index 89aa08a15198ce..12aad7583f6d93 100644 --- a/doc/api/n-api.md +++ b/doc/api/n-api.md @@ -42,6 +42,7 @@ The documentation for N-API is structured as follows: * [Working with JavaScript Functions][] * [Object Wrap][] * [Asynchronous Operations][] +* [Promises][] The N-API is a C API that ensures ABI stability across Node.js versions and different compiler levels. However, we also understand that a C++ @@ -3395,6 +3396,142 @@ support it: +## Promises + +N-API provides facilities for creating `Promise` objects as described in +[Section 25.4][] of the ECMA specification. It implements promises as a pair of +objects. When a promise is created by `napi_create_promise()`, a "deferred" +object is created and returned alongside the `Promise`. The deferred object is +bound to the created `Promise` and is the only means to resolve or reject the +`Promise` using `napi_resolve_deferred()` or `napi_reject_deferred()`. The +deferred object that is created by `napi_create_promise()` is freed by +`napi_resolve_deferred()` or `napi_reject_deferred()`. The `Promise` object may +be returned to JavaScript where it can be used in the usual fashion. + +For example, to create a promise and pass it to an asynchronous worker: +```c +napi_deferred deferred; +napi_value promise; +napi_status status; + +// Create the promise. +status = napi_create_promise(env, &deferred, &promise); +if (status != napi_ok) return NULL; + +// Pass the deferred to a function that performs an asynchronous action. +do_something_asynchronous(deferred); + +// Return the promise to JS +return promise; +``` + +The above function `do_something_asynchronous()` would perform its asynchronous +action and then it would resolve or reject the deferred, thereby concluding the +promise and freeing the deferred: +```c +napi_deferred deferred; +napi_value undefined; +napi_status status; + +// Create a value with which to conclude the deferred. +status = napi_get_undefined(env, &undefined); +if (status != napi_ok) return NULL; + +// Resolve or reject the promise associated with the deferred depending on +// whether the asynchronous action succeeded. +if (asynchronous_action_succeeded) { + status = napi_resolve_deferred(env, deferred, undefined); +} else { + status = napi_reject_deferred(env, deferred, undefined); +} +if (status != napi_ok) return NULL; + +// At this point the deferred has been freed, so we should assign NULL to it. +deferred = NULL; +``` + +### napi_create_promise + +```C +NAPI_EXTERN napi_status napi_create_promise(napi_env env, + napi_deferred* deferred, + napi_value* promise); +``` + +- `[in] env`: The environment that the API is invoked under. 
+- `[out] deferred`: A newly created deferred object which can later be passed to +`napi_resolve_deferred()` or `napi_reject_deferred()` to resolve resp. reject +the associated promise. +- `[out] promise`: The JavaScript promise associated with the deferred object. + +Returns `napi_ok` if the API succeeded. + +This API creates a deferred object and a JavaScript promise. + +### napi_resolve_deferred + +```C +NAPI_EXTERN napi_status napi_resolve_deferred(napi_env env, + napi_deferred deferred, + napi_value resolution); +``` + +- `[in] env`: The environment that the API is invoked under. +- `[in] deferred`: The deferred object whose associated promise to resolve. +- `[in] resolution`: The value with which to resolve the promise. + +This API resolves a JavaScript promise by way of the deferred object +with which it is associated. Thus, it can only be used to resolve JavaScript +promises for which the corresponding deferred object is available. This +effectively means that the promise must have been created using +`napi_create_promise()` and the deferred object returned from that call must +have been retained in order to be passed to this API. + +The deferred object is freed upon successful completion. + +### napi_reject_deferred + +```C +NAPI_EXTERN napi_status napi_reject_deferred(napi_env env, + napi_deferred deferred, + napi_value rejection); +``` + +- `[in] env`: The environment that the API is invoked under. +- `[in] deferred`: The deferred object whose associated promise to resolve. +- `[in] rejection`: The value with which to reject the promise. + +This API rejects a JavaScript promise by way of the deferred object +with which it is associated. Thus, it can only be used to reject JavaScript +promises for which the corresponding deferred object is available. This +effectively means that the promise must have been created using +`napi_create_promise()` and the deferred object returned from that call must +have been retained in order to be passed to this API. + +The deferred object is freed upon successful completion. + +### napi_is_promise + +```C +NAPI_EXTERN napi_status napi_is_promise(napi_env env, + napi_value promise, + bool* is_promise); +``` + +- `[in] env`: The environment that the API is invoked under. +- `[in] promise`: The promise to examine +- `[out] is_promise`: Flag indicating whether `promise` is a native promise +object - that is, a promise object created by the underlying engine. 
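Pulling the fragments above together, the following sketch (an editorial addition, not part of the patch) is one complete callback that creates a promise, resolves it immediately, and returns it to JavaScript. The `MakeResolvedPromise` name and the use of the test suite's `NAPI_CALL` helper macro are assumptions; a real add-on would normally keep the `napi_deferred` until its asynchronous work finishes.

```C
napi_value MakeResolvedPromise(napi_env env, napi_callback_info info) {
  napi_deferred deferred;
  napi_value promise;
  napi_value undefined;

  NAPI_CALL(env, napi_create_promise(env, &deferred, &promise));
  NAPI_CALL(env, napi_get_undefined(env, &undefined));

  // Concluding the deferred frees it, so it must not be reused afterwards.
  NAPI_CALL(env, napi_resolve_deferred(env, deferred, undefined));
  deferred = NULL;

  return promise;
}
```

From JavaScript, the value returned by such a callback behaves like any other promise and can be consumed with `.then()`, as the accompanying test does.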
+ +[Promises]: #n_api_promises [Asynchronous Operations]: #n_api_asynchronous_operations [Basic N-API Data Types]: #n_api_basic_n_api_data_types [ECMAScript Language Specification]: https://tc39.github.io/ecma262/ @@ -3406,6 +3543,7 @@ support it: [Section 9.1.6]: https://tc39.github.io/ecma262/#sec-ordinary-object-internal-methods-and-internal-slots-defineownproperty-p-desc [Section 12.5.5]: https://tc39.github.io/ecma262/#sec-typeof-operator [Section 24.3]: https://tc39.github.io/ecma262/#sec-dataview-objects +[Section 25.4]: https://tc39.github.io/ecma262/#sec-promise-objects [Working with JavaScript Functions]: #n_api_working_with_javascript_functions [Working with JavaScript Properties]: #n_api_working_with_javascript_properties [Working with JavaScript Values]: #n_api_working_with_javascript_values diff --git a/src/node_api.cc b/src/node_api.cc index db66ea0bab4550..fd31817e4a8f71 100644 --- a/src/node_api.cc +++ b/src/node_api.cc @@ -223,6 +223,14 @@ V8EscapableHandleScopeFromJsEscapableHandleScope( static_assert(sizeof(v8::Local) == sizeof(napi_value), "Cannot convert between v8::Local and napi_value"); +napi_deferred JsDeferredFromV8Persistent(v8::Persistent* local) { + return reinterpret_cast(local); +} + +v8::Persistent* V8PersistentFromJsDeferred(napi_deferred local) { + return reinterpret_cast*>(local); +} + napi_value JsValueFromV8LocalValue(v8::Local local) { return reinterpret_cast(*local); } @@ -779,6 +787,33 @@ napi_status Unwrap(napi_env env, return napi_ok; } +napi_status ConcludeDeferred(napi_env env, + napi_deferred deferred, + napi_value result, + bool is_resolved) { + NAPI_PREAMBLE(env); + CHECK_ARG(env, result); + + v8::Local context = env->isolate->GetCurrentContext(); + v8::Persistent* deferred_ref = + V8PersistentFromJsDeferred(deferred); + v8::Local v8_deferred = + v8::Local::New(env->isolate, *deferred_ref); + + auto v8_resolver = v8::Local::Cast(v8_deferred); + + v8::Maybe success = is_resolved ? + v8_resolver->Resolve(context, v8impl::V8LocalValueFromJsValue(result)) : + v8_resolver->Reject(context, v8impl::V8LocalValueFromJsValue(result)); + + deferred_ref->Reset(); + delete deferred_ref; + + RETURN_STATUS_IF_FALSE(env, success.FromMaybe(false), napi_generic_failure); + + return GET_RETURN_STATUS(env); +} + } // end of namespace v8impl // Intercepts the Node-V8 module registration callback. 
Converts parameters @@ -3335,3 +3370,46 @@ napi_status napi_cancel_async_work(napi_env env, napi_async_work work) { return napi_clear_last_error(env); } + +NAPI_EXTERN napi_status napi_create_promise(napi_env env, + napi_deferred* deferred, + napi_value* promise) { + NAPI_PREAMBLE(env); + CHECK_ARG(env, deferred); + CHECK_ARG(env, promise); + + auto maybe = v8::Promise::Resolver::New(env->isolate->GetCurrentContext()); + CHECK_MAYBE_EMPTY(env, maybe, napi_generic_failure); + + auto v8_resolver = maybe.ToLocalChecked(); + auto v8_deferred = new v8::Persistent(); + v8_deferred->Reset(env->isolate, v8_resolver); + + *deferred = v8impl::JsDeferredFromV8Persistent(v8_deferred); + *promise = v8impl::JsValueFromV8LocalValue(v8_resolver->GetPromise()); + return GET_RETURN_STATUS(env); +} + +NAPI_EXTERN napi_status napi_resolve_deferred(napi_env env, + napi_deferred deferred, + napi_value resolution) { + return v8impl::ConcludeDeferred(env, deferred, resolution, true); +} + +NAPI_EXTERN napi_status napi_reject_deferred(napi_env env, + napi_deferred deferred, + napi_value resolution) { + return v8impl::ConcludeDeferred(env, deferred, resolution, false); +} + +NAPI_EXTERN napi_status napi_is_promise(napi_env env, + napi_value promise, + bool* is_promise) { + CHECK_ENV(env); + CHECK_ARG(env, promise); + CHECK_ARG(env, is_promise); + + *is_promise = v8impl::V8LocalValueFromJsValue(promise)->IsPromise(); + + return napi_clear_last_error(env); +} diff --git a/src/node_api.h b/src/node_api.h index e52e2016d733b9..6a4b2941879ff0 100644 --- a/src/node_api.h +++ b/src/node_api.h @@ -543,6 +543,20 @@ NAPI_EXTERN napi_status napi_get_node_version(napi_env env, const napi_node_version** version); +// Promises +NAPI_EXTERN napi_status napi_create_promise(napi_env env, + napi_deferred* deferred, + napi_value* promise); +NAPI_EXTERN napi_status napi_resolve_deferred(napi_env env, + napi_deferred deferred, + napi_value resolution); +NAPI_EXTERN napi_status napi_reject_deferred(napi_env env, + napi_deferred deferred, + napi_value rejection); +NAPI_EXTERN napi_status napi_is_promise(napi_env env, + napi_value promise, + bool* is_promise); + EXTERN_C_END #endif // SRC_NODE_API_H_ diff --git a/src/node_api_types.h b/src/node_api_types.h index 0bdc377c8fc6a0..ac8482bf9dcf9d 100644 --- a/src/node_api_types.h +++ b/src/node_api_types.h @@ -17,6 +17,7 @@ typedef struct napi_handle_scope__ *napi_handle_scope; typedef struct napi_escapable_handle_scope__ *napi_escapable_handle_scope; typedef struct napi_callback_info__ *napi_callback_info; typedef struct napi_async_work__ *napi_async_work; +typedef struct napi_deferred__ *napi_deferred; typedef enum { napi_default = 0, diff --git a/test/addons-napi/test_promise/binding.gyp b/test/addons-napi/test_promise/binding.gyp new file mode 100644 index 00000000000000..bf266f93db74be --- /dev/null +++ b/test/addons-napi/test_promise/binding.gyp @@ -0,0 +1,8 @@ +{ + "targets": [ + { + "target_name": "test_promise", + "sources": [ "test_promise.c" ] + } + ] +} diff --git a/test/addons-napi/test_promise/test.js b/test/addons-napi/test_promise/test.js new file mode 100644 index 00000000000000..4c2a2e5e76c9fb --- /dev/null +++ b/test/addons-napi/test_promise/test.js @@ -0,0 +1,60 @@ +'use strict'; + +const common = require('../../common'); +const test_promise = require(`./build/${common.buildType}/test_promise`); +const assert = require('assert'); + +let expected_result, promise; + +// A resolution +expected_result = 42; +promise = test_promise.createPromise(); +promise.then( + 
common.mustCall(function(result) { + assert.strictEqual(result, expected_result, + 'promise resolved as expected'); + }), + common.mustNotCall()); +test_promise.concludeCurrentPromise(expected_result, true); + +// A rejection +expected_result = 'It\'s not you, it\'s me.'; +promise = test_promise.createPromise(); +promise.then( + common.mustNotCall(), + common.mustCall(function(result) { + assert.strictEqual(result, expected_result, + 'promise rejected as expected'); + })); +test_promise.concludeCurrentPromise(expected_result, false); + +// Chaining +promise = test_promise.createPromise(); +promise.then( + common.mustCall(function(result) { + assert.strictEqual(result, 'chained answer', + 'resolving with a promise chains properly'); + }), + common.mustNotCall()); +test_promise.concludeCurrentPromise(Promise.resolve('chained answer'), true); + +assert.strictEqual(test_promise.isPromise(promise), true, + 'natively created promise is recognized as a promise'); + +assert.strictEqual(test_promise.isPromise(Promise.reject(-1)), true, + 'Promise created with JS is recognized as a promise'); + +assert.strictEqual(test_promise.isPromise(2.4), false, + 'Number is recognized as not a promise'); + +assert.strictEqual(test_promise.isPromise('I promise!'), false, + 'String is recognized as not a promise'); + +assert.strictEqual(test_promise.isPromise(undefined), false, + 'undefined is recognized as not a promise'); + +assert.strictEqual(test_promise.isPromise(null), false, + 'null is recognized as not a promise'); + +assert.strictEqual(test_promise.isPromise({}), false, + 'an object is recognized as not a promise'); diff --git a/test/addons-napi/test_promise/test_promise.c b/test/addons-napi/test_promise/test_promise.c new file mode 100644 index 00000000000000..dc617f4592e520 --- /dev/null +++ b/test/addons-napi/test_promise/test_promise.c @@ -0,0 +1,60 @@ +#include +#include "../common.h" + +napi_deferred deferred = NULL; + +napi_value createPromise(napi_env env, napi_callback_info info) { + napi_value promise; + + // We do not overwrite an existing deferred. 
+ if (deferred != NULL) { + return NULL; + } + + NAPI_CALL(env, napi_create_promise(env, &deferred, &promise)); + + return promise; +} + +napi_value concludeCurrentPromise(napi_env env, napi_callback_info info) { + napi_value argv[2]; + size_t argc = 2; + bool resolution; + + NAPI_CALL(env, napi_get_cb_info(env, info, &argc, argv, NULL, NULL)); + NAPI_CALL(env, napi_get_value_bool(env, argv[1], &resolution)); + if (resolution) { + NAPI_CALL(env, napi_resolve_deferred(env, deferred, argv[0])); + } else { + NAPI_CALL(env, napi_reject_deferred(env, deferred, argv[0])); + } + + deferred = NULL; + + return NULL; +} + +napi_value isPromise(napi_env env, napi_callback_info info) { + napi_value promise, result; + size_t argc = 1; + bool is_promise; + + NAPI_CALL(env, napi_get_cb_info(env, info, &argc, &promise, NULL, NULL)); + NAPI_CALL(env, napi_is_promise(env, promise, &is_promise)); + NAPI_CALL(env, napi_get_boolean(env, is_promise, &result)); + + return result; +} + +void Init(napi_env env, napi_value exports, napi_value module, void* priv) { + napi_property_descriptor descriptors[] = { + DECLARE_NAPI_PROPERTY("createPromise", createPromise), + DECLARE_NAPI_PROPERTY("concludeCurrentPromise", concludeCurrentPromise), + DECLARE_NAPI_PROPERTY("isPromise", isPromise), + }; + + NAPI_CALL_RETURN_VOID(env, napi_define_properties( + env, exports, sizeof(descriptors) / sizeof(*descriptors), descriptors)); +} + +NAPI_MODULE(addon, Init) From ac5b9048083156c8dba338e4973ff04f82dfa712 Mon Sep 17 00:00:00 2001 From: Chris Young Date: Tue, 27 Jun 2017 20:56:15 -0700 Subject: [PATCH 142/227] n-api: adds function to adjust external memory Added a wrapper around v8::Isolate::AdjustAmountOfExternalAllocatedMemory Backport-PR-URL: https://github.com/nodejs/node/pull/19447 PR-URL: https://github.com/nodejs/node/pull/14310 Reviewed-By: James M Snell Reviewed-By: Anna Henningsen Reviewed-By: Timothy Gu Reviewed-By: Colin Ihrig Reviewed-By: Michael Dawson Reviewed-By: Benjamin Gruenbaum Fixes: https://github.com/nodejs/node/issues/13928 --- doc/api/n-api.md | 25 ++++++++++++++++++++ src/node_api.cc | 13 ++++++++++ src/node_api.h | 5 ++++ test/addons-napi/test_general/test.js | 5 ++++ test/addons-napi/test_general/test_general.c | 11 +++++++++ 5 files changed, 59 insertions(+) diff --git a/doc/api/n-api.md b/doc/api/n-api.md index 12aad7583f6d93..9c56e50e83f299 100644 --- a/doc/api/n-api.md +++ b/doc/api/n-api.md @@ -3394,6 +3394,31 @@ support it: * If the function is not available, provide an alternate implementation that does not use the function. +## Memory Management + +### napi_adjust_external_memory + +```C +NAPI_EXTERN napi_status napi_adjust_external_memory(napi_env env, + int64_t change_in_bytes, + int64_t* result); +``` + +- `[in] env`: The environment that the API is invoked under. +- `[in] change_in_bytes`: The change in externally allocated memory that is +kept alive by JavaScript objects. +- `[out] result`: The adjusted value + +Returns `napi_ok` if the API succeeded. + +This function gives V8 an indication of the amount of externally allocated +memory that is kept alive by JavaScript objects (i.e. a JavaScript object +that points to its own memory allocated by a native module). Registering +externally allocated memory will trigger global garbage collections more +often than it would otherwise. 
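As a rough sketch of the intended usage (assuming the addon tracks a byte count `size` for a native allocation tied to a JavaScript object; error handling elided):

```C
// When the native allocation is created, report it to V8. `size` is the
// number of bytes the addon allocated for this object (an assumption here).
int64_t adjusted;
napi_status status =
    napi_adjust_external_memory(env, (int64_t)size, &adjusted);
if (status != napi_ok) {
  /* handle the error */
}

// When the native allocation is released (e.g. in a finalize callback),
// report a negative delta so the accounting returns to its previous level.
status = napi_adjust_external_memory(env, -(int64_t)size, &adjusted);
```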
+ ## Promises diff --git a/src/node_api.cc b/src/node_api.cc index fd31817e4a8f71..a2322295fccbaa 100644 --- a/src/node_api.cc +++ b/src/node_api.cc @@ -3216,6 +3216,19 @@ napi_status napi_get_node_version(napi_env env, return napi_clear_last_error(env); } +napi_status napi_adjust_external_memory(napi_env env, + int64_t change_in_bytes, + int64_t* adjusted_value) { + CHECK_ENV(env); + CHECK_ARG(env, &change_in_bytes); + CHECK_ARG(env, adjusted_value); + + *adjusted_value = env->isolate->AdjustAmountOfExternalAllocatedMemory( + change_in_bytes); + + return napi_clear_last_error(env); +} + namespace uvimpl { static napi_status ConvertUVErrorCode(int code) { diff --git a/src/node_api.h b/src/node_api.h index 6a4b2941879ff0..702ddf2d9e6a0e 100644 --- a/src/node_api.h +++ b/src/node_api.h @@ -557,6 +557,11 @@ NAPI_EXTERN napi_status napi_is_promise(napi_env env, napi_value promise, bool* is_promise); +// Memory management +NAPI_EXTERN napi_status napi_adjust_external_memory(napi_env env, + int64_t change_in_bytes, + int64_t* adjusted_value); + EXTERN_C_END #endif // SRC_NODE_API_H_ diff --git a/test/addons-napi/test_general/test.js b/test/addons-napi/test_general/test.js index 10cf8b6e40132b..ec59922639e949 100644 --- a/test/addons-napi/test_general/test.js +++ b/test/addons-napi/test_general/test.js @@ -91,3 +91,8 @@ z = null; global.gc(); assert.strictEqual(test_general.finalizeWasCalled(), false, 'finalize callback was not called upon garbage collection'); + +// test napi_adjust_external_memory +const adjustedValue = test_general.testAdjustExternalMemory(); +assert.strictEqual(typeof adjustedValue, 'number'); +assert(adjustedValue > 0); diff --git a/test/addons-napi/test_general/test_general.c b/test/addons-napi/test_general/test_general.c index ecec3e014ba0b1..ea1f2ece0a5d30 100644 --- a/test/addons-napi/test_general/test_general.c +++ b/test/addons-napi/test_general/test_general.c @@ -205,6 +205,16 @@ napi_value finalize_was_called(napi_env env, napi_callback_info info) { return it_was_called; } +napi_value testAdjustExternalMemory(napi_env env, napi_callback_info info) { + napi_value result; + int64_t adjustedValue; + + NAPI_CALL(env, napi_adjust_external_memory(env, 1, &adjustedValue)); + NAPI_CALL(env, napi_create_double(env, adjustedValue, &result)); + + return result; +} + void Init(napi_env env, napi_value exports, napi_value module, void* priv) { napi_property_descriptor descriptors[] = { DECLARE_NAPI_PROPERTY("testStrictEquals", testStrictEquals), @@ -222,6 +232,7 @@ void Init(napi_env env, napi_value exports, napi_value module, void* priv) { DECLARE_NAPI_PROPERTY("testFinalizeWrap", test_finalize_wrap), DECLARE_NAPI_PROPERTY("finalizeWasCalled", finalize_was_called), DECLARE_NAPI_PROPERTY("derefItemWasCalled", deref_item_was_called), + DECLARE_NAPI_PROPERTY("testAdjustExternalMemory", testAdjustExternalMemory) }; NAPI_CALL_RETURN_VOID(env, napi_define_properties( From 3b4708b025c4a4ad4e99d980a64a7c49a84cc9bf Mon Sep 17 00:00:00 2001 From: Gabriel Schulhof Date: Wed, 6 Sep 2017 13:00:43 +0300 Subject: [PATCH 143/227] n-api: implement napi_run_script Fixes: https://github.com/nodejs/abi-stable-node/issues/51 Backport-PR-URL: https://github.com/nodejs/node/pull/19447 PR-URL: https://github.com/nodejs/node/pull/15216 Reviewed-By: Anna Henningsen Reviewed-By: Benjamin Gruenbaum Reviewed-By: Colin Ihrig Reviewed-By: Michael Dawson Reviewed-By: Timothy Gu --- doc/api/n-api.md | 21 +++++++++++++++ src/node_api.cc | 27 ++++++++++++++++++++ 
src/node_api.h | 5 ++++ test/addons-napi/test_general/testNapiRun.js | 12 +++++++++ test/addons-napi/test_general/test_general.c | 13 ++++++++++ 5 files changed, 78 insertions(+) create mode 100644 test/addons-napi/test_general/testNapiRun.js diff --git a/doc/api/n-api.md b/doc/api/n-api.md index 9c56e50e83f299..80d83612509ca1 100644 --- a/doc/api/n-api.md +++ b/doc/api/n-api.md @@ -43,6 +43,7 @@ The documentation for N-API is structured as follows: * [Object Wrap][] * [Asynchronous Operations][] * [Promises][] +* [Script Execution][] The N-API is a C API that ensures ABI stability across Node.js versions and different compiler levels. However, we also understand that a C++ @@ -3556,6 +3557,25 @@ NAPI_EXTERN napi_status napi_is_promise(napi_env env, - `[out] is_promise`: Flag indicating whether `promise` is a native promise object - that is, a promise object created by the underlying engine. +## Script execution + +N-API provides an API for executing a string containing JavaScript using the +underlying JavaScript engine. + +### napi_run_script + +```C +NAPI_EXTERN napi_status napi_run_script(napi_env env, + napi_value script, + napi_value* result); +``` + +- `[in] env`: The environment that the API is invoked under. +- `[in] script`: A JavaScript string containing the script to execute. +- `[out] result`: The value resulting from having executed the script. + [Promises]: #n_api_promises [Asynchronous Operations]: #n_api_asynchronous_operations [Basic N-API Data Types]: #n_api_basic_n_api_data_types @@ -3565,6 +3585,7 @@ object - that is, a promise object created by the underlying engine. [Native Abstractions for Node.js]: https://github.com/nodejs/nan [Object Lifetime Management]: #n_api_object_lifetime_management [Object Wrap]: #n_api_object_wrap +[Script Execution]: #n_api_script_execution [Section 9.1.6]: https://tc39.github.io/ecma262/#sec-ordinary-object-internal-methods-and-internal-slots-defineownproperty-p-desc [Section 12.5.5]: https://tc39.github.io/ecma262/#sec-typeof-operator [Section 24.3]: https://tc39.github.io/ecma262/#sec-dataview-objects diff --git a/src/node_api.cc b/src/node_api.cc index a2322295fccbaa..9c8e0edec94d40 100644 --- a/src/node_api.cc +++ b/src/node_api.cc @@ -3426,3 +3426,30 @@ NAPI_EXTERN napi_status napi_is_promise(napi_env env, return napi_clear_last_error(env); } + +NAPI_EXTERN napi_status napi_run_script(napi_env env, + napi_value script, + napi_value* result) { + NAPI_PREAMBLE(env); + CHECK_ARG(env, script); + CHECK_ARG(env, result); + + v8::Local v8_script = v8impl::V8LocalValueFromJsValue(script); + + if (!v8_script->IsString()) { + return napi_set_last_error(env, napi_string_expected); + } + + v8::Local context = env->isolate->GetCurrentContext(); + + auto maybe_script = v8::Script::Compile(context, + v8::Local::Cast(v8_script)); + CHECK_MAYBE_EMPTY(env, maybe_script, napi_generic_failure); + + auto script_result = + maybe_script.ToLocalChecked()->Run(context); + CHECK_MAYBE_EMPTY(env, script_result, napi_generic_failure); + + *result = v8impl::JsValueFromV8LocalValue(script_result.ToLocalChecked()); + return GET_RETURN_STATUS(env); +} diff --git a/src/node_api.h b/src/node_api.h index 702ddf2d9e6a0e..807595777dd947 100644 --- a/src/node_api.h +++ b/src/node_api.h @@ -562,6 +562,11 @@ NAPI_EXTERN napi_status napi_adjust_external_memory(napi_env env, int64_t change_in_bytes, int64_t* adjusted_value); +// Runnig a script +NAPI_EXTERN napi_status napi_run_script(napi_env env, + napi_value script, + napi_value* result); + EXTERN_C_END 
#endif // SRC_NODE_API_H_ diff --git a/test/addons-napi/test_general/testNapiRun.js b/test/addons-napi/test_general/testNapiRun.js new file mode 100644 index 00000000000000..d7534ecf9c3912 --- /dev/null +++ b/test/addons-napi/test_general/testNapiRun.js @@ -0,0 +1,12 @@ +'use strict'; + +const common = require('../../common'); +const assert = require('assert'); + +// addon is referenced through the eval expression in testFile +// eslint-disable-next-line no-unused-vars +const addon = require(`./build/${common.buildType}/test_general`); + +assert.strictEqual(addon.testNapiRun('(41.92 + 0.08);'), 42, + 'napi_run_script() works correctly'); +assert.throws(() => addon.testNapiRun({ abc: 'def' }), /string was expected/); diff --git a/test/addons-napi/test_general/test_general.c b/test/addons-napi/test_general/test_general.c index ea1f2ece0a5d30..287c1d7cb8cb34 100644 --- a/test/addons-napi/test_general/test_general.c +++ b/test/addons-napi/test_general/test_general.c @@ -1,4 +1,5 @@ #include +#include #include "../common.h" napi_value testStrictEquals(napi_env env, napi_callback_info info) { @@ -215,12 +216,24 @@ napi_value testAdjustExternalMemory(napi_env env, napi_callback_info info) { return result; } +napi_value testNapiRun(napi_env env, napi_callback_info info) { + napi_value script, result; + size_t argc = 1; + + NAPI_CALL(env, napi_get_cb_info(env, info, &argc, &script, NULL, NULL)); + + NAPI_CALL(env, napi_run_script(env, script, &result)); + + return result; +} + void Init(napi_env env, napi_value exports, napi_value module, void* priv) { napi_property_descriptor descriptors[] = { DECLARE_NAPI_PROPERTY("testStrictEquals", testStrictEquals), DECLARE_NAPI_PROPERTY("testGetPrototype", testGetPrototype), DECLARE_NAPI_PROPERTY("testGetVersion", testGetVersion), DECLARE_NAPI_PROPERTY("testGetNodeVersion", testGetNodeVersion), + DECLARE_NAPI_PROPERTY("testNapiRun", testNapiRun), DECLARE_NAPI_PROPERTY("doInstanceOf", doInstanceOf), DECLARE_NAPI_PROPERTY("getUndefined", getUndefined), DECLARE_NAPI_PROPERTY("getNull", getNull), From 676cff48bd5a2efd33906b821442c284cbb44014 Mon Sep 17 00:00:00 2001 From: Gabriel Schulhof Date: Sat, 9 Sep 2017 12:15:14 +0300 Subject: [PATCH 144/227] n-api: stop creating references to primitives The binding testing napi_wrap() creates references to primitives passed into the binding in its second parameter. This is unnecessary and not at all the point of the test. Additionally, creating persistent references to primitive values may not be supported by all VMs, since primitives are best persisted in their native form. Instead, the point of the test is to make sure that the finalize callback gets called when it should get called, that it gets called with the correct pointer, and that it does not get called when it should not get called. Creating persistent references is not necessary for verifying this. 
Backport-PR-URL: https://github.com/nodejs/node/pull/19447 PR-URL: https://github.com/nodejs/node/pull/15289 Reviewed-By: Jason Ginchereau Reviewed-By: Colin Ihrig Re: https://github.com/nodejs/node-chakracore/issues/380 --- doc/api/n-api.md | 2 +- src/node_api.cc | 10 ++++++++-- test/addons-napi/test_general/test.js | 10 +++++----- test/addons-napi/test_general/test_general.c | 17 +++++++---------- 4 files changed, 21 insertions(+), 18 deletions(-) diff --git a/doc/api/n-api.md b/doc/api/n-api.md index 80d83612509ca1..0efa21056f0e74 100644 --- a/doc/api/n-api.md +++ b/doc/api/n-api.md @@ -787,7 +787,7 @@ NODE_EXTERN napi_status napi_create_reference(napi_env env, - `[in] env`: The environment that the API is invoked under. - `[in] value`: `napi_value` representing the Object to which we want -a reference to. +a reference. - `[in] initial_refcount`: Initial reference count for the new reference. - `[out] result`: `napi_ref` pointing to the new reference. diff --git a/src/node_api.cc b/src/node_api.cc index 9c8e0edec94d40..5b70b9678dd2e7 100644 --- a/src/node_api.cc +++ b/src/node_api.cc @@ -2461,8 +2461,14 @@ napi_status napi_create_reference(napi_env env, CHECK_ARG(env, value); CHECK_ARG(env, result); - v8impl::Reference* reference = v8impl::Reference::New( - env, v8impl::V8LocalValueFromJsValue(value), initial_refcount, false); + v8::Local v8_value = v8impl::V8LocalValueFromJsValue(value); + + if (!(v8_value->IsObject() || v8_value->IsFunction())) { + return napi_set_last_error(env, napi_object_expected); + } + + v8impl::Reference* reference = + v8impl::Reference::New(env, v8_value, initial_refcount, false); *result = reinterpret_cast(reference); return napi_clear_last_error(env); diff --git a/test/addons-napi/test_general/test.js b/test/addons-napi/test_general/test.js index ec59922639e949..51ce9563a7196e 100644 --- a/test/addons-napi/test_general/test.js +++ b/test/addons-napi/test_general/test.js @@ -60,7 +60,7 @@ assert.strictEqual(test_general.testNapiTypeof(null), 'null'); // Ensure that garbage collecting an object with a wrapped native item results // in the finalize callback being called. let w = {}; -test_general.wrap(w, []); +test_general.wrap(w); w = null; global.gc(); assert.strictEqual(test_general.derefItemWasCalled(), true, @@ -69,17 +69,17 @@ assert.strictEqual(test_general.derefItemWasCalled(), true, // Assert that wrapping twice fails. const x = {}; -test_general.wrap(x, 25); +test_general.wrap(x); assert.throws(function() { - test_general.wrap(x, 'Blah'); + test_general.wrap(x); }, Error); // Ensure that wrapping, removing the wrap, and then wrapping again works. 
const y = {}; -test_general.wrap(y, -12); +test_general.wrap(y); test_general.removeWrap(y); assert.doesNotThrow(function() { - test_general.wrap(y, 're-wrap!'); + test_general.wrap(y); }, Error, 'Wrapping twice succeeds if a remove_wrap() separates the instances'); // Ensure that removing a wrap and garbage collecting does not fire the diff --git a/test/addons-napi/test_general/test_general.c b/test/addons-napi/test_general/test_general.c index 287c1d7cb8cb34..048066d25c789b 100644 --- a/test/addons-napi/test_general/test_general.c +++ b/test/addons-napi/test_general/test_general.c @@ -143,8 +143,10 @@ static bool deref_item_called = false; static void deref_item(napi_env env, void* data, void* hint) { (void) hint; + NAPI_ASSERT_RETURN_VOID(env, data == &deref_item_called, + "Finalize callback was called with the correct pointer"); + deref_item_called = true; - NAPI_CALL_RETURN_VOID(env, napi_delete_reference(env, (napi_ref)data)); } napi_value deref_item_was_called(napi_env env, napi_callback_info info) { @@ -156,15 +158,13 @@ napi_value deref_item_was_called(napi_env env, napi_callback_info info) { } napi_value wrap(napi_env env, napi_callback_info info) { - size_t argc = 2; - napi_value argv[2]; - napi_ref payload; + size_t argc = 1; + napi_value to_wrap; deref_item_called = false; - NAPI_CALL(env, napi_get_cb_info(env, info, &argc, argv, NULL, NULL)); - NAPI_CALL(env, napi_create_reference(env, argv[1], 1, &payload)); - NAPI_CALL(env, napi_wrap(env, argv[0], payload, deref_item, NULL, NULL)); + NAPI_CALL(env, napi_get_cb_info(env, info, &argc, &to_wrap, NULL, NULL)); + NAPI_CALL(env, napi_wrap(env, to_wrap, &deref_item_called, deref_item, NULL, NULL)); return NULL; } @@ -176,9 +176,6 @@ napi_value remove_wrap(napi_env env, napi_callback_info info) { NAPI_CALL(env, napi_get_cb_info(env, info, &argc, &wrapped, NULL, NULL)); NAPI_CALL(env, napi_remove_wrap(env, wrapped, &data)); - if (data != NULL) { - NAPI_CALL(env, napi_delete_reference(env, (napi_ref)data)); - } return NULL; } From 7588eead2af6626fefcfb2bd434df302234a35f6 Mon Sep 17 00:00:00 2001 From: Anna Henningsen Date: Tue, 8 Aug 2017 20:18:16 +0200 Subject: [PATCH 145/227] n-api: use AsyncResource for Work tracking MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Enable combining N-API async work with async-hooks. Backport-PR-URL: https://github.com/nodejs/node/pull/19447 PR-URL: https://github.com/nodejs/node/pull/14697 Reviewed-By: Tobias Nießen Reviewed-By: Refael Ackermann Reviewed-By: James M Snell Reviewed-By: Jason Ginchereau Reviewed-By: Michael Dawson --- doc/api/n-api.md | 20 ++++++++++++++ src/node_api.cc | 32 +++++++++++++++++++---- src/node_api.h | 2 ++ test/addons-napi/test_async/test.js | 4 +-- test/addons-napi/test_async/test_async.cc | 31 +++++++++++++--------- 5 files changed, 69 insertions(+), 20 deletions(-) diff --git a/doc/api/n-api.md b/doc/api/n-api.md index 0efa21056f0e74..0f04c21bd7fa37 100644 --- a/doc/api/n-api.md +++ b/doc/api/n-api.md @@ -3256,10 +3256,16 @@ callback invocation, even when it was cancelled. ### napi_create_async_work ```C NAPI_EXTERN napi_status napi_create_async_work(napi_env env, + napi_value async_resource, + const char* async_resource_name, napi_async_execute_callback execute, napi_async_complete_callback complete, void* data, @@ -3267,6 +3273,10 @@ napi_status napi_create_async_work(napi_env env, ``` - `[in] env`: The environment that the API is invoked under. 
+- `[in] async_resource`: An optional object associated with the async work + that will be passed to possible async_hooks [`init` hooks][]. +- `[in] async_resource_name`: An identifier for the kind of resource that is +being provided for diagnostic information exposed by the `async_hooks` API. - `[in] execute`: The native function which should be called to excute the logic asynchronously. - `[in] complete`: The native function which will be called when the @@ -3282,6 +3292,14 @@ This API allocates a work object that is used to execute logic asynchronously. It should be freed using [`napi_delete_async_work`][] once the work is no longer required. +`async_resource_name` should be a null-terminated, UTF-8-encoded string. + +*Note*: The `async_resource_name` identifier is provided by the user and should +be representative of the type of async work being performed. It is also +recommended to apply namespacing to the identifier, e.g. by including the +module name. See the [`async_hooks` documentation][async_hooks `type`] +for more information. + ### napi_delete_async_work ```C -NAPI_EXTERN napi_status +napi_status napi_get_last_error_info(napi_env env, const napi_extended_error_info** result); ``` @@ -515,8 +516,8 @@ This API returns a JavaScript RangeError with the text provided. added: v8.0.0 --> ```C -NAPI_EXTERN napi_status napi_get_and_clear_last_exception(napi_env env, - napi_value* result); +napi_status napi_get_and_clear_last_exception(napi_env env, + napi_value* result); ``` - `[in] env`: The environment that the API is invoked under. @@ -531,7 +532,7 @@ This API returns true if an exception is pending. added: v8.0.0 --> ```C -NAPI_EXTERN napi_status napi_is_exception_pending(napi_env env, bool* result); +napi_status napi_is_exception_pending(napi_env env, bool* result); ``` - `[in] env`: The environment that the API is invoked under. @@ -551,7 +552,7 @@ thrown to immediately terminate the process. added: REPLACEME --> ```C -NAPI_EXTERN NAPI_NO_RETURN void napi_fatal_error(const char* location, const char* message); +NAPI_NO_RETURN void napi_fatal_error(const char* location, const char* message); ``` - `[in] location`: Optional location at which the error occurred. @@ -718,10 +719,10 @@ reverse order from which they were created. added: v8.0.0 --> ```C -NAPI_EXTERN napi_status napi_escape_handle(napi_env env, - napi_escapable_handle_scope scope, - napi_value escapee, - napi_value* result); +napi_status napi_escape_handle(napi_env env, + napi_escapable_handle_scope scope, + napi_value escapee, + napi_value* result); ``` - `[in] env`: The environment that the API is invoked under. @@ -1478,10 +1479,10 @@ of the ECMAScript Language Specification. added: v8.0.0 --> ```C -NAPI_EXTERN napi_status napi_create_string_latin1(napi_env env, - const char* str, - size_t length, - napi_value* result); +napi_status napi_create_string_latin1(napi_env env, + const char* str, + size_t length, + napi_value* result); ``` - `[in] env`: The environment that the API is invoked under. @@ -1811,11 +1812,11 @@ JavaScript Number added: v8.0.0 --> ```C -NAPI_EXTERN napi_status napi_get_value_string_latin1(napi_env env, - napi_value value, - char* buf, - size_t bufsize, - size_t* result) +napi_status napi_get_value_string_latin1(napi_env env, + napi_value value, + char* buf, + size_t bufsize, + size_t* result) ``` - `[in] env`: The environment that the API is invoked under. @@ -2790,8 +2791,8 @@ in as arguments to the function. Returns `napi_ok` if the API succeeded. 
This method allows a JavaScript function object to be called from a native -add-on. This is an primary mechanism of calling back *from* the add-on's -native code *into* JavaScript. For special cases like calling into JavaScript +add-on. This is the primary mechanism of calling back *from* the add-on's +native code *into* JavaScript. For the special case of calling into JavaScript after an async operation, see [`napi_make_callback`][]. A sample use case might look as follows. Consider the following JavaScript @@ -3003,39 +3004,6 @@ status = napi_new_instance(env, constructor, argc, argv, &value); Returns `napi_ok` if the API succeeded. -### *napi_make_callback* - -```C -napi_status napi_make_callback(napi_env env, - napi_value recv, - napi_value func, - int argc, - const napi_value* argv, - napi_value* result) -``` - -- `[in] env`: The environment that the API is invoked under. -- `[in] recv`: The `this` object passed to the called function. -- `[in] func`: `napi_value` representing the JavaScript function -to be invoked. -- `[in] argc`: The count of elements in the `argv` array. -- `[in] argv`: Array of JavaScript values as `napi_value` -representing the arguments to the function. -- `[out] result`: `napi_value` representing the JavaScript object returned. - -Returns `napi_ok` if the API succeeded. - -This method allows a JavaScript function object to be called from a native -add-on. This API is similar to `napi_call_function`. However, it is used to call -*from* native code back *into* JavaScript *after* returning from an async -operation (when there is no other script on the stack). It is a fairly simple -wrapper around `node::MakeCallback`. - -For an example on how to use `napi_make_callback`, see the section on -[Asynchronous Operations][]. - ## Object Wrap N-API offers a way to "wrap" C++ classes and instances so that the class @@ -3214,7 +3182,7 @@ restoring the JavaScript object's prototype chain. If a finalize callback was associated with the wrapping, it will no longer be called when the JavaScript object becomes garbage-collected. -## Asynchronous Operations +## Simple Asynchronous Operations Addon modules often need to leverage async helpers from libuv as part of their implementation. This allows them to schedule work to be executed asynchronously @@ -3250,8 +3218,8 @@ Once created the async worker can be queued for execution using the [`napi_queue_async_work`][] function: ```C -NAPI_EXTERN napi_status napi_queue_async_work(napi_env env, - napi_async_work work); +napi_status napi_queue_async_work(napi_env env, + napi_async_work work); ``` [`napi_cancel_async_work`][] can be used if the work needs @@ -3271,7 +3239,6 @@ changes: description: Added `async_resource` and `async_resource_name` parameters. --> ```C -NAPI_EXTERN napi_status napi_create_async_work(napi_env env, napi_value async_resource, napi_value async_resource_name, @@ -3314,8 +3281,8 @@ for more information. added: v8.0.0 --> ```C -NAPI_EXTERN napi_status napi_delete_async_work(napi_env env, - napi_async_work work); +napi_status napi_delete_async_work(napi_env env, + napi_async_work work); ``` - `[in] env`: The environment that the API is invoked under. @@ -3330,8 +3297,8 @@ This API frees a previously allocated work object. added: v8.0.0 --> ```C -NAPI_EXTERN napi_status napi_queue_async_work(napi_env env, - napi_async_work work); +napi_status napi_queue_async_work(napi_env env, + napi_async_work work); ``` - `[in] env`: The environment that the API is invoked under. @@ -3347,8 +3314,8 @@ for execution. 
added: v8.0.0 --> ```C -NAPI_EXTERN napi_status napi_cancel_async_work(napi_env env, - napi_async_work work); +napi_status napi_cancel_async_work(napi_env env, + napi_async_work work); ``` - `[in] env`: The environment that the API is invoked under. @@ -3363,6 +3330,93 @@ the `complete` callback will be invoked with a status value of `napi_cancelled`. The work should not be deleted before the `complete` callback invocation, even if it has been successfully cancelled. +## Custom Asynchronous Operations +The simple asynchronous work APIs above may not be appropriate for every +scenario, because with those the async execution still happens on the main +event loop. When using any other async mechanism, the following APIs are +necessary to ensure an async operation is properly tracked by the runtime. + +### *napi_async_init** + +```C +napi_status napi_async_init(napi_env env, + napi_value async_resource, + napi_value async_resource_name, + napi_async_context* result) +``` + +- `[in] env`: The environment that the API is invoked under. +- `[in] async_resource`: An optional object associated with the async work + that will be passed to possible `async_hooks` [`init` hooks][]. +- `[in] async_resource_name`: Required identifier for the kind of resource + that is being provided for diagnostic information exposed by the + `async_hooks` API. +- `[out] result`: The initialized async context. + +Returns `napi_ok` if the API succeeded. + +### *napi_async_destroy** + +```C +napi_status napi_async_destroy(napi_env env, + napi_async_context async_context); +``` + +- `[in] env`: The environment that the API is invoked under. +- `[in] async_context`: The async context to be destroyed. + +Returns `napi_ok` if the API succeeded. + +### *napi_make_callback* + +```C +napi_status napi_make_callback(napi_env env, + napi_async_context async_context, + napi_value recv, + napi_value func, + int argc, + const napi_value* argv, + napi_value* result) +``` + +- `[in] env`: The environment that the API is invoked under. +- `[in] async_context`: Context for the async operation that is + invoking the callback. This should normally be a value previously + obtained from [`napi_async_init`][]. However `NULL` is also allowed, + which indicates the current async context (if any) is to be used + for the callback. +- `[in] recv`: The `this` object passed to the called function. +- `[in] func`: `napi_value` representing the JavaScript function +to be invoked. +- `[in] argc`: The count of elements in the `argv` array. +- `[in] argv`: Array of JavaScript values as `napi_value` +representing the arguments to the function. +- `[out] result`: `napi_value` representing the JavaScript object returned. + +Returns `napi_ok` if the API succeeded. + +This method allows a JavaScript function object to be called from a native +add-on. This API is similar to `napi_call_function`. However, it is used to call +*from* native code back *into* JavaScript *after* returning from an async +operation (when there is no other script on the stack). It is a fairly simple +wrapper around `node::MakeCallback`. + +Note it is *not* necessary to use `napi_make_callback` from within a +`napi_async_complete_callback`; in that situation the callback's async +context has already been set up, so a direct call to `napi_call_function` +is sufficient and appropriate. Use of the `napi_make_callback` function +may be required when implementing custom async behavior that does not use +`napi_create_async_work`. 
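Taken together, a completion handler for a custom async mechanism might look roughly like the sketch below. The names `env`, `recv` and `cb` are assumed to have been captured when the operation was scheduled, and `"example:work"` is an arbitrary resource name; error handling is elided.

```C
// Create a resource name for async_hooks diagnostics.
napi_value resource_name;
napi_status status =
    napi_create_string_utf8(env, "example:work", -1, &resource_name);
if (status != napi_ok) return;

// Establish an async context, call back into JavaScript under it, then
// destroy the context once the callback has completed.
napi_async_context context;
status = napi_async_init(env, NULL, resource_name, &context);
if (status != napi_ok) return;

napi_value result;
status = napi_make_callback(env, context, recv, cb, 0, NULL, &result);

napi_async_destroy(env, context);
```

In practice the context would usually be created once when the operation is scheduled and destroyed when the operation finishes, rather than being set up around every individual callback.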
+ ## Version Management ### napi_get_node_version @@ -3378,7 +3432,6 @@ typedef struct { const char* release; } napi_node_version; -NAPI_EXTERN napi_status napi_get_node_version(napi_env env, const napi_node_version** version); ``` @@ -3399,8 +3452,8 @@ The returned buffer is statically allocated and does not need to be freed. added: REPLACEME --> ```C -NAPI_EXTERN napi_status napi_get_version(napi_env env, - uint32_t* result); +napi_status napi_get_version(napi_env env, + uint32_t* result); ``` - `[in] env`: The environment that the API is invoked under. @@ -3508,9 +3561,9 @@ deferred = NULL; added: REPLACEME --> ```C -NAPI_EXTERN napi_status napi_create_promise(napi_env env, - napi_deferred* deferred, - napi_value* promise); +napi_status napi_create_promise(napi_env env, + napi_deferred* deferred, + napi_value* promise); ``` - `[in] env`: The environment that the API is invoked under. @@ -3528,9 +3581,9 @@ This API creates a deferred object and a JavaScript promise. added: REPLACEME --> ```C -NAPI_EXTERN napi_status napi_resolve_deferred(napi_env env, - napi_deferred deferred, - napi_value resolution); +napi_status napi_resolve_deferred(napi_env env, + napi_deferred deferred, + napi_value resolution); ``` - `[in] env`: The environment that the API is invoked under. @@ -3551,9 +3604,9 @@ The deferred object is freed upon successful completion. added: REPLACEME --> ```C -NAPI_EXTERN napi_status napi_reject_deferred(napi_env env, - napi_deferred deferred, - napi_value rejection); +napi_status napi_reject_deferred(napi_env env, + napi_deferred deferred, + napi_value rejection); ``` - `[in] env`: The environment that the API is invoked under. @@ -3574,9 +3627,9 @@ The deferred object is freed upon successful completion. added: REPLACEME --> ```C -NAPI_EXTERN napi_status napi_is_promise(napi_env env, - napi_value promise, - bool* is_promise); +napi_status napi_is_promise(napi_env env, + napi_value promise, + bool* is_promise); ``` - `[in] env`: The environment that the API is invoked under. @@ -3604,7 +3657,8 @@ NAPI_EXTERN napi_status napi_run_script(napi_env env, - `[out] result`: The value resulting from having executed the script. [Promises]: #n_api_promises -[Asynchronous Operations]: #n_api_asynchronous_operations +[Simple Asynchronous Operations]: #n_api_asynchronous_operations +[Custom Asynchronous Operations]: #n_api_custom_asynchronous_operations [Basic N-API Data Types]: #n_api_basic_n_api_data_types [ECMAScript Language Specification]: https://tc39.github.io/ecma262/ [Error Handling]: #n_api_error_handling diff --git a/src/node_api.cc b/src/node_api.cc index 5efb6b4f549e6a..246b3fd8085a41 100644 --- a/src/node_api.cc +++ b/src/node_api.cc @@ -2770,7 +2770,52 @@ napi_status napi_instanceof(napi_env env, return GET_RETURN_STATUS(env); } +napi_status napi_async_init(napi_env env, + napi_value async_resource, + napi_value async_resource_name, + napi_async_context* result) { + CHECK_ENV(env); + CHECK_ARG(env, async_resource_name); + CHECK_ARG(env, result); + + v8::Isolate* isolate = env->isolate; + v8::Local context = isolate->GetCurrentContext(); + + v8::Local v8_resource; + if (async_resource != nullptr) { + CHECK_TO_OBJECT(env, context, v8_resource, async_resource); + } else { + v8_resource = v8::Object::New(isolate); + } + + v8::Local v8_resource_name; + CHECK_TO_STRING(env, context, v8_resource_name, async_resource_name); + + // TODO(jasongin): Consider avoiding allocation here by using + // a tagged pointer with 2×31 bit fields instead. 
+ node::async_context* async_context = new node::async_context(); + + *async_context = node::EmitAsyncInit(isolate, v8_resource, v8_resource_name); + *result = reinterpret_cast(async_context); + + return napi_clear_last_error(env); +} + +napi_status napi_async_destroy(napi_env env, + napi_async_context async_context) { + CHECK_ENV(env); + CHECK_ARG(env, async_context); + + v8::Isolate* isolate = env->isolate; + node::async_context* node_async_context = + reinterpret_cast(async_context); + node::EmitAsyncDestroy(isolate, *node_async_context); + + return napi_clear_last_error(env); +} + napi_status napi_make_callback(napi_env env, + napi_async_context async_context, napi_value recv, napi_value func, size_t argc, @@ -2791,12 +2836,22 @@ napi_status napi_make_callback(napi_env env, v8::Local v8func; CHECK_TO_FUNCTION(env, v8func, func); - v8::Local callback_result = node::MakeCallback( + node::async_context* node_async_context = + reinterpret_cast(async_context); + if (node_async_context == nullptr) { + static node::async_context empty_context = { 0, 0 }; + node_async_context = &empty_context; + } + + v8::MaybeLocal callback_result = node::MakeCallback( isolate, v8recv, v8func, argc, - reinterpret_cast*>(const_cast(argv))); + reinterpret_cast*>(const_cast(argv)), + *node_async_context); + CHECK_MAYBE_EMPTY(env, callback_result, napi_generic_failure); if (result != nullptr) { - *result = v8impl::JsValueFromV8LocalValue(callback_result); + *result = v8impl::JsValueFromV8LocalValue( + callback_result.ToLocalChecked()); } return GET_RETURN_STATUS(env); diff --git a/src/node_api.h b/src/node_api.h index cdc3a6bdb7ebe3..c7d5e746f1b98d 100644 --- a/src/node_api.h +++ b/src/node_api.h @@ -318,14 +318,6 @@ NAPI_EXTERN napi_status napi_instanceof(napi_env env, napi_value constructor, bool* result); -// Napi version of node::MakeCallback(...) -NAPI_EXTERN napi_status napi_make_callback(napi_env env, - napi_value recv, - napi_value func, - size_t argc, - const napi_value* argv, - napi_value* result); - // Methods to work with napi_callbacks // Gets all callback info in a single call. (Ugly, but faster.) 
@@ -535,6 +527,22 @@ NAPI_EXTERN napi_status napi_queue_async_work(napi_env env, NAPI_EXTERN napi_status napi_cancel_async_work(napi_env env, napi_async_work work); +// Methods for custom handling of async operations +NAPI_EXTERN napi_status napi_async_init(napi_env env, + napi_value async_resource, + napi_value async_resource_name, + napi_async_context* result); + +NAPI_EXTERN napi_status napi_async_destroy(napi_env env, + napi_async_context async_context); + +NAPI_EXTERN napi_status napi_make_callback(napi_env env, + napi_async_context async_context, + napi_value recv, + napi_value func, + size_t argc, + const napi_value* argv, + napi_value* result); // version management NAPI_EXTERN napi_status napi_get_version(napi_env env, uint32_t* result); diff --git a/src/node_api_backport.cc b/src/node_api_backport.cc index 249e3e8100efef..bd2c33242a4af7 100644 --- a/src/node_api_backport.cc +++ b/src/node_api_backport.cc @@ -81,6 +81,26 @@ AsyncResource::~AsyncResource() { object.Reset(); } +async_context EmitAsyncInit(v8::Isolate* isolate, + v8::Local resource, + v8::Local name, + async_id trigger_async_id) { + return async_context(); +} + +void EmitAsyncDestroy(v8::Isolate* isolate, + async_context asyncContext) { +} + +v8::MaybeLocal MakeCallback(v8::Isolate* isolate, + v8::Local recv, + v8::Local callback, + int argc, + v8::Local* argv, + async_context asyncContext) { + return node::MakeCallback(isolate, recv, callback, argc, argv); +} + } // end of namespace node CallbackScope::CallbackScope(node::AsyncResource* work) : diff --git a/src/node_api_backport.h b/src/node_api_backport.h index 549b3ed42033de..9ef12cebd8f677 100644 --- a/src/node_api_backport.h +++ b/src/node_api_backport.h @@ -31,6 +31,21 @@ class CallbackScope { Environment::AsyncCallbackScope callback_scope; }; +NODE_EXTERN async_context EmitAsyncInit(v8::Isolate* isolate, + v8::Local resource, + v8::Local name, + async_id trigger_async_id = -1); + +NODE_EXTERN void EmitAsyncDestroy(v8::Isolate* isolate, + async_context asyncContext); + +v8::MaybeLocal MakeCallback(v8::Isolate* isolate, + v8::Local recv, + v8::Local callback, + int argc, + v8::Local* argv, + async_context asyncContext); + class AsyncResource { public: AsyncResource(v8::Isolate* _isolate, diff --git a/src/node_api_types.h b/src/node_api_types.h index ac8482bf9dcf9d..574cb6ff984f16 100644 --- a/src/node_api_types.h +++ b/src/node_api_types.h @@ -16,6 +16,7 @@ typedef struct napi_ref__ *napi_ref; typedef struct napi_handle_scope__ *napi_handle_scope; typedef struct napi_escapable_handle_scope__ *napi_escapable_handle_scope; typedef struct napi_callback_info__ *napi_callback_info; +typedef struct napi_async_context__ *napi_async_context; typedef struct napi_async_work__ *napi_async_work; typedef struct napi_deferred__ *napi_deferred; diff --git a/test/addons-napi/test_make_callback/binding.cc b/test/addons-napi/test_make_callback/binding.cc index 1ba28c0ef40b72..9032e8b2f3f301 100644 --- a/test/addons-napi/test_make_callback/binding.cc +++ b/test/addons-napi/test_make_callback/binding.cc @@ -24,14 +24,22 @@ napi_value MakeCallback(napi_env env, napi_callback_info info) { NAPI_CALL(env, napi_typeof(env, func, &func_type)); + napi_value resource_name; + NAPI_CALL(env, napi_create_string_utf8(env, "test", -1, &resource_name)); + + napi_async_context context; + NAPI_CALL(env, napi_async_init(env, func, resource_name, &context)); + napi_value result; if (func_type == napi_function) { - NAPI_CALL(env, - napi_make_callback(env, recv, func, argv.size(), argv.data(), 
&result)); + NAPI_CALL(env, napi_make_callback( + env, context, recv, func, argv.size(), argv.data(), &result)); } else { NAPI_ASSERT(env, false, "Unexpected argument type"); } + NAPI_CALL(env, napi_async_destroy(env, context)); + return result; } diff --git a/test/addons-napi/test_make_callback/test.js b/test/addons-napi/test_make_callback/test.js index c4f24872bdb78e..56e2b3f4e2b6c6 100644 --- a/test/addons-napi/test_make_callback/test.js +++ b/test/addons-napi/test_make_callback/test.js @@ -7,7 +7,6 @@ const binding = require(`./build/${common.buildType}/binding`); const makeCallback = binding.makeCallback; function myMultiArgFunc(arg1, arg2, arg3) { - console.log(`MyFunc was called with ${arguments.length} arguments`); assert.strictEqual(arg1, 1); assert.strictEqual(arg2, 2); assert.strictEqual(arg3, 3); diff --git a/test/addons-napi/test_make_callback_recurse/binding.cc b/test/addons-napi/test_make_callback_recurse/binding.cc index f3c80ab103406e..d0ee1ddb4998e6 100644 --- a/test/addons-napi/test_make_callback_recurse/binding.cc +++ b/test/addons-napi/test_make_callback_recurse/binding.cc @@ -12,8 +12,8 @@ napi_value MakeCallback(napi_env env, napi_callback_info info) { napi_value recv = args[0]; napi_value func = args[1]; - napi_make_callback(env, - recv, func, 0 /* argc */, nullptr /* argv */, nullptr /* result */); + napi_make_callback(env, nullptr /* async_context */, + recv, func, 0 /* argc */, nullptr /* argv */, nullptr /* result */); return recv; } From fe87a5944b088ad66ef389d23c0d00981b64c462 Mon Sep 17 00:00:00 2001 From: Sampson Gao Date: Tue, 8 Aug 2017 16:21:56 -0400 Subject: [PATCH 149/227] n-api: napi_is_construct_call->napi_get_new_target Remove napi_is_construct_call and introduce napi_get_new_target. Backport-PR-URL: https://github.com/nodejs/node/pull/19447 PR-URL: https://github.com/nodejs/node/pull/14698 Reviewed-By: Jason Ginchereau Reviewed-By: Colin Ihrig Reviewed-By: Michael Dawson Reviewed-By: James M Snell Reviewed-By: Kyle Farnung --- doc/api/n-api.md | 17 ++++++++-------- src/node_api.cc | 23 ++++++++++++---------- src/node_api.h | 6 +++--- test/addons-napi/6_object_wrap/myobject.cc | 5 +++-- 4 files changed, 27 insertions(+), 24 deletions(-) diff --git a/doc/api/n-api.md b/doc/api/n-api.md index 1d9b405c78a07e..4797e62fd97d46 100644 --- a/doc/api/n-api.md +++ b/doc/api/n-api.md @@ -2928,25 +2928,24 @@ Returns `napi_ok` if the API succeeded. This method is used within a callback function to retrieve details about the call like the arguments and the `this` pointer from a given callback info. -### *napi_is_construct_call* +### *napi_get_new_target* ```C -napi_status napi_is_construct_call(napi_env env, - napi_callback_info cbinfo, - bool* result) +napi_status napi_get_new_target(napi_env env, + napi_callback_info cbinfo, + napi_value* result) ``` - `[in] env`: The environment that the API is invoked under. - `[in] cbinfo`: The callback info passed into the callback function. -- `[out] result`: Whether the native function is being invoked as -a constructor call. +- `[out] result`: The `new.target` of the constructor call. Returns `napi_ok` if the API succeeded. -This API checks if the the current callback was due to a -consructor call. +This API returns the `new.target` of the constructor call. If the current +callback is not a constructor call, the result is `nullptr`. 
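For example, a constructor callback can use this to distinguish `new MyObject()` from a plain `MyObject()` call; a minimal sketch (assuming the usual `env` and `info` parameters of a `napi_callback`, error handling elided):

```C
// Inside a constructor callback: new.target is NULL for plain calls.
napi_value new_target;
napi_status status = napi_get_new_target(env, info, &new_target);
if (status != napi_ok) return NULL;

if (new_target == NULL) {
  // Invoked without `new`; reject the call or re-dispatch as appropriate.
  return NULL;
}

// Invoked with `new`; continue constructing and wrapping the instance.
```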
### *napi_new_instance* ```C -NAPI_NO_RETURN void napi_fatal_error(const char* location, const char* message); +NAPI_NO_RETURN void napi_fatal_error(const char* location, + size_t location_len, + const char* message, + size_t message_len); ``` - `[in] location`: Optional location at which the error occurred. +- `[in] location_len`: The length of the location in bytes, or -1 if it is +null-terminated. - `[in] message`: The message associated with the error. +- `[in] message_len`: The length of the message in bytes, or -1 if it is +null-terminated. The function call does not return, the process will be terminated. @@ -1248,6 +1255,7 @@ added: v8.0.0 ```C napi_status napi_create_function(napi_env env, const char* utf8name, + size_t length, napi_callback cb, void* data, napi_value* result) @@ -1256,6 +1264,8 @@ napi_status napi_create_function(napi_env env, - `[in] env`: The environment that the API is invoked under. - `[in] utf8name`: A string representing the name of the function encoded as UTF8. +- `[in] length`: The length of the utf8name in bytes, or -1 if it is +null-terminated. - `[in] cb`: A function pointer to the native function to be invoked when the created function is invoked from JavaScript. - `[in] data`: Optional arbitrary context data to be passed into the native @@ -3026,6 +3036,7 @@ added: v8.0.0 ```C napi_status napi_define_class(napi_env env, const char* utf8name, + size_t length, napi_callback constructor, void* data, size_t property_count, @@ -3037,6 +3048,8 @@ napi_status napi_define_class(napi_env env, - `[in] utf8name`: Name of the JavaScript constructor function; this is not required to be the same as the C++ class name, though it is recommended for clarity. + - `[in] length`: The length of the utf8name in bytes, or -1 if it is +null-terminated. - `[in] constructor`: Callback function that handles constructing instances of the class. (This should be a static method on the class, not an actual C++ constructor function.) 
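The same explicit-length convention applies to `napi_create_function()`. A minimal sketch, assuming an addon-defined callback `SayHello` and the module's `exports` object (passing `-1` instead of `5` would treat the name as null-terminated):

```C
// Create a function named "hello" (5 bytes) and expose it on exports.
napi_value fn;
napi_status status =
    napi_create_function(env, "hello", 5, SayHello, NULL, &fn);
if (status != napi_ok) return NULL;

status = napi_set_named_property(env, exports, "hello", fn);
if (status != napi_ok) return NULL;
```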
diff --git a/src/node_api.cc b/src/node_api.cc index 7c68abfc6797de..44751a8f945da9 100644 --- a/src/node_api.cc +++ b/src/node_api.cc @@ -938,12 +938,29 @@ napi_status napi_get_last_error_info(napi_env env, } NAPI_NO_RETURN void napi_fatal_error(const char* location, - const char* message) { - node::FatalError(location, message); + size_t location_len, + const char* message, + size_t message_len) { + char* location_string = const_cast(location); + char* message_string = const_cast(message); + if (location_len != -1) { + location_string = reinterpret_cast( + malloc(location_len * sizeof(char) + 1)); + strncpy(location_string, location, location_len); + location_string[location_len] = '\0'; + } + if (message_len != -1) { + message_string = reinterpret_cast( + malloc(message_len * sizeof(char) + 1)); + strncpy(message_string, message, message_len); + message_string[message_len] = '\0'; + } + node::FatalError(location_string, message_string); } napi_status napi_create_function(napi_env env, const char* utf8name, + size_t length, napi_callback cb, void* callback_data, napi_value* result) { @@ -970,7 +987,7 @@ napi_status napi_create_function(napi_env env, if (utf8name != nullptr) { v8::Local name_string; - CHECK_NEW_FROM_UTF8(env, name_string, utf8name); + CHECK_NEW_FROM_UTF8_LEN(env, name_string, utf8name, length); return_value->SetName(name_string); } @@ -981,6 +998,7 @@ napi_status napi_create_function(napi_env env, napi_status napi_define_class(napi_env env, const char* utf8name, + size_t length, napi_callback constructor, void* callback_data, size_t property_count, @@ -1002,7 +1020,7 @@ napi_status napi_define_class(napi_env env, isolate, v8impl::FunctionCallbackWrapper::Invoke, cbdata); v8::Local name_string; - CHECK_NEW_FROM_UTF8(env, name_string, utf8name); + CHECK_NEW_FROM_UTF8_LEN(env, name_string, utf8name, length); tpl->SetClassName(name_string); size_t static_property_count = 0; diff --git a/src/node_api.h b/src/node_api.h index e1a40983a9907d..82f2e1900e2fe4 100644 --- a/src/node_api.h +++ b/src/node_api.h @@ -109,7 +109,9 @@ napi_get_last_error_info(napi_env env, const napi_extended_error_info** result); NAPI_EXTERN NAPI_NO_RETURN void napi_fatal_error(const char* location, - const char* message); + size_t location_len, + const char* message, + size_t message_len); // Getters for defined singletons NAPI_EXTERN napi_status napi_get_undefined(napi_env env, napi_value* result); @@ -154,6 +156,7 @@ NAPI_EXTERN napi_status napi_create_symbol(napi_env env, napi_value* result); NAPI_EXTERN napi_status napi_create_function(napi_env env, const char* utf8name, + size_t length, napi_callback cb, void* data, napi_value* result); @@ -336,6 +339,7 @@ NAPI_EXTERN napi_status napi_get_new_target(napi_env env, NAPI_EXTERN napi_status napi_define_class(napi_env env, const char* utf8name, + size_t length, napi_callback constructor, void* data, size_t property_count, diff --git a/test/addons-napi/4_object_factory/binding.c b/test/addons-napi/4_object_factory/binding.c index fe509c02ad13b6..6bdca80ec365be 100644 --- a/test/addons-napi/4_object_factory/binding.c +++ b/test/addons-napi/4_object_factory/binding.c @@ -16,7 +16,7 @@ napi_value CreateObject(napi_env env, napi_callback_info info) { napi_value Init(napi_env env, napi_value exports) { NAPI_CALL(env, - napi_create_function(env, "exports", CreateObject, NULL, &exports)); + napi_create_function(env, "exports", -1, CreateObject, NULL, &exports)); return exports; } diff --git a/test/addons-napi/5_function_factory/binding.c 
b/test/addons-napi/5_function_factory/binding.c index a2e03dd9bdd6b5..6b2b6f8f0ebe81 100644 --- a/test/addons-napi/5_function_factory/binding.c +++ b/test/addons-napi/5_function_factory/binding.c @@ -4,21 +4,19 @@ napi_value MyFunction(napi_env env, napi_callback_info info) { napi_value str; NAPI_CALL(env, napi_create_string_utf8(env, "hello world", -1, &str)); - return str; } napi_value CreateFunction(napi_env env, napi_callback_info info) { napi_value fn; NAPI_CALL(env, - napi_create_function(env, "theFunction", MyFunction, NULL, &fn)); - + napi_create_function(env, "theFunction", -1, MyFunction, NULL, &fn)); return fn; } napi_value Init(napi_env env, napi_value exports) { NAPI_CALL(env, - napi_create_function(env, "exports", CreateFunction, NULL, &exports)); + napi_create_function(env, "exports", -1, CreateFunction, NULL, &exports)); return exports; } diff --git a/test/addons-napi/6_object_wrap/myobject.cc b/test/addons-napi/6_object_wrap/myobject.cc index 90815253add651..aca91877d3a2ae 100644 --- a/test/addons-napi/6_object_wrap/myobject.cc +++ b/test/addons-napi/6_object_wrap/myobject.cc @@ -22,8 +22,8 @@ void MyObject::Init(napi_env env, napi_value exports) { }; napi_value cons; - NAPI_CALL_RETURN_VOID(env, - napi_define_class(env, "MyObject", New, nullptr, 3, properties, &cons)); + NAPI_CALL_RETURN_VOID(env, napi_define_class( + env, "MyObject", -1, New, nullptr, 3, properties, &cons)); NAPI_CALL_RETURN_VOID(env, napi_create_reference(env, cons, 1, &constructor)); diff --git a/test/addons-napi/7_factory_wrap/binding.cc b/test/addons-napi/7_factory_wrap/binding.cc index 4d02247f85b924..9d8f23c561d77a 100644 --- a/test/addons-napi/7_factory_wrap/binding.cc +++ b/test/addons-napi/7_factory_wrap/binding.cc @@ -16,7 +16,7 @@ napi_value Init(napi_env env, napi_value exports) { NAPI_CALL(env, MyObject::Init(env)); NAPI_CALL(env, - napi_create_function(env, "exports", CreateObject, NULL, &exports)); + napi_create_function(env, "exports", -1, CreateObject, NULL, &exports)); return exports; } diff --git a/test/addons-napi/7_factory_wrap/myobject.cc b/test/addons-napi/7_factory_wrap/myobject.cc index c6d538a7cefbc6..4e1d79c1febc17 100644 --- a/test/addons-napi/7_factory_wrap/myobject.cc +++ b/test/addons-napi/7_factory_wrap/myobject.cc @@ -21,8 +21,8 @@ napi_status MyObject::Init(napi_env env) { }; napi_value cons; - status = - napi_define_class(env, "MyObject", New, nullptr, 1, properties, &cons); + status = napi_define_class( + env, "MyObject", -1, New, nullptr, 1, properties, &cons); if (status != napi_ok) return status; status = napi_create_reference(env, cons, 1, &constructor); diff --git a/test/addons-napi/8_passing_wrapped/myobject.cc b/test/addons-napi/8_passing_wrapped/myobject.cc index 0c24d7696e66ec..19cc7dd2a29493 100644 --- a/test/addons-napi/8_passing_wrapped/myobject.cc +++ b/test/addons-napi/8_passing_wrapped/myobject.cc @@ -17,7 +17,8 @@ napi_status MyObject::Init(napi_env env) { napi_status status; napi_value cons; - status = napi_define_class(env, "MyObject", New, nullptr, 0, nullptr, &cons); + status = napi_define_class( + env, "MyObject", -1, New, nullptr, 0, nullptr, &cons); if (status != napi_ok) return status; status = napi_create_reference(env, cons, 1, &constructor); diff --git a/test/addons-napi/test_constructor/binding.gyp b/test/addons-napi/test_constructor/binding.gyp index 55140e7c379f02..1945a9fd5a711e 100644 --- a/test/addons-napi/test_constructor/binding.gyp +++ b/test/addons-napi/test_constructor/binding.gyp @@ -3,6 +3,10 @@ { "target_name": 
"test_constructor", "sources": [ "test_constructor.c" ] + }, + { + "target_name": "test_constructor_name", + "sources": [ "test_constructor_name.c" ] } ] } diff --git a/test/addons-napi/test_constructor/test2.js b/test/addons-napi/test_constructor/test2.js new file mode 100644 index 00000000000000..64c03cbc684ac3 --- /dev/null +++ b/test/addons-napi/test_constructor/test2.js @@ -0,0 +1,8 @@ +'use strict'; +const common = require('../../common'); +const assert = require('assert'); + +// Testing api calls for a constructor that defines properties +const TestConstructor = + require(`./build/${common.buildType}/test_constructor_name`); +assert.strictEqual(TestConstructor.name, 'MyObject'); diff --git a/test/addons-napi/test_constructor/test_constructor.c b/test/addons-napi/test_constructor/test_constructor.c index 437c32e12517fc..8b5b4e53ea7bc0 100644 --- a/test/addons-napi/test_constructor/test_constructor.c +++ b/test/addons-napi/test_constructor/test_constructor.c @@ -77,7 +77,7 @@ napi_value Init(napi_env env, napi_value exports) { }; napi_value cons; - NAPI_CALL(env, napi_define_class(env, "MyObject", New, + NAPI_CALL(env, napi_define_class(env, "MyObject", -1, New, NULL, sizeof(properties)/sizeof(*properties), properties, &cons)); NAPI_CALL(env, napi_create_reference(env, cons, 1, &constructor_)); diff --git a/test/addons-napi/test_constructor/test_constructor_name.c b/test/addons-napi/test_constructor/test_constructor_name.c new file mode 100644 index 00000000000000..b178e80f49f489 --- /dev/null +++ b/test/addons-napi/test_constructor/test_constructor_name.c @@ -0,0 +1,23 @@ +#include +#include "../common.h" + +napi_ref constructor_; + +napi_value New(napi_env env, napi_callback_info info) { + napi_value _this; + NAPI_CALL(env, napi_get_cb_info(env, info, NULL, NULL, &_this, NULL)); + + return _this; +} + +napi_value Init(napi_env env, napi_value exports) { + napi_value cons; + NAPI_CALL(env, napi_define_class( + env, "MyObject_Extra", 8, New, NULL, 0, NULL, &cons)); + + NAPI_CALL(env, + napi_create_reference(env, cons, 1, &constructor_)); + return cons; +} + +NAPI_MODULE(addon, Init) diff --git a/test/addons-napi/test_env_sharing/compare_env.c b/test/addons-napi/test_env_sharing/compare_env.c index df200f2a96f986..39adfb9b713ab3 100644 --- a/test/addons-napi/test_env_sharing/compare_env.c +++ b/test/addons-napi/test_env_sharing/compare_env.c @@ -15,7 +15,7 @@ napi_value compare(napi_env env, napi_callback_info info) { } napi_value Init(napi_env env, napi_value exports) { - NAPI_CALL(env, napi_create_function(env, "exports", compare, NULL, &exports)); + NAPI_CALL(env, napi_create_function(env, "exports", -1, compare, NULL, &exports)); return exports; } diff --git a/test/addons-napi/test_fatal/test2.js b/test/addons-napi/test_fatal/test2.js new file mode 100644 index 00000000000000..b9bde8f13016cc --- /dev/null +++ b/test/addons-napi/test_fatal/test2.js @@ -0,0 +1,18 @@ +'use strict'; +const common = require('../../common'); +const assert = require('assert'); +const child_process = require('child_process'); +const test_fatal = require(`./build/${common.buildType}/test_fatal`); + +// Test in a child process because the test code will trigger a fatal error +// that crashes the process. 
+if (process.argv[2] === 'child') { + test_fatal.TestStringLength(); + return; +} + +const p = child_process.spawnSync( + process.execPath, [ '--napi-modules', __filename, 'child' ]); +assert.ifError(p.error); +assert.ok(p.stderr.toString().includes( + 'FATAL ERROR: test_fatal::Test fatal message')); diff --git a/test/addons-napi/test_fatal/test_fatal.c b/test/addons-napi/test_fatal/test_fatal.c index f6cb2184a6e8e2..4159fa4b47f567 100644 --- a/test/addons-napi/test_fatal/test_fatal.c +++ b/test/addons-napi/test_fatal/test_fatal.c @@ -2,13 +2,19 @@ #include "../common.h" napi_value Test(napi_env env, napi_callback_info info) { - napi_fatal_error("test_fatal::Test", "fatal message"); + napi_fatal_error("test_fatal::Test", -1, "fatal message", -1); + return NULL; +} + +napi_value TestStringLength(napi_env env, napi_callback_info info) { + napi_fatal_error("test_fatal::TestStringLength", 16, "fatal message", 13); return NULL; } napi_value Init(napi_env env, napi_value exports) { napi_property_descriptor properties[] = { DECLARE_NAPI_PROPERTY("Test", Test), + DECLARE_NAPI_PROPERTY("TestStringLength", TestStringLength), }; NAPI_CALL(env, napi_define_properties( diff --git a/test/addons-napi/test_function/test.js b/test/addons-napi/test_function/test.js index bdb9133adf9346..752e9965b23039 100644 --- a/test/addons-napi/test_function/test.js +++ b/test/addons-napi/test_function/test.js @@ -9,20 +9,23 @@ const test_function = require(`./build/${common.buildType}/test_function`); function func1() { return 1; } -assert.strictEqual(test_function.Test(func1), 1); +assert.strictEqual(test_function.TestCall(func1), 1); function func2() { console.log('hello world!'); return null; } -assert.strictEqual(test_function.Test(func2), null); +assert.strictEqual(test_function.TestCall(func2), null); function func3(input) { return input + 1; } -assert.strictEqual(test_function.Test(func3, 1), 2); +assert.strictEqual(test_function.TestCall(func3, 1), 2); function func4(input) { return func3(input); } -assert.strictEqual(test_function.Test(func4, 1), 2); +assert.strictEqual(test_function.TestCall(func4, 1), 2); + +assert.strictEqual(test_function.TestName.name, 'Name'); +assert.strictEqual(test_function.TestNameShort.name, 'Name_'); diff --git a/test/addons-napi/test_function/test_function.c b/test/addons-napi/test_function/test_function.c index 2cb25db5beaf03..77495d9b8a3a46 100644 --- a/test/addons-napi/test_function/test_function.c +++ b/test/addons-napi/test_function/test_function.c @@ -1,7 +1,7 @@ #include #include "../common.h" -napi_value Test(napi_env env, napi_callback_info info) { +napi_value TestCallFunction(napi_env env, napi_callback_info info) { size_t argc = 10; napi_value args[10]; NAPI_CALL(env, napi_get_cb_info(env, info, &argc, args, NULL, NULL)); @@ -26,10 +26,25 @@ napi_value Test(napi_env env, napi_callback_info info) { return result; } +void TestFunctionName(napi_env env, napi_callback_info info) {} + napi_value Init(napi_env env, napi_value exports) { - napi_value fn; - NAPI_CALL(env, napi_create_function(env, NULL, Test, NULL, &fn)); - NAPI_CALL(env, napi_set_named_property(env, exports, "Test", fn)); + napi_value fn1; + NAPI_CALL(env, napi_create_function( + env, NULL, -1, TestCallFunction, NULL, &fn1)); + + napi_value fn2; + NAPI_CALL(env, napi_create_function( + env, "Name", -1, TestFunctionName, NULL, &fn2)); + + napi_value fn3; + NAPI_CALL(env, napi_create_function( + env, "Name_extra", 5, TestFunctionName, NULL, &fn3)); + + NAPI_CALL(env, napi_set_named_property(env, exports, 
"TestCall", fn1)); + NAPI_CALL(env, napi_set_named_property(env, exports, "TestName", fn2)); + NAPI_CALL(env, napi_set_named_property(env, exports, "TestNameShort", fn3)); + return exports; } diff --git a/test/addons-napi/test_make_callback/binding.cc b/test/addons-napi/test_make_callback/binding.cc index 9032e8b2f3f301..18bf46764be40c 100644 --- a/test/addons-napi/test_make_callback/binding.cc +++ b/test/addons-napi/test_make_callback/binding.cc @@ -45,7 +45,7 @@ napi_value MakeCallback(napi_env env, napi_callback_info info) { napi_value Init(napi_env env, napi_value exports) { napi_value fn; - NAPI_CALL(env, napi_create_function(env, NULL, MakeCallback, NULL, &fn)); + NAPI_CALL(env, napi_create_function(env, NULL, -1, MakeCallback, NULL, &fn)); NAPI_CALL(env, napi_set_named_property(env, exports, "makeCallback", fn)); return exports; } diff --git a/test/addons-napi/test_make_callback_recurse/binding.cc b/test/addons-napi/test_make_callback_recurse/binding.cc index d0ee1ddb4998e6..ea823294352490 100644 --- a/test/addons-napi/test_make_callback_recurse/binding.cc +++ b/test/addons-napi/test_make_callback_recurse/binding.cc @@ -20,7 +20,7 @@ napi_value MakeCallback(napi_env env, napi_callback_info info) { napi_value Init(napi_env env, napi_value exports) { napi_value fn; - NAPI_CALL(env, napi_create_function(env, NULL, MakeCallback, NULL, &fn)); + NAPI_CALL(env, napi_create_function(env, NULL, -1, MakeCallback, NULL, &fn)); NAPI_CALL(env, napi_set_named_property(env, exports, "makeCallback", fn)); return exports; } From 5e29823d1d392218050bebfba9f68f569c9e5128 Mon Sep 17 00:00:00 2001 From: Gabriel Schulhof Date: Fri, 18 Aug 2017 13:30:05 +0300 Subject: [PATCH 151/227] n-api: remove n-api module loading flag Remove the command line flag that was needed for N-API module loading. Re: https://github.com/nodejs/vm/issues/9 Backport-PR-URL: https://github.com/nodejs/node/pull/19447 PR-URL: https://github.com/nodejs/node/pull/14902 Reviewed-By: Refael Ackermann Reviewed-By: Colin Ihrig Reviewed-By: Anna Henningsen Reviewed-By: Matteo Collina Reviewed-By: Michael Dawson Reviewed-By: James M Snell Reviewed-By: Hitesh Kanwathirtha --- doc/api/cli.md | 9 ----- doc/api/n-api.md | 8 ---- src/env-inl.h | 1 + src/env.cc | 6 +++ src/env.h | 3 ++ src/node.cc | 38 +++++++------------ src/node_api.cc | 20 +--------- test/addons-napi/test_async/test.js | 2 +- test/addons-napi/test_fatal/test.js | 4 +- .../addons-napi/test_function/test_function.c | 4 +- test/addons-napi/test_warning/binding.gyp | 12 ++++++ test/addons-napi/test_warning/test.js | 18 +++++++++ test/addons-napi/test_warning/test_warning.c | 11 ++++++ test/addons-napi/test_warning/test_warning2.c | 11 ++++++ test/addons-napi/testcfg.py | 2 +- 15 files changed, 83 insertions(+), 66 deletions(-) create mode 100644 test/addons-napi/test_warning/binding.gyp create mode 100644 test/addons-napi/test_warning/test.js create mode 100644 test/addons-napi/test_warning/test_warning.c create mode 100644 test/addons-napi/test_warning/test_warning2.c diff --git a/doc/api/cli.md b/doc/api/cli.md index 281a7fbbfba9c3..5a943b3428c6ca 100644 --- a/doc/api/cli.md +++ b/doc/api/cli.md @@ -119,14 +119,6 @@ added: v6.0.0 Silence all process warnings (including deprecations). -### `--napi-modules` - - -Enable loading native modules compiled with the ABI-stable Node.js API (N-API) -(experimental). 
- ### `--trace-warnings` +```C +NAPI_EXTERN napi_status napi_get_uv_event_loop(napi_env env, + uv_loop_t** loop); +``` + +- `[in] env`: The environment that the API is invoked under. +- `[out] loop`: The current libuv loop instance. + [Promises]: #n_api_promises [Simple Asynchronous Operations]: #n_api_simple_asynchronous_operations [Custom Asynchronous Operations]: #n_api_custom_asynchronous_operations diff --git a/src/node_api.cc b/src/node_api.cc index f1c13214b3ed96..10c8093f5a280b 100644 --- a/src/node_api.cc +++ b/src/node_api.cc @@ -22,7 +22,7 @@ #include "node_api_backport.h" #include "util.h" -#define NAPI_VERSION 1 +#define NAPI_VERSION 2 static napi_status napi_set_last_error(napi_env env, napi_status error_code, @@ -31,8 +31,11 @@ napi_status napi_set_last_error(napi_env env, napi_status error_code, static napi_status napi_clear_last_error(napi_env env); struct napi_env__ { - explicit napi_env__(v8::Isolate* _isolate): isolate(_isolate), - has_instance_available(true), last_error() {} + explicit napi_env__(v8::Isolate* _isolate, uv_loop_t *_loop): + isolate(_isolate), + has_instance_available(true), + last_error(), + loop(_loop) {} ~napi_env__() { last_exception.Reset(); has_instance.Reset(); @@ -49,6 +52,7 @@ struct napi_env__ { bool has_instance_available; napi_extended_error_info last_error; int open_handle_scopes = 0; + uv_loop_t* loop = nullptr; }; #define ENV_OBJECT_TEMPLATE(env, prefix, destination, field_count) \ @@ -780,7 +784,7 @@ napi_env GetEnv(v8::Local context) { if (value->IsExternal()) { result = static_cast(value.As()->Value()); } else { - result = new napi_env__(isolate); + result = new napi_env__(isolate, node::GetCurrentEventLoop(isolate)); auto external = v8::External::New(isolate, result); // We must also stop hard if the result of assigning the env to the global @@ -3494,15 +3498,22 @@ napi_status napi_delete_async_work(napi_env env, napi_async_work work) { return napi_clear_last_error(env); } +napi_status napi_get_uv_event_loop(napi_env env, uv_loop_t** loop) { + CHECK_ENV(env); + CHECK_ARG(env, loop); + *loop = env->loop; + return napi_clear_last_error(env); +} + napi_status napi_queue_async_work(napi_env env, napi_async_work work) { CHECK_ENV(env); CHECK_ARG(env, work); - // Consider: Encapsulate the uv_loop_t into an opaque pointer parameter. - // Currently the environment event loop is the same as the UV default loop. - // Someday (if node ever supports multiple isolates), it may be better to get - // the loop from node::Environment::GetCurrent(env->isolate)->event_loop(); - uv_loop_t* event_loop = uv_default_loop(); + napi_status status; + uv_loop_t* event_loop = nullptr; + status = napi_get_uv_event_loop(env, &event_loop); + if (status != napi_ok) + return napi_set_last_error(env, status); uvimpl::Work* w = reinterpret_cast(work); diff --git a/src/node_api.h b/src/node_api.h index a3a07a64673366..8e5eef8a47728f 100644 --- a/src/node_api.h +++ b/src/node_api.h @@ -14,6 +14,8 @@ #include #include "node_api_types.h" +struct uv_loop_s; // Forward declaration. 
+ #ifdef _WIN32 #ifdef BUILDING_NODE_EXTENSION #ifdef EXTERNAL_NAPI @@ -581,6 +583,10 @@ NAPI_EXTERN napi_status napi_run_script(napi_env env, napi_value script, napi_value* result); +// Return the current libuv event loop for a given environment +NAPI_EXTERN napi_status napi_get_uv_event_loop(napi_env env, + struct uv_loop_s** loop); + EXTERN_C_END #endif // SRC_NODE_API_H_ diff --git a/src/node_api_backport.cc b/src/node_api_backport.cc index 655b61da5cf094..52a0653bfcc591 100644 --- a/src/node_api_backport.cc +++ b/src/node_api_backport.cc @@ -71,6 +71,14 @@ CallbackScope::~CallbackScope() { env->tick_callback_function()->Call(env->process_object(), 0, nullptr); } +uv_loop_t *GetCurrentEventLoop(v8::Isolate *isolate) { + HandleScope handle_scope(isolate); + auto context = isolate->GetCurrentContext(); + if (context.IsEmpty()) + return nullptr; + return Environment::GetCurrent(context)->event_loop(); +} + AsyncResource::AsyncResource(v8::Isolate* _isolate, v8::Local _object, char* name) : isolate(_isolate) { diff --git a/src/node_api_backport.h b/src/node_api_backport.h index 89ae33d0861529..968169ef3bd05f 100644 --- a/src/node_api_backport.h +++ b/src/node_api_backport.h @@ -31,6 +31,8 @@ class CallbackScope { Environment::AsyncCallbackScope callback_scope; }; +uv_loop_t *GetCurrentEventLoop(v8::Isolate *isolate); + NODE_EXTERN async_context EmitAsyncInit(v8::Isolate* isolate, v8::Local resource, v8::Local name, diff --git a/test/addons-napi/test_general/test.js b/test/addons-napi/test_general/test.js index 51ce9563a7196e..10b85ef727bb20 100644 --- a/test/addons-napi/test_general/test.js +++ b/test/addons-napi/test_general/test.js @@ -34,7 +34,7 @@ assert.ok(test_general.testGetPrototype(baseObject) !== // test version management funcitons // expected version is currently 1 -assert.strictEqual(test_general.testGetVersion(), 1); +assert.strictEqual(test_general.testGetVersion(), 2); const [ major, minor, patch, release ] = test_general.testGetNodeVersion(); assert.strictEqual(process.version.split('-')[0], From b20c278a7c148d02301562f361140d56d0faa5a9 Mon Sep 17 00:00:00 2001 From: neta Date: Mon, 20 Nov 2017 21:41:56 +0200 Subject: [PATCH 178/227] src: add napi_handle_scope_mismatch to msg list Backport-PR-URL: https://github.com/nodejs/node/pull/19447 PR-URL: https://github.com/nodejs/node/pull/17161 Reviewed-By: Timothy Gu Reviewed-By: Benjamin Gruenbaum Reviewed-By: Refael Ackermann Reviewed-By: Anna Henningsen Reviewed-By: Colin Ihrig Reviewed-By: James M Snell Reviewed-By: Michael Dawson Reviewed-By: Alexey Orlenko Reviewed-By: Franziska Hinkelmann --- src/node_api.cc | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/src/node_api.cc b/src/node_api.cc index 10c8093f5a280b..9f6d407cfd3ec1 100644 --- a/src/node_api.cc +++ b/src/node_api.cc @@ -909,7 +909,8 @@ const char* error_messages[] = {nullptr, "Unknown failure", "An exception is pending", "The async work item was cancelled", - "napi_escape_handle already called on scope"}; + "napi_escape_handle already called on scope", + "Invalid handle scope usage"}; static inline napi_status napi_clear_last_error(napi_env env) { env->last_error.error_code = napi_ok; @@ -940,9 +941,9 @@ napi_status napi_get_last_error_info(napi_env env, // We don't have a napi_status_last as this would result in an ABI // change each time a message was added. 
static_assert( - node::arraysize(error_messages) == napi_escape_called_twice + 1, + node::arraysize(error_messages) == napi_handle_scope_mismatch + 1, "Count of error messages must match count of error values"); - CHECK_LE(env->last_error.error_code, napi_escape_called_twice); + CHECK_LE(env->last_error.error_code, napi_handle_scope_mismatch); // Wait until someone requests the last error information to fetch the error // message string From c6852126fd3257eb2ea0a38d635c3895ba67921e Mon Sep 17 00:00:00 2001 From: Rich Trott Date: Sat, 18 Nov 2017 20:01:57 -0800 Subject: [PATCH 179/227] doc: use "JavaScript" instead of "Javascript" MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Backport-PR-URL: https://github.com/nodejs/node/pull/19447 PR-URL: https://github.com/nodejs/node/pull/17163 Reviewed-By: Michaël Zasso Reviewed-By: Vse Mozhet Byt Reviewed-By: Benjamin Gruenbaum Reviewed-By: Alexey Orlenko Reviewed-By: Colin Ihrig Reviewed-By: James M Snell Reviewed-By: Gireesh Punathil --- README.md | 4 ++-- doc/api/n-api.md | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index 6d894e9c8e40e8..ab7c7967efc26e 100644 --- a/README.md +++ b/README.md @@ -194,8 +194,8 @@ us a report nonetheless. - [#14519](https://github.com/nodejs/node/issues/14519): _Internal domain function can be used to cause segfaults_. Causing program termination using - either the public Javascript APIs or the private bindings layer APIs requires - the ability to execute arbitrary Javascript code, which is already the highest + either the public JavaScript APIs or the private bindings layer APIs requires + the ability to execute arbitrary JavaScript code, which is already the highest level of privilege possible. - [#12141](https://github.com/nodejs/node/pull/12141): _buffer: zero fill diff --git a/doc/api/n-api.md b/doc/api/n-api.md index b31ea3a2688d43..3c2ce8d14eb080 100644 --- a/doc/api/n-api.md +++ b/doc/api/n-api.md @@ -202,7 +202,7 @@ typedef void (*napi_async_complete_callback)(napi_env env, ``` ## Error Handling -N-API uses both return values and Javascript exceptions for error handling. +N-API uses both return values and JavaScript exceptions for error handling. The following sections explain the approach for each case. ### Return values @@ -2861,7 +2861,7 @@ Returns `napi_ok` if the API succeeded. This API allows an add-on author to create a function object in native code. This is the primary mechanism to allow calling *into* the add-on's native code -*from* Javascript. +*from* JavaScript. **Note:** The newly created function is not automatically visible from script after this call. Instead, a property must be explicitly set on any From d4284a464bc1ef73493f5fd22672f784cbe159fe Mon Sep 17 00:00:00 2001 From: Daniel Bevenius Date: Thu, 23 Nov 2017 15:25:14 +0100 Subject: [PATCH 180/227] n-api: use nullptr instead of NULL in node_api.cc MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This commit changes two checks which use NULL to use nullptr. I'm not very familiar with N-API but wanted to bring this up in case it was something that was overlooked. 
Backport-PR-URL: https://github.com/nodejs/node/pull/19447 PR-URL: https://github.com/nodejs/node/pull/17276 Reviewed-By: Anna Henningsen Reviewed-By: Colin Ihrig Reviewed-By: Benjamin Gruenbaum Reviewed-By: Timothy Gu Reviewed-By: Ali Ijaz Sheikh Reviewed-By: Michael Dawson Reviewed-By: Lance Ball Reviewed-By: Alexey Orlenko Reviewed-By: James M Snell Reviewed-By: Michaël Zasso --- src/node_api.cc | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/node_api.cc b/src/node_api.cc index 9f6d407cfd3ec1..adc1c9c1b48e7d 100644 --- a/src/node_api.cc +++ b/src/node_api.cc @@ -1228,7 +1228,7 @@ napi_status napi_delete_property(napi_env env, v8::Maybe<bool> delete_maybe = obj->Delete(context, k); CHECK_MAYBE_NOTHING(env, delete_maybe, napi_generic_failure); - if (result != NULL) + if (result != nullptr) *result = delete_maybe.FromMaybe(false); return GET_RETURN_STATUS(env); @@ -1406,7 +1406,7 @@ napi_status napi_delete_element(napi_env env, v8::Maybe<bool> delete_maybe = obj->Delete(context, index); CHECK_MAYBE_NOTHING(env, delete_maybe, napi_generic_failure); - if (result != NULL) + if (result != nullptr) *result = delete_maybe.FromMaybe(false); return GET_RETURN_STATUS(env); From 173f29763ebcf765f21c5056a2b79f874bd94cec Mon Sep 17 00:00:00 2001 From: Franziska Hinkelmann Date: Mon, 4 Dec 2017 18:45:22 +0100 Subject: [PATCH 181/227] doc: update example in module registration Update return type of `Init` function in documentation to match `napi_addon_register_func` signature. Return type used to be `void`, now it is `napi_value`. Backport-PR-URL: https://github.com/nodejs/node/pull/19447 PR-URL: https://github.com/nodejs/node/pull/17424 Reviewed-By: Michael Dawson Reviewed-By: James M Snell --- doc/api/n-api.md | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/doc/api/n-api.md b/doc/api/n-api.md index 3c2ce8d14eb080..f58f96742eecb3 100644 --- a/doc/api/n-api.md +++ b/doc/api/n-api.md @@ -2877,15 +2877,17 @@ napi_value SayHello(napi_env env, napi_callback_info info) { return nullptr; } -void Init(napi_env env, napi_value exports, napi_value module, void* priv) { +napi_value Init(napi_env env, napi_value exports) { napi_status status; napi_value fn; - status = napi_create_function(env, NULL, SayHello, NULL, &fn); - if (status != napi_ok) return; + status = napi_create_function(env, nullptr, 0, SayHello, nullptr, &fn); + if (status != napi_ok) return nullptr; status = napi_set_named_property(env, exports, "sayHello", fn); - if (status != napi_ok) return; + if (status != napi_ok) return nullptr; + + return exports; } NAPI_MODULE(addon, Init) From e54b58c02447cbe4fd3063d93d1bd651a865450c Mon Sep 17 00:00:00 2001 From: Leko Date: Mon, 4 Dec 2017 16:07:45 +0900 Subject: [PATCH 182/227] test: replace assert.throws with common.expectsError Backport-PR-URL: https://github.com/nodejs/node/pull/19447 PR-URL: https://github.com/nodejs/node/pull/17445 Reviewed-By: Luigi Pinca Reviewed-By: Colin Ihrig Reviewed-By: Jon Moss Reviewed-By: James M Snell --- test/addons-napi/test_error/test.js | 21 +++++++++------------ 1 file changed, 9 insertions(+), 12 deletions(-) diff --git a/test/addons-napi/test_error/test.js b/test/addons-napi/test_error/test.js index 245e734e7503f0..bc2b7a8db75d3e 100644 --- a/test/addons-napi/test_error/test.js +++ b/test/addons-napi/test_error/test.js @@ -60,29 +60,26 @@ assert.throws(() => { test_error.throwTypeError(); }, /^TypeError: type error$/);
-assert.throws( +common.expectsError( () => test_error.throwErrorCode(), - common.expectsError({ + { code: 'ERR_TEST_CODE', message: 'Error [error]' - }) -); + }); -assert.throws( +common.expectsError( () => test_error.throwRangeErrorCode(), - common.expectsError({ + { code: 'ERR_TEST_CODE', message: 'RangeError [range error]' - }) -); + }); -assert.throws( +common.expectsError( () => test_error.throwTypeErrorCode(), - common.expectsError({ + { code: 'ERR_TEST_CODE', message: 'TypeError [type error]' - }) -); + }); let error = test_error.createError(); assert.ok(error instanceof Error, 'expected error to be an instance of Error'); From 86ddd0360856bee56d4012fe1d540a6563da9f4a Mon Sep 17 00:00:00 2001 From: babygoat Date: Thu, 23 Nov 2017 19:44:20 +0800 Subject: [PATCH 183/227] test: add unhandled rejection guard Add an unhandled rejection function in addons-napi/test_promise/test.js. Also, add a rejection handler to catch the unhandled rejection after introducing the guard and test the reason code. Backport-PR-URL: https://github.com/nodejs/node/pull/19447 PR-URL: https://github.com/nodejs/node/pull/17275 Reviewed-By: Anna Henningsen Reviewed-By: Ruben Bridgewater --- test/addons-napi/test_promise/test.js | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/test/addons-napi/test_promise/test.js b/test/addons-napi/test_promise/test.js index b43ecd87363e44..6dc51b3fa558a2 100644 --- a/test/addons-napi/test_promise/test.js +++ b/test/addons-napi/test_promise/test.js @@ -7,6 +7,8 @@ const common = require('../../common'); const assert = require('assert'); const test_promise = require(`./build/${common.buildType}/test_promise`); +common.crashOnUnhandledRejection(); + // A resolution { const expected_result = 42; @@ -44,7 +46,14 @@ const test_promise = require(`./build/${common.buildType}/test_promise`); } assert.strictEqual(test_promise.isPromise(test_promise.createPromise()), true); -assert.strictEqual(test_promise.isPromise(Promise.reject(-1)), true); + +const rejectPromise = Promise.reject(-1); +const expected_reason = -1; +assert.strictEqual(test_promise.isPromise(rejectPromise), true); +rejectPromise.catch((reason) => { + assert.strictEqual(reason, expected_reason); +}); + assert.strictEqual(test_promise.isPromise(2.4), false); assert.strictEqual(test_promise.isPromise('I promise!'), false); assert.strictEqual(test_promise.isPromise(undefined), false); From 412cc177485f8c4835b2ecf9263f063bf649b21a Mon Sep 17 00:00:00 2001 From: Rich Trott Date: Tue, 12 Dec 2017 21:20:47 -0800 Subject: [PATCH 184/227] test: remove literals that obscure assert messages Remove string literals as messages to `assert.strictEqual()`. They can be misleading here (where perhaps the reason an assertion failed isn't that the deleter wasn't called but rather was called too many times).
Backport-PR-URL: https://github.com/nodejs/node/pull/19447 PR-URL: https://github.com/nodejs/node/pull/17642 Reviewed-By: Anna Henningsen Reviewed-By: Ruben Bridgewater Reviewed-By: Colin Ihrig Reviewed-By: Luigi Pinca Reviewed-By: Jon Moss Reviewed-By: James M Snell Reviewed-By: Anatoli Papirovski Reviewed-By: Michael Dawson --- test/addons-napi/test_buffer/test.js | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/test/addons-napi/test_buffer/test.js b/test/addons-napi/test_buffer/test.js index 713966775df18b..740b0474a79c60 100644 --- a/test/addons-napi/test_buffer/test.js +++ b/test/addons-napi/test_buffer/test.js @@ -9,14 +9,13 @@ assert.strictEqual(binding.newBuffer().toString(), binding.theText); assert.strictEqual(binding.newExternalBuffer().toString(), binding.theText); console.log('gc1'); global.gc(); -assert.strictEqual(binding.getDeleterCallCount(), 1, 'deleter was not called'); +assert.strictEqual(binding.getDeleterCallCount(), 1); assert.strictEqual(binding.copyBuffer().toString(), binding.theText); let buffer = binding.staticBuffer(); -assert.strictEqual(binding.bufferHasInstance(buffer), true, - 'buffer type checking fails'); +assert.strictEqual(binding.bufferHasInstance(buffer), true); assert.strictEqual(binding.bufferInfo(buffer), true); buffer = null; global.gc(); console.log('gc2'); -assert.strictEqual(binding.getDeleterCallCount(), 2, 'deleter was not called'); +assert.strictEqual(binding.getDeleterCallCount(), 2); From 65ea7abd55c06f43bf3e26adfb28961c82dc7d84 Mon Sep 17 00:00:00 2001 From: alnyan Date: Sun, 17 Dec 2017 12:22:46 +0200 Subject: [PATCH 185/227] n-api: fix memory leak in napi_async_destroy() Backport-PR-URL: https://github.com/nodejs/node/pull/19447 PR-URL: https://github.com/nodejs/node/pull/17714 Reviewed-By: Alexey Orlenko Reviewed-By: Anna Henningsen Reviewed-By: Colin Ihrig Reviewed-By: Timothy Gu Reviewed-By: James M Snell --- src/node_api.cc | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/node_api.cc b/src/node_api.cc index adc1c9c1b48e7d..b3334c486edc14 100644 --- a/src/node_api.cc +++ b/src/node_api.cc @@ -2864,6 +2864,8 @@ napi_status napi_async_destroy(napi_env env, reinterpret_cast(async_context); node::EmitAsyncDestroy(isolate, *node_async_context); + delete node_async_context; + return napi_clear_last_error(env); } From a6c277e2eb9e55dd17153931ee3a29ad8635e731 Mon Sep 17 00:00:00 2001 From: Nicholas Drane Date: Thu, 21 Dec 2017 16:48:41 -0600 Subject: [PATCH 186/227] test: remove ambiguous error messages from test_error assert.strictEqual accepts 3 arguments, the last of which allows for user-specified error message to be thrown when the assertion fails. Unfortunately, this error message is less helpful than the default when it is vague. This commit removes vague, user-specified error messages, instead relying on clearer, default error messages. 
Backport-PR-URL: https://github.com/nodejs/node/pull/19447 PR-URL: https://github.com/nodejs/node/pull/17812 Reviewed-By: Colin Ihrig Reviewed-By: Jon Moss Reviewed-By: Rich Trott Reviewed-By: Ruben Bridgewater Reviewed-By: James M Snell Reviewed-By: Luigi Pinca --- test/addons-napi/test_error/test.js | 46 ++++++++--------------------- 1 file changed, 12 insertions(+), 34 deletions(-) diff --git a/test/addons-napi/test_error/test.js b/test/addons-napi/test_error/test.js index bc2b7a8db75d3e..d5c92cb4c3cd2e 100644 --- a/test/addons-napi/test_error/test.js +++ b/test/addons-napi/test_error/test.js @@ -83,56 +83,34 @@ common.expectsError( let error = test_error.createError(); assert.ok(error instanceof Error, 'expected error to be an instance of Error'); -assert.strictEqual(error.message, 'error', 'expected message to be "error"'); +assert.strictEqual(error.message, 'error'); error = test_error.createRangeError(); assert.ok(error instanceof RangeError, 'expected error to be an instance of RangeError'); -assert.strictEqual(error.message, - 'range error', - 'expected message to be "range error"'); +assert.strictEqual(error.message, 'range error'); error = test_error.createTypeError(); assert.ok(error instanceof TypeError, 'expected error to be an instance of TypeError'); -assert.strictEqual(error.message, - 'type error', - 'expected message to be "type error"'); +assert.strictEqual(error.message, 'type error'); error = test_error.createErrorCode(); assert.ok(error instanceof Error, 'expected error to be an instance of Error'); -assert.strictEqual(error.code, - 'ERR_TEST_CODE', - 'expected code to be "ERR_TEST_CODE"'); -assert.strictEqual(error.message, - 'Error [error]', - 'expected message to be "Error [error]"'); -assert.strictEqual(error.name, - 'Error [ERR_TEST_CODE]', - 'expected name to be "Error [ERR_TEST_CODE]"'); +assert.strictEqual(error.code, 'ERR_TEST_CODE'); +assert.strictEqual(error.message, 'Error [error]'); +assert.strictEqual(error.name, 'Error [ERR_TEST_CODE]'); error = test_error.createRangeErrorCode(); assert.ok(error instanceof RangeError, 'expected error to be an instance of RangeError'); -assert.strictEqual(error.message, - 'RangeError [range error]', - 'expected message to be "RangeError [range error]"'); -assert.strictEqual(error.code, - 'ERR_TEST_CODE', - 'expected code to be "ERR_TEST_CODE"'); -assert.strictEqual(error.name, - 'RangeError [ERR_TEST_CODE]', - 'expected name to be "RangeError[ERR_TEST_CODE]"'); +assert.strictEqual(error.message, 'RangeError [range error]'); +assert.strictEqual(error.code, 'ERR_TEST_CODE'); +assert.strictEqual(error.name, 'RangeError [ERR_TEST_CODE]'); error = test_error.createTypeErrorCode(); assert.ok(error instanceof TypeError, 'expected error to be an instance of TypeError'); -assert.strictEqual(error.message, - 'TypeError [type error]', - 'expected message to be "TypeError [type error]"'); -assert.strictEqual(error.code, - 'ERR_TEST_CODE', - 'expected code to be "ERR_TEST_CODE"'); -assert.strictEqual(error.name, - 'TypeError [ERR_TEST_CODE]', - 'expected name to be "TypeError[ERR_TEST_CODE]"'); +assert.strictEqual(error.message, 'TypeError [type error]'); +assert.strictEqual(error.code, 'ERR_TEST_CODE'); +assert.strictEqual(error.name, 'TypeError [ERR_TEST_CODE]'); From 3d8e1aaf487995d10e4b688dab92bfb4c525d93e Mon Sep 17 00:00:00 2001 From: Michael Dawson Date: Fri, 5 Jan 2018 12:19:20 -0500 Subject: [PATCH 187/227] doc: updates examples to use NULL MIME-Version: 1.0 Content-Type: text/plain; 
charset=UTF-8 Content-Transfer-Encoding: 8bit Examples in the N-API doc used a mix of nullptr and NULL. We should be consistent and because N-API is a 'C' API I believe using NULL is better. This will avoid any potential confusion as to whether N-API can be used with plain C. Backport-PR-URL: https://github.com/nodejs/node/pull/19447 PR-URL: https://github.com/nodejs/node/pull/18008 Reviewed-By: Anna Henningsen Reviewed-By: James M Snell Reviewed-By: Ruben Bridgewater Reviewed-By: Colin Ihrig Reviewed-By: Luigi Pinca Reviewed-By: Tobias Nießen Reviewed-By: Franziska Hinkelmann Reviewed-By: Gireesh Punathil --- doc/api/n-api.md | 32 ++++++++++++++++---------------- 1 file changed, 16 insertions(+), 16 deletions(-) diff --git a/doc/api/n-api.md b/doc/api/n-api.md index f58f96742eecb3..86bdecbdff0273 100644 --- a/doc/api/n-api.md +++ b/doc/api/n-api.md @@ -903,9 +903,9 @@ napi_value Init(napi_env env, napi_value exports) { napi_status status; napi_property_descriptor desc = {"hello", Method, 0, 0, 0, napi_default, 0}; - if (status != napi_ok) return nullptr; + if (status != napi_ok) return NULL; status = napi_define_properties(env, exports, 1, &desc); - if (status != napi_ok) return nullptr; + if (status != napi_ok) return NULL; return exports; } ``` @@ -917,7 +917,7 @@ napi_value Init(napi_env env, napi_value exports) { napi_value method; napi_status status; status = napi_create_function(env, "exports", Method, NULL, &method)); - if (status != napi_ok) return nullptr; + if (status != napi_ok) return NULL; return method; } ``` @@ -930,21 +930,21 @@ For example, to define a class so that new instances can be created napi_value Init(napi_env env, napi_value exports) { napi_status status; napi_property_descriptor properties[] = { - { "value", nullptr, GetValue, SetValue, 0, napi_default, 0 }, + { "value", NULL, GetValue, SetValue, 0, napi_default, 0 }, DECLARE_NAPI_METHOD("plusOne", PlusOne), DECLARE_NAPI_METHOD("multiply", Multiply), }; napi_value cons; status = - napi_define_class(env, "MyObject", New, nullptr, 3, properties, &cons); - if (status != napi_ok) return nullptr; + napi_define_class(env, "MyObject", New, NULL, 3, properties, &cons); + if (status != napi_ok) return NULL; status = napi_create_reference(env, cons, 1, &constructor); - if (status != napi_ok) return nullptr; + if (status != napi_ok) return NULL; status = napi_set_named_property(env, exports, "MyObject", cons); - if (status != napi_ok) return nullptr; + if (status != napi_ok) return NULL; return exports; } @@ -2362,8 +2362,8 @@ if (status != napi_ok) return status; // Set the properties napi_property_descriptor descriptors[] = { - { "foo", nullptr, 0, 0, 0, fooValue, napi_default, 0 }, - { "bar", nullptr, 0, 0, 0, barValue, napi_default, 0 } + { "foo", NULL, 0, 0, 0, fooValue, napi_default, 0 }, + { "bar", NULL, 0, 0, 0, barValue, napi_default, 0 } } status = napi_define_properties(env, obj, @@ -2874,18 +2874,18 @@ object. 
A sample module might look as follows: ```C napi_value SayHello(napi_env env, napi_callback_info info) { printf("Hello\n"); - return nullptr; + return NULL; } napi_value Init(napi_env env, napi_value exports) { napi_status status; napi_value fn; - status = napi_create_function(env, nullptr, 0, SayHello, nullptr, &fn); - if (status != napi_ok) return nullptr; + status = napi_create_function(env, NULL, 0, SayHello, NULL, &fn); + if (status != napi_ok) return NULL; status = napi_set_named_property(env, exports, "sayHello", fn); - if (status != napi_ok) return nullptr; + if (status != napi_ok) return NULL; return exports; } @@ -2950,7 +2950,7 @@ napi_status napi_get_new_target(napi_env env, Returns `napi_ok` if the API succeeded. This API returns the `new.target` of the constructor call. If the current -callback is not a constructor call, the result is `nullptr`. +callback is not a constructor call, the result is `NULL`. ### *napi_new_instance* @@ -3387,7 +3387,7 @@ napi_status napi_async_init(napi_env env, Returns `napi_ok` if the API succeeded. -### *napi_async_destroy** +### napi_async_destroy @@ -3401,7 +3401,7 @@ napi_status napi_async_destroy(napi_env env, Returns `napi_ok` if the API succeeded. -### *napi_make_callback* +### napi_make_callback @@ -1036,7 +1036,7 @@ JavaScript arrays are described in [Section 22.1](https://tc39.github.io/ecma262/#sec-array-objects) of the ECMAScript Language Specification. -#### *napi_create_array_with_length* +#### napi_create_array_with_length @@ -1065,7 +1065,7 @@ JavaScript arrays are described in [Section 22.1](https://tc39.github.io/ecma262/#sec-array-objects) of the ECMAScript Language Specification. -#### *napi_create_arraybuffer* +#### napi_create_arraybuffer @@ -1097,7 +1097,7 @@ JavaScript ArrayBuffer objects are described in [Section 24.1](https://tc39.github.io/ecma262/#sec-arraybuffer-objects) of the ECMAScript Language Specification. -#### *napi_create_buffer* +#### napi_create_buffer @@ -1118,7 +1118,7 @@ Returns `napi_ok` if the API succeeded. This API allocates a `node::Buffer` object. While this is still a fully-supported data structure, in most cases using a TypedArray will suffice. -#### *napi_create_buffer_copy* +#### napi_create_buffer_copy @@ -1143,7 +1143,7 @@ This API allocates a `node::Buffer` object and initializes it with data copied from the passed-in buffer. While this is still a fully-supported data structure, in most cases using a TypedArray will suffice. -#### *napi_create_external* +#### napi_create_external @@ -1210,7 +1210,7 @@ JavaScript ArrayBuffers are described in [Section 24.1](https://tc39.github.io/ecma262/#sec-arraybuffer-objects) of the ECMAScript Language Specification. -#### *napi_create_external_buffer* +#### napi_create_external_buffer @@ -1241,7 +1241,7 @@ structure, in most cases using a TypedArray will suffice. **Note:** For Node.js >=4 `Buffers` are Uint8Arrays. -#### *napi_create_function* +#### napi_create_function @@ -1274,7 +1274,7 @@ JavaScript Functions are described in [Section 19.2](https://tc39.github.io/ecma262/#sec-function-objects) of the ECMAScript Language Specification. -#### *napi_create_object* +#### napi_create_object @@ -1294,7 +1294,7 @@ The JavaScript Object type is described in [Section 6.1.7](https://tc39.github.io/ecma262/#sec-object-type) of the ECMAScript Language Specification. 
-#### *napi_create_symbol* +#### napi_create_symbol @@ -1317,7 +1317,7 @@ The JavaScript Symbol type is described in [Section 19.4](https://tc39.github.io/ecma262/#sec-symbol-objects) of the ECMAScript Language Specification. -#### *napi_create_typedarray* +#### napi_create_typedarray @@ -1353,7 +1353,7 @@ JavaScript TypedArray Objects are described in of the ECMAScript Language Specification. -#### *napi_create_dataview* +#### napi_create_dataview @@ -1387,7 +1387,7 @@ JavaScript DataView Objects are described in [Section 24.3][] of the ECMAScript Language Specification. ### Functions to convert from C types to N-API -#### *napi_create_int32* +#### napi_create_int32 @@ -1408,7 +1408,7 @@ The JavaScript Number type is described in [Section 6.1.6](https://tc39.github.io/ecma262/#sec-ecmascript-language-types-number-type) of the ECMAScript Language Specification. -#### *napi_create_uint32* +#### napi_create_uint32 @@ -1429,7 +1429,7 @@ The JavaScript Number type is described in [Section 6.1.6](https://tc39.github.io/ecma262/#sec-ecmascript-language-types-number-type) of the ECMAScript Language Specification. -#### *napi_create_int64* +#### napi_create_int64 @@ -1456,7 +1456,7 @@ outside the range of [`Number.MAX_SAFE_INTEGER`](https://tc39.github.io/ecma262/#sec-number.max_safe_integer) (2^53 - 1) will lose precision. -#### *napi_create_double* +#### napi_create_double @@ -1477,7 +1477,7 @@ The JavaScript Number type is described in [Section 6.1.6](https://tc39.github.io/ecma262/#sec-ecmascript-language-types-number-type) of the ECMAScript Language Specification. -#### *napi_create_string_latin1* +#### napi_create_string_latin1 @@ -1502,7 +1502,7 @@ The JavaScript String type is described in [Section 6.1.4](https://tc39.github.io/ecma262/#sec-ecmascript-language-types-string-type) of the ECMAScript Language Specification. -#### *napi_create_string_utf16* +#### napi_create_string_utf16 @@ -1527,7 +1527,7 @@ The JavaScript String type is described in [Section 6.1.4](https://tc39.github.io/ecma262/#sec-ecmascript-language-types-string-type) of the ECMAScript Language Specification. -#### *napi_create_string_utf8* +#### napi_create_string_utf8 @@ -1553,7 +1553,7 @@ The JavaScript String type is described in of the ECMAScript Language Specification. ### Functions to convert from N-API to C types -#### *napi_get_array_length* +#### napi_get_array_length @@ -1576,7 +1576,7 @@ Array length is described in [Section 22.1.4.1](https://tc39.github.io/ecma262/#sec-properties-of-array-instances-length) of the ECMAScript Language Specification. -#### *napi_get_arraybuffer_info* +#### napi_get_arraybuffer_info @@ -1604,7 +1604,7 @@ which can be used to guarantee control over the lifetime of the ArrayBuffer. It's also safe to use the returned data buffer within the same callback as long as there are no calls to other APIs that might trigger a GC. -#### *napi_get_buffer_info* +#### napi_get_buffer_info @@ -1628,7 +1628,7 @@ and it's length. *Warning*: Use caution while using this API since the underlying data buffer's lifetime is not guaranteed if it's managed by the VM. -#### *napi_get_prototype* +#### napi_get_prototype @@ -1646,7 +1646,7 @@ not the same as the function's `prototype` property). Returns `napi_ok` if the API succeeded. -#### *napi_get_typedarray_info* +#### napi_get_typedarray_info @@ -1678,7 +1678,7 @@ is managed by the VM -#### *napi_get_dataview_info* +#### napi_get_dataview_info @@ -1706,7 +1706,7 @@ Returns `napi_ok` if the API succeeded. 
This API returns various properties of a DataView. -#### *napi_get_value_bool* +#### napi_get_value_bool @@ -1725,7 +1725,7 @@ passed in it returns `napi_boolean_expected`. This API returns the C boolean primitive equivalent of the given JavaScript Boolean. -#### *napi_get_value_double* +#### napi_get_value_double @@ -1747,7 +1747,7 @@ This API returns the C double primitive equivalent of the given JavaScript Number. -#### *napi_get_value_external* +#### napi_get_value_external @@ -1767,7 +1767,7 @@ passed in it returns `napi_invalid_arg`. This API retrieves the external data pointer that was previously passed to `napi_create_external()`. -#### *napi_get_value_int32* +#### napi_get_value_int32 @@ -1790,7 +1790,7 @@ of the given JavaScript Number. If the number exceeds the range of the bottom 32 bits. This can result in a large positive number becoming a negative number if the value is > 2^31 -1. -#### *napi_get_value_int64* +#### napi_get_value_int64 @@ -1810,7 +1810,7 @@ is passed in it returns `napi_number_expected`. This API returns the C int64 primitive equivalent of the given JavaScript Number -#### *napi_get_value_string_latin1* +#### napi_get_value_string_latin1 @@ -1837,7 +1837,7 @@ is passed in it returns `napi_string_expected`. This API returns the ISO-8859-1-encoded string corresponding the value passed in. -#### *napi_get_value_string_utf8* +#### napi_get_value_string_utf8 @@ -1863,7 +1863,7 @@ is passed in it returns `napi_string_expected`. This API returns the UTF8-encoded string corresponding the value passed in. -#### *napi_get_value_string_utf16* +#### napi_get_value_string_utf16 @@ -1889,7 +1889,7 @@ is passed in it returns `napi_string_expected`. This API returns the UTF16-encoded string corresponding the value passed in. -#### *napi_get_value_uint32* +#### napi_get_value_uint32 @@ -1911,7 +1911,7 @@ This API returns the C primitive equivalent of the given `napi_value` as a `uint32_t`. ### Functions to get global instances -#### *napi_get_boolean* +#### napi_get_boolean @@ -1929,7 +1929,7 @@ Returns `napi_ok` if the API succeeded. This API is used to return the JavaScript singleton object that is used to represent the given boolean value -#### *napi_get_global* +#### napi_get_global @@ -1944,7 +1944,7 @@ Returns `napi_ok` if the API succeeded. This API returns the global Object. -#### *napi_get_null* +#### napi_get_null @@ -1959,7 +1959,7 @@ Returns `napi_ok` if the API succeeded. This API returns the null Object. -#### *napi_get_undefined* +#### napi_get_undefined @@ -1987,7 +1987,7 @@ These APIs support doing one of the following: 2. Check the type of a JavaScript value 3. Check for equality between two JavaScript values -### *napi_coerce_to_bool* +### napi_coerce_to_bool @@ -2008,7 +2008,7 @@ This API implements the abstract operation ToBoolean as defined in of the ECMAScript Language Specification. This API can be re-entrant if getters are defined on the passed-in Object. -### *napi_coerce_to_number* +### napi_coerce_to_number @@ -2029,7 +2029,7 @@ This API implements the abstract operation ToNumber as defined in of the ECMAScript Language Specification. This API can be re-entrant if getters are defined on the passed-in Object. -### *napi_coerce_to_object* +### napi_coerce_to_object @@ -2050,7 +2050,7 @@ This API implements the abstract operation ToObject as defined in of the ECMAScript Language Specification. This API can be re-entrant if getters are defined on the passed-in Object. 
-### *napi_coerce_to_string* +### napi_coerce_to_string @@ -2071,7 +2071,7 @@ This API implements the abstract operation ToString as defined in of the ECMAScript Language Specification. This API can be re-entrant if getters are defined on the passed-in Object. -### *napi_typeof* +### napi_typeof @@ -2092,7 +2092,7 @@ the object as defined in [Section 12.5.5][] of the ECMAScript Language Specification. However, it has support for detecting an External value. If `value` has a type that is invalid, an error is returned. -### *napi_instanceof* +### napi_instanceof @@ -2117,7 +2117,7 @@ defined in [Section 12.10.4](https://tc39.github.io/ecma262/#sec-instanceofoperator) of the ECMAScript Language Specification. -### *napi_is_array* +### napi_is_array @@ -2135,7 +2135,7 @@ This API represents invoking the `IsArray` operation on the object as defined in [Section 7.2.2](https://tc39.github.io/ecma262/#sec-isarray) of the ECMAScript Language Specification. -### *napi_is_arraybuffer* +### napi_is_arraybuffer @@ -2151,7 +2151,7 @@ Returns `napi_ok` if the API succeeded. This API checks if the Object passsed in is an array buffer. -### *napi_is_buffer* +### napi_is_buffer @@ -2168,7 +2168,7 @@ Returns `napi_ok` if the API succeeded. This API checks if the Object passsed in is a buffer. -### *napi_is_error* +### napi_is_error @@ -2184,7 +2184,7 @@ Returns `napi_ok` if the API succeeded. This API checks if the Object passsed in is an Error. -### *napi_is_typedarray* +### napi_is_typedarray @@ -2202,7 +2202,7 @@ This API checks if the Object passsed in is a typed array. -### *napi_is_dataview* +### napi_is_dataview @@ -2219,7 +2219,7 @@ Returns `napi_ok` if the API succeeded. This API checks if the Object passed in is a DataView. -### *napi_strict_equals* +### napi_strict_equals @@ -2373,7 +2373,7 @@ if (status != napi_ok) return status; ``` ### Structures -#### *napi_property_attributes* +#### napi_property_attributes ```C typedef enum { napi_default = 0, @@ -2407,7 +2407,7 @@ a static property on a class as opposed to an instance property, which is the default. This is used only by [`napi_define_class`][]. It is ignored by `napi_define_properties`. -#### *napi_property_descriptor* +#### napi_property_descriptor ```C typedef struct { // One of utf8name or name should be NULL. @@ -2453,7 +2453,7 @@ this function is invoked. See [`napi_property_attributes`](#napi_property_attributes). ### Functions -#### *napi_get_property_names* +#### napi_get_property_names @@ -2474,7 +2474,7 @@ Returns `napi_ok` if the API succeeded. This API returns the array of propertys for the Object passed in -#### *napi_set_property* +#### napi_set_property @@ -2494,7 +2494,7 @@ Returns `napi_ok` if the API succeeded. This API set a property on the Object passed in. -#### *napi_get_property* +#### napi_get_property @@ -2515,7 +2515,7 @@ Returns `napi_ok` if the API succeeded. This API gets the requested property from the Object passed in. -#### *napi_has_property* +#### napi_has_property @@ -2536,7 +2536,7 @@ Returns `napi_ok` if the API succeeded. This API checks if the Object passed in has the named property. -#### *napi_delete_property* +#### napi_delete_property @@ -2558,7 +2558,7 @@ Returns `napi_ok` if the API succeeded. This API attempts to delete the `key` own property from `object`. -#### *napi_has_own_property* +#### napi_has_own_property @@ -2581,7 +2581,7 @@ be a string or a Symbol, or an error will be thrown. N-API will not perform any conversion between data types. 
-#### *napi_set_named_property* +#### napi_set_named_property @@ -2602,7 +2602,7 @@ Returns `napi_ok` if the API succeeded. This method is equivalent to calling [`napi_set_property`][] with a `napi_value` created from the string passed in as `utf8Name` -#### *napi_get_named_property* +#### napi_get_named_property @@ -2623,7 +2623,7 @@ Returns `napi_ok` if the API succeeded. This method is equivalent to calling [`napi_get_property`][] with a `napi_value` created from the string passed in as `utf8Name` -#### *napi_has_named_property* +#### napi_has_named_property @@ -2644,7 +2644,7 @@ Returns `napi_ok` if the API succeeded. This method is equivalent to calling [`napi_has_property`][] with a `napi_value` created from the string passed in as `utf8Name` -#### *napi_set_element* +#### napi_set_element @@ -2664,7 +2664,7 @@ Returns `napi_ok` if the API succeeded. This API sets and element on the Object passed in. -#### *napi_get_element* +#### napi_get_element @@ -2684,7 +2684,7 @@ Returns `napi_ok` if the API succeeded. This API gets the element at the requested index. -#### *napi_has_element* +#### napi_has_element @@ -2705,7 +2705,7 @@ Returns `napi_ok` if the API succeeded. This API returns if the Object passed in has an element at the requested index. -#### *napi_delete_element* +#### napi_delete_element @@ -2726,7 +2726,7 @@ Returns `napi_ok` if the API succeeded. This API attempts to delete the specified `index` from `object`. -#### *napi_define_properties* +#### napi_define_properties @@ -2769,7 +2769,7 @@ like a regular JavaScript function call, or as a constructor function. -### *napi_call_function* +### napi_call_function @@ -2835,7 +2835,7 @@ status = napi_get_value_int32(env, return_val, &result); if (status != napi_ok) return; ``` -### *napi_create_function* +### napi_create_function @@ -2903,7 +2903,7 @@ myaddon.sayHello(); `NAPI_MODULE` in the earlier snippet but the name of the target in `binding.gyp` responsible for creating the `.node` file. -### *napi_get_cb_info* +### napi_get_cb_info @@ -2933,7 +2933,7 @@ Returns `napi_ok` if the API succeeded. This method is used within a callback function to retrieve details about the call like the arguments and the `this` pointer from a given callback info. -### *napi_get_new_target* +### napi_get_new_target @@ -2952,7 +2952,7 @@ Returns `napi_ok` if the API succeeded. This API returns the `new.target` of the constructor call. If the current callback is not a constructor call, the result is `NULL`. -### *napi_new_instance* +### napi_new_instance @@ -3047,7 +3047,7 @@ if (is_instance) { The reference must be freed once it is no longer needed. -### *napi_define_class* +### napi_define_class @@ -3103,7 +3103,7 @@ case, to prevent the function value from being garbage-collected, create a persistent reference to it using [`napi_create_reference`][] and ensure the reference count is kept >= 1. -### *napi_wrap* +### napi_wrap @@ -3165,7 +3165,7 @@ native instance associated with it by virtue of a previous call to another native instance with the given object, call `napi_remove_wrap()` on it first. -### *napi_unwrap* +### napi_unwrap @@ -3190,7 +3190,7 @@ method or accessor, then the `this` argument to the callback is the wrapper object; the wrapped C++ instance that is the target of the call can be obtained then by calling `napi_unwrap()` on the wrapper object. 
-### *napi_remove_wrap* +### napi_remove_wrap From 2e24a0bfe726c759db2795d67be7eb174ae38d3d Mon Sep 17 00:00:00 2001 From: Jinho Bang Date: Mon, 8 Jan 2018 23:37:27 +0900 Subject: [PATCH 192/227] n-api: throw RangeError napi_create_typedarray() According to the ECMA spec, we should throw a RangeError in the following cases: - `(length * elementSize) + offset` > the size of the array passed in - `offset % elementSize` != `0` In the current implementation, this check was omitted. So, the following code will cause a crash. ``` napi_create_typedarray(env, napi_uint16_array, 2 /* length */, buffer, 1 /* byte_offset */, &output_array); ``` This change fixes the problem and write some related tests. Refs: https://tc39.github.io/ecma262/#sec-typedarray-buffer-byteoffset-length Backport-PR-URL: https://github.com/nodejs/node/pull/19447 PR-URL: https://github.com/nodejs/node/pull/18037 Reviewed-By: Colin Ihrig Reviewed-By: James M Snell Reviewed-By: Michael Dawson --- src/node_api.cc | 51 +++++++++++++++---- test/addons-napi/test_typedarray/test.js | 18 +++++++ .../test_typedarray/test_typedarray.c | 28 ++++++++-- 3 files changed, 85 insertions(+), 12 deletions(-) diff --git a/src/node_api.cc b/src/node_api.cc index 19b993300d1b00..93cce1909f0860 100644 --- a/src/node_api.cc +++ b/src/node_api.cc @@ -149,6 +149,30 @@ struct napi_env__ { (!try_catch.HasCaught() ? napi_ok \ : napi_set_last_error((env), napi_pending_exception)) +#define THROW_RANGE_ERROR_IF_FALSE(env, condition, error, message) \ + do { \ + if (!(condition)) { \ + napi_throw_range_error((env), (error), (message)); \ + return napi_set_last_error((env), napi_generic_failure); \ + } \ + } while (0) + +#define CREATE_TYPED_ARRAY( \ + env, type, size_of_element, buffer, byte_offset, length, out) \ + do { \ + if ((size_of_element) > 1) { \ + THROW_RANGE_ERROR_IF_FALSE( \ + (env), (byte_offset) % (size_of_element) == 0, \ + "ERR_NAPI_INVALID_TYPEDARRAY_ALIGNMENT", \ + "start offset of "#type" should be a multiple of "#size_of_element); \ + } \ + THROW_RANGE_ERROR_IF_FALSE((env), (length) * (size_of_element) + \ + (byte_offset) <= buffer->ByteLength(), \ + "ERR_NAPI_INVALID_TYPEDARRAY_LENGTH", \ + "Invalid typed array length"); \ + (out) = v8::type::New((buffer), (byte_offset), (length)); \ + } while (0) + namespace v8impl { // convert from n-api property attributes to v8::PropertyAttribute @@ -3157,31 +3181,40 @@ napi_status napi_create_typedarray(napi_env env, switch (type) { case napi_int8_array: - typedArray = v8::Int8Array::New(buffer, byte_offset, length); + CREATE_TYPED_ARRAY( + env, Int8Array, 1, buffer, byte_offset, length, typedArray); break; case napi_uint8_array: - typedArray = v8::Uint8Array::New(buffer, byte_offset, length); + CREATE_TYPED_ARRAY( + env, Uint8Array, 1, buffer, byte_offset, length, typedArray); break; case napi_uint8_clamped_array: - typedArray = v8::Uint8ClampedArray::New(buffer, byte_offset, length); + CREATE_TYPED_ARRAY( + env, Uint8ClampedArray, 1, buffer, byte_offset, length, typedArray); break; case napi_int16_array: - typedArray = v8::Int16Array::New(buffer, byte_offset, length); + CREATE_TYPED_ARRAY( + env, Int16Array, 2, buffer, byte_offset, length, typedArray); break; case napi_uint16_array: - typedArray = v8::Uint16Array::New(buffer, byte_offset, length); + CREATE_TYPED_ARRAY( + env, Uint16Array, 2, buffer, byte_offset, length, typedArray); break; case napi_int32_array: - typedArray = v8::Int32Array::New(buffer, byte_offset, length); + CREATE_TYPED_ARRAY( + env, 
Int32Array, 4, buffer, byte_offset, length, typedArray); break; case napi_uint32_array: - typedArray = v8::Uint32Array::New(buffer, byte_offset, length); + CREATE_TYPED_ARRAY( + env, Uint32Array, 4, buffer, byte_offset, length, typedArray); break; case napi_float32_array: - typedArray = v8::Float32Array::New(buffer, byte_offset, length); + CREATE_TYPED_ARRAY( + env, Float32Array, 4, buffer, byte_offset, length, typedArray); break; case napi_float64_array: - typedArray = v8::Float64Array::New(buffer, byte_offset, length); + CREATE_TYPED_ARRAY( + env, Float64Array, 8, buffer, byte_offset, length, typedArray); break; default: return napi_set_last_error(env, napi_invalid_arg); diff --git a/test/addons-napi/test_typedarray/test.js b/test/addons-napi/test_typedarray/test.js index 27ef054fe4635e..4a4e79ebe7bcdb 100644 --- a/test/addons-napi/test_typedarray/test.js +++ b/test/addons-napi/test_typedarray/test.js @@ -55,3 +55,21 @@ arrayTypes.forEach((currentType) => { assert.notStrictEqual(theArray, template); assert.strictEqual(theArray.buffer, buffer); }); + +arrayTypes.forEach((currentType) => { + const template = Reflect.construct(currentType, buffer); + assert.throws(() => { + test_typedarray.CreateTypedArray(template, buffer, 0, 136); + }, /Invalid typed array length/); +}); + +const nonByteArrayTypes = [ Int16Array, Uint16Array, Int32Array, Uint32Array, + Float32Array, Float64Array ]; +nonByteArrayTypes.forEach((currentType) => { + const template = Reflect.construct(currentType, buffer); + assert.throws(() => { + test_typedarray.CreateTypedArray(template, buffer, + currentType.BYTES_PER_ELEMENT + 1, 1); + console.log(`start of offset ${currentType}`); + }, /start offset of/); +}); diff --git a/test/addons-napi/test_typedarray/test_typedarray.c b/test/addons-napi/test_typedarray/test_typedarray.c index 93995d450c440b..febb45e715cb11 100644 --- a/test/addons-napi/test_typedarray/test_typedarray.c +++ b/test/addons-napi/test_typedarray/test_typedarray.c @@ -97,11 +97,11 @@ napi_value External(napi_env env, napi_callback_info info) { } napi_value CreateTypedArray(napi_env env, napi_callback_info info) { - size_t argc = 2; - napi_value args[2]; + size_t argc = 4; + napi_value args[4]; NAPI_CALL(env, napi_get_cb_info(env, info, &argc, args, NULL, NULL)); - NAPI_ASSERT(env, argc == 2, "Wrong number of arguments"); + NAPI_ASSERT(env, argc == 2 || argc == 4, "Wrong number of arguments"); napi_value input_array = args[0]; napi_valuetype valuetype0; @@ -136,6 +136,28 @@ napi_value CreateTypedArray(napi_env env, napi_callback_info info) { NAPI_CALL(env, napi_get_typedarray_info( env, input_array, &type, &length, NULL, &in_array_buffer, &byte_offset)); + if (argc == 4) { + napi_valuetype valuetype2; + NAPI_CALL(env, napi_typeof(env, args[2], &valuetype2)); + + NAPI_ASSERT(env, valuetype2 == napi_number, + "Wrong type of arguments. Expects a number as third argument."); + + uint32_t uint32_length; + NAPI_CALL(env, napi_get_value_uint32(env, args[2], &uint32_length)); + length = uint32_length; + + napi_valuetype valuetype3; + NAPI_CALL(env, napi_typeof(env, args[3], &valuetype3)); + + NAPI_ASSERT(env, valuetype3 == napi_number, + "Wrong type of arguments. 
Expects a number as third argument."); + + uint32_t uint32_byte_offset; + NAPI_CALL(env, napi_get_value_uint32(env, args[3], &uint32_byte_offset)); + byte_offset = uint32_byte_offset; + } + napi_value output_array; NAPI_CALL(env, napi_create_typedarray( env, type, length, input_buffer, byte_offset, &output_array)); From b3806ecd39d1459d1be052879e751e903cbee65c Mon Sep 17 00:00:00 2001 From: furstenheim Date: Sun, 14 Jan 2018 19:01:51 +0100 Subject: [PATCH 193/227] test: fixed typos in napi test MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Backport-PR-URL: https://github.com/nodejs/node/pull/19447 PR-URL: https://github.com/nodejs/node/pull/18148 Reviewed-By: James M Snell Reviewed-By: Jon Moss Reviewed-By: Colin Ihrig Reviewed-By: Michael Dawson Reviewed-By: Anna Henningsen Reviewed-By: Tobias Nießen Reviewed-By: Luigi Pinca --- test/addons-napi/test_symbol/test_symbol.c | 2 +- test/addons-napi/test_typedarray/test_typedarray.c | 14 +++++++------- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/test/addons-napi/test_symbol/test_symbol.c b/test/addons-napi/test_symbol/test_symbol.c index acb1c7f715409a..87f6cdde671600 100644 --- a/test/addons-napi/test_symbol/test_symbol.c +++ b/test/addons-napi/test_symbol/test_symbol.c @@ -12,7 +12,7 @@ napi_value Test(napi_env env, napi_callback_info info) { NAPI_CALL(env, napi_typeof(env, args[0], &valuetype)); NAPI_ASSERT(env, valuetype == napi_symbol, - "Wrong type of argments. Expects a symbol."); + "Wrong type of arguments. Expects a symbol."); char buffer[128]; size_t buffer_size = 128; diff --git a/test/addons-napi/test_typedarray/test_typedarray.c b/test/addons-napi/test_typedarray/test_typedarray.c index febb45e715cb11..278812f98d5971 100644 --- a/test/addons-napi/test_typedarray/test_typedarray.c +++ b/test/addons-napi/test_typedarray/test_typedarray.c @@ -13,20 +13,20 @@ napi_value Multiply(napi_env env, napi_callback_info info) { NAPI_CALL(env, napi_typeof(env, args[0], &valuetype0)); NAPI_ASSERT(env, valuetype0 == napi_object, - "Wrong type of argments. Expects a typed array as first argument."); + "Wrong type of arguments. Expects a typed array as first argument."); napi_value input_array = args[0]; bool is_typedarray; NAPI_CALL(env, napi_is_typedarray(env, input_array, &is_typedarray)); NAPI_ASSERT(env, is_typedarray, - "Wrong type of argments. Expects a typed array as first argument."); + "Wrong type of arguments. Expects a typed array as first argument."); napi_valuetype valuetype1; NAPI_CALL(env, napi_typeof(env, args[1], &valuetype1)); NAPI_ASSERT(env, valuetype1 == napi_number, - "Wrong type of argments. Expects a number as second argument."); + "Wrong type of arguments. Expects a number as second argument."); double multiplier; NAPI_CALL(env, napi_get_value_double(env, args[1], &multiplier)); @@ -108,26 +108,26 @@ napi_value CreateTypedArray(napi_env env, napi_callback_info info) { NAPI_CALL(env, napi_typeof(env, input_array, &valuetype0)); NAPI_ASSERT(env, valuetype0 == napi_object, - "Wrong type of argments. Expects a typed array as first argument."); + "Wrong type of arguments. Expects a typed array as first argument."); bool is_typedarray; NAPI_CALL(env, napi_is_typedarray(env, input_array, &is_typedarray)); NAPI_ASSERT(env, is_typedarray, - "Wrong type of argments. Expects a typed array as first argument."); + "Wrong type of arguments. 
Expects a typed array as first argument."); napi_valuetype valuetype1; napi_value input_buffer = args[1]; NAPI_CALL(env, napi_typeof(env, input_buffer, &valuetype1)); NAPI_ASSERT(env, valuetype1 == napi_object, - "Wrong type of argments. Expects an array buffer as second argument."); + "Wrong type of arguments. Expects an array buffer as second argument."); bool is_arraybuffer; NAPI_CALL(env, napi_is_arraybuffer(env, input_buffer, &is_arraybuffer)); NAPI_ASSERT(env, is_arraybuffer, - "Wrong type of argments. Expects an array buffer as second argument."); + "Wrong type of arguments. Expects an array buffer as second argument."); napi_typedarray_type type; napi_value in_array_buffer; From 74086e19f2585f0d73aab78c8e682423bc63c241 Mon Sep 17 00:00:00 2001 From: Michael Dawson Date: Tue, 16 Jan 2018 14:05:26 -0500 Subject: [PATCH 194/227] doc: remove uannecessary Require This was the only instance were we said a parameter was required. It is assumed parameters are required unless the doc says they are option. Remove `Required` to make consistent with the rest of the doc Backport-PR-URL: https://github.com/nodejs/node/pull/19447 PR-URL: https://github.com/nodejs/node/pull/18184 Reviewed-By: Colin Ihrig Reviewed-By: Anna Henningsen Reviewed-By: James M Snell Reviewed-By: Rich Trott Reviewed-By: Gireesh Punathil Reviewed-By: Daniel Bevenius --- doc/api/n-api.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/api/n-api.md b/doc/api/n-api.md index 9fc99759101489..c82ace15ed48b7 100644 --- a/doc/api/n-api.md +++ b/doc/api/n-api.md @@ -3281,7 +3281,7 @@ napi_status napi_create_async_work(napi_env env, - `[in] env`: The environment that the API is invoked under. - `[in] async_resource`: An optional object associated with the async work that will be passed to possible async_hooks [`init` hooks][]. -- `[in] async_resource_name`: An identifier for the kind of resource that is +- `[in] async_resource_name`: Identifier for the kind of resource that is being provided for diagnostic information exposed by the `async_hooks` API. - `[in] execute`: The native function which should be called to excute the logic asynchronously. @@ -3380,7 +3380,7 @@ napi_status napi_async_init(napi_env env, - `[in] env`: The environment that the API is invoked under. - `[in] async_resource`: An optional object associated with the async work that will be passed to possible `async_hooks` [`init` hooks][]. -- `[in] async_resource_name`: Required identifier for the kind of resource +- `[in] async_resource_name`: Identifier for the kind of resource that is being provided for diagnostic information exposed by the `async_hooks` API. - `[out] result`: The initialized async context. 
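The `napi_async_init`/`napi_make_callback` pair whose parameters are described above is typically used as follows. This is a minimal illustrative sketch and not part of the patch above; the resource name `"example"`, the function name, and its parameters are placeholders rather than code from this series:

```C
#include <node_api.h>

// Sketch: create an async context, invoke a JS callback under it so that
// async_hooks sees the call, then destroy the context again.
static napi_status CallWithAsyncContext(napi_env env, napi_value recv,
                                        napi_value js_callback) {
  napi_value resource_name;
  napi_status status = napi_create_string_utf8(
      env, "example", NAPI_AUTO_LENGTH, &resource_name);
  if (status != napi_ok) return status;

  napi_async_context context;
  // Passing NULL for the optional async_resource object.
  status = napi_async_init(env, NULL, resource_name, &context);
  if (status != napi_ok) return status;

  napi_value result;
  status = napi_make_callback(env, context, recv, js_callback, 0, NULL, &result);

  napi_async_destroy(env, context);
  return status;
}
```

The context created here is what the `async_resource_name` identifier labels for diagnostic output from the `async_hooks` API.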
From eceb70b584646477f6f4322f8d1b5f89019a19e5 Mon Sep 17 00:00:00 2001 From: Rich Trott Date: Tue, 23 Jan 2018 22:18:38 -0800 Subject: [PATCH 195/227] test: refactor addons-napi/test_exception/test.js * provide block scoping to prevent unintended side effects * remove confusing and unnecessary assertion message * use consisitent `actual, expected` argument order for assertions Backport-PR-URL: https://github.com/nodejs/node/pull/19447 PR-URL: https://github.com/nodejs/node/pull/18340 Reviewed-By: Colin Ihrig Reviewed-By: Jon Moss Reviewed-By: James M Snell --- test/addons-napi/test_exception/test.js | 86 ++++++++++++------------- 1 file changed, 42 insertions(+), 44 deletions(-) diff --git a/test/addons-napi/test_exception/test.js b/test/addons-napi/test_exception/test.js index 8bd2f50b12b15f..83961411df7574 100644 --- a/test/addons-napi/test_exception/test.js +++ b/test/addons-napi/test_exception/test.js @@ -4,49 +4,47 @@ const common = require('../../common'); const test_exception = require(`./build/${common.buildType}/test_exception`); const assert = require('assert'); const theError = new Error('Some error'); -function throwTheError() { - throw theError; + +{ + const throwTheError = () => { throw theError; }; + + // Test that the native side successfully captures the exception + let returnedError = test_exception.returnException(throwTheError); + assert.strictEqual(theError, returnedError); + + // Test that the native side passes the exception through + assert.throws( + () => { test_exception.allowException(throwTheError); }, + (err) => err === theError + ); + + // Test that the exception thrown above was marked as pending + // before it was handled on the JS side + assert.strictEqual(test_exception.wasPending(), true, + 'VM was marked as having an exception pending' + + ' when it was allowed through'); + + // Test that the native side does not capture a non-existing exception + returnedError = test_exception.returnException(common.mustCall()); + assert.strictEqual(returnedError, undefined, + 'Returned error should be undefined when no exception is' + + ` thrown, but ${returnedError} was passed`); } -let caughtError; - -// Test that the native side successfully captures the exception -let returnedError = test_exception.returnException(throwTheError); -assert.strictEqual(theError, returnedError); - -// Test that the native side passes the exception through -assert.throws( - () => { - test_exception.allowException(throwTheError); - }, - function(err) { - return err === theError; - }, - 'Thrown exception was allowed to pass through unhindered' -); - -// Test that the exception thrown above was marked as pending -// before it was handled on the JS side -assert.strictEqual(test_exception.wasPending(), true, - 'VM was marked as having an exception pending' + - ' when it was allowed through'); - -// Test that the native side does not capture a non-existing exception -returnedError = test_exception.returnException(common.mustCall()); -assert.strictEqual(undefined, returnedError, - 'Returned error should be undefined when no exception is' + - ` thrown, but ${returnedError} was passed`); - -// Test that no exception appears that was not thrown by us -try { - test_exception.allowException(common.mustCall()); -} catch (anError) { - caughtError = anError; + +{ + // Test that no exception appears that was not thrown by us + let caughtError; + try { + test_exception.allowException(common.mustCall()); + } catch (anError) { + caughtError = anError; + } + 
assert.strictEqual(caughtError, undefined, + 'No exception originated on the native side, but' + + ` ${caughtError} was passed`); + + // Test that the exception state remains clear when no exception is thrown + assert.strictEqual(test_exception.wasPending(), false, + 'VM was not marked as having an exception pending' + + ' when none was allowed through'); } -assert.strictEqual(undefined, caughtError, - 'No exception originated on the native side, but' + - ` ${caughtError} was passed`); - -// Test that the exception state remains clear when no exception is thrown -assert.strictEqual(test_exception.wasPending(), false, - 'VM was not marked as having an exception pending' + - ' when none was allowed through'); From d9df8cfe77ecf3ddd6cb1da4e608bca5d0748a46 Mon Sep 17 00:00:00 2001 From: Aaron Kau Date: Sat, 27 Jan 2018 14:20:13 -0500 Subject: [PATCH 196/227] n-api: change assert ok check to notStrictEqual. Backport-PR-URL: https://github.com/nodejs/node/pull/19447 PR-URL: https://github.com/nodejs/node/pull/18414 Reviewed-By: Gireesh Punathil Reviewed-By: Luigi Pinca Reviewed-By: James M Snell Reviewed-By: Ruben Bridgewater --- test/addons-napi/test_general/test.js | 16 +++++++--------- 1 file changed, 7 insertions(+), 9 deletions(-) diff --git a/test/addons-napi/test_general/test.js b/test/addons-napi/test_general/test.js index 10b85ef727bb20..15461cef2f6a36 100644 --- a/test/addons-napi/test_general/test.js +++ b/test/addons-napi/test_general/test.js @@ -28,9 +28,9 @@ assert.strictEqual(test_general.testGetPrototype(baseObject), Object.getPrototypeOf(baseObject)); assert.strictEqual(test_general.testGetPrototype(extendedObject), Object.getPrototypeOf(extendedObject)); -assert.ok(test_general.testGetPrototype(baseObject) !== - test_general.testGetPrototype(extendedObject), - 'Prototypes for base and extended should be different'); +// Prototypes for base and extended should be different. +assert.notStrictEqual(test_general.testGetPrototype(baseObject), + test_general.testGetPrototype(extendedObject)); // test version management funcitons // expected version is currently 1 @@ -70,17 +70,15 @@ assert.strictEqual(test_general.derefItemWasCalled(), true, // Assert that wrapping twice fails. const x = {}; test_general.wrap(x); -assert.throws(function() { - test_general.wrap(x); -}, Error); +assert.throws(() => test_general.wrap(x), Error); // Ensure that wrapping, removing the wrap, and then wrapping again works. const y = {}; test_general.wrap(y); test_general.removeWrap(y); -assert.doesNotThrow(function() { - test_general.wrap(y); -}, Error, 'Wrapping twice succeeds if a remove_wrap() separates the instances'); +assert.doesNotThrow(() => test_general.wrap(y), Error, + 'Wrapping twice succeeds if a remove_wrap()' + + ' separates the instances'); // Ensure that removing a wrap and garbage collecting does not fire the // finalize callback. From 59249a17688c147147674f1e6a7b374883681407 Mon Sep 17 00:00:00 2001 From: Ben Wilcox Date: Sat, 27 Jan 2018 14:39:55 -0500 Subject: [PATCH 197/227] test: show pending exception error in napi tests Shows the result of the wasPending in the error message if the assertion fails. 
Backport-PR-URL: https://github.com/nodejs/node/pull/19447 PR-URL: https://github.com/nodejs/node/pull/18413 Reviewed-By: James M Snell Reviewed-By: Rich Trott Reviewed-By: Ruben Bridgewater --- test/addons-napi/test_exception/test.js | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/test/addons-napi/test_exception/test.js b/test/addons-napi/test_exception/test.js index 83961411df7574..787b7d78b1c72b 100644 --- a/test/addons-napi/test_exception/test.js +++ b/test/addons-napi/test_exception/test.js @@ -20,9 +20,10 @@ const theError = new Error('Some error'); // Test that the exception thrown above was marked as pending // before it was handled on the JS side - assert.strictEqual(test_exception.wasPending(), true, - 'VM was marked as having an exception pending' + - ' when it was allowed through'); + const exception_pending = test_exception.wasPending(); + assert.strictEqual(exception_pending, true, + 'Exception not pending as expected,' + + ` .wasPending() returned ${exception_pending}`); // Test that the native side does not capture a non-existing exception returnedError = test_exception.returnException(common.mustCall()); @@ -44,7 +45,8 @@ const theError = new Error('Some error'); ` ${caughtError} was passed`); // Test that the exception state remains clear when no exception is thrown - assert.strictEqual(test_exception.wasPending(), false, - 'VM was not marked as having an exception pending' + - ' when none was allowed through'); + const exception_pending = test_exception.wasPending(); + assert.strictEqual(exception_pending, false, + 'Exception state did not remain clear as expected,' + + ` .wasPending() returned ${exception_pending}`); } From b565ba2d820f3d033548a326a137d1e2e2cd9f13 Mon Sep 17 00:00:00 2001 From: Gabriel Schulhof Date: Mon, 22 Jan 2018 23:13:02 -0500 Subject: [PATCH 198/227] n-api: implement wrapping using private properties Backport-PR-URL: https://github.com/nodejs/node/pull/19447 PR-URL: https://github.com/nodejs/node/pull/18311 Reviewed-By: Anna Henningsen Reviewed-By: Michael Dawson Fixes: https://github.com/nodejs/node/issues/14367 --- src/env.h | 2 + src/node_api.cc | 160 ++++++++++++---------------------------- src/node_api_backport.h | 1 + 3 files changed, 52 insertions(+), 111 deletions(-) diff --git a/src/env.h b/src/env.h index 36ca7004ad748c..0045a4fa79c95e 100644 --- a/src/env.h +++ b/src/env.h @@ -62,6 +62,8 @@ namespace node { V(npn_buffer_private_symbol, "node:npnBuffer") \ V(processed_private_symbol, "node:processed") \ V(selected_npn_buffer_private_symbol, "node:selectedNpnBuffer") \ + V(napi_env, "node:napi:env") \ + V(napi_wrapper, "node:napi:wrapper") \ // Strings are per-isolate primitives but Environment proxies them // for the sake of convenience. Strings should be ASCII-only. 
diff --git a/src/node_api.cc b/src/node_api.cc index 93cce1909f0860..dc32986e3adfc3 100644 --- a/src/node_api.cc +++ b/src/node_api.cc @@ -20,7 +20,6 @@ #include "node_internals.h" #include "env-inl.h" #include "node_api_backport.h" -#include "util.h" static napi_status napi_set_last_error(napi_env env, napi_status error_code, @@ -53,6 +52,9 @@ struct napi_env__ { uv_loop_t* loop = nullptr; }; +#define NAPI_PRIVATE_KEY(context, suffix) \ + (node::Environment::GetCurrent((context))->napi_ ## suffix()) + #define ENV_OBJECT_TEMPLATE(env, prefix, destination, field_count) \ do { \ if ((env)->prefix ## _template.IsEmpty()) { \ @@ -383,6 +385,10 @@ class Reference : private Finalizer { } public: + void* Data() { + return _finalize_data; + } + static Reference* New(napi_env env, v8::Local value, uint32_t initial_refcount, @@ -742,45 +748,6 @@ v8::Local CreateAccessorCallbackData(napi_env env, return cbdata; } -int kWrapperFields = 3; - -// Pointer used to identify items wrapped by N-API. Used by FindWrapper and -// napi_wrap(). -const char napi_wrap_name[] = "N-API Wrapper"; - -// Search the object's prototype chain for the wrapper object. Usually the -// wrapper would be the first in the chain, but it is OK for other objects to -// be inserted in the prototype chain. -static -bool FindWrapper(v8::Local obj, - v8::Local* result = nullptr, - v8::Local* parent = nullptr) { - v8::Local wrapper = obj; - - do { - v8::Local proto = wrapper->GetPrototype(); - if (proto.IsEmpty() || !proto->IsObject()) { - return false; - } - if (parent != nullptr) { - *parent = wrapper; - } - wrapper = proto.As(); - if (wrapper->InternalFieldCount() == kWrapperFields) { - v8::Local external = wrapper->GetInternalField(1); - if (external->IsExternal() && - external.As()->Value() == v8impl::napi_wrap_name) { - break; - } - } - } while (true); - - if (result != nullptr) { - *result = wrapper; - } - return true; -} - static void DeleteEnv(napi_env env, void* data, void* hint) { delete env; } @@ -797,11 +764,8 @@ napi_env GetEnv(v8::Local context) { // because we need to stop hard if either of them is empty. // // Re https://github.com/nodejs/node/pull/14217#discussion_r128775149 - auto key = v8::Private::ForApi(isolate, - v8::String::NewFromOneByte(isolate, - reinterpret_cast("N-API Environment"), - v8::NewStringType::kInternalized).ToLocalChecked()); - auto value = global->GetPrivate(context, key).ToLocalChecked(); + auto value = global->GetPrivate(context, NAPI_PRIVATE_KEY(context, env)) + .ToLocalChecked(); if (value->IsExternal()) { result = static_cast(value.As()->Value()); @@ -811,7 +775,8 @@ napi_env GetEnv(v8::Local context) { // We must also stop hard if the result of assigning the env to the global // is either nothing or false. - CHECK(global->SetPrivate(context, key, external).FromJust()); + CHECK(global->SetPrivate(context, NAPI_PRIVATE_KEY(context, env), external) + .FromJust()); // Create a self-destructing reference to external that will get rid of the // napi_env when external goes out of scope. 
@@ -821,28 +786,46 @@ napi_env GetEnv(v8::Local context) { return result; } +enum UnwrapAction { + KeepWrap, + RemoveWrap +}; + static napi_status Unwrap(napi_env env, napi_value js_object, void** result, - v8::Local* wrapper, - v8::Local* parent = nullptr) { + UnwrapAction action) { + NAPI_PREAMBLE(env); CHECK_ARG(env, js_object); - CHECK_ARG(env, result); + if (action == KeepWrap) { + CHECK_ARG(env, result); + } + + v8::Isolate* isolate = env->isolate; + v8::Local context = isolate->GetCurrentContext(); v8::Local value = v8impl::V8LocalValueFromJsValue(js_object); RETURN_STATUS_IF_FALSE(env, value->IsObject(), napi_invalid_arg); v8::Local obj = value.As(); - RETURN_STATUS_IF_FALSE( - env, v8impl::FindWrapper(obj, wrapper, parent), napi_invalid_arg); + auto val = obj->GetPrivate(context, NAPI_PRIVATE_KEY(context, wrapper)) + .ToLocalChecked(); + RETURN_STATUS_IF_FALSE(env, val->IsExternal(), napi_invalid_arg); + Reference* reference = + static_cast(val.As()->Value()); - v8::Local unwrappedValue = (*wrapper)->GetInternalField(0); - RETURN_STATUS_IF_FALSE(env, unwrappedValue->IsExternal(), napi_invalid_arg); + if (result) { + *result = reference->Data(); + } - *result = unwrappedValue.As()->Value(); + if (action == RemoveWrap) { + CHECK(obj->DeletePrivate(context, NAPI_PRIVATE_KEY(context, wrapper)) + .FromJust()); + Reference::Delete(reference); + } - return napi_ok; + return GET_RETURN_STATUS(env); } static @@ -2399,26 +2382,9 @@ napi_status napi_wrap(napi_env env, v8::Local obj = value.As(); // If we've already wrapped this object, we error out. - RETURN_STATUS_IF_FALSE(env, !v8impl::FindWrapper(obj), napi_invalid_arg); - - // Create a wrapper object with an internal field to hold the wrapped pointer - // and a second internal field to identify the owner as N-API. - v8::Local wrapper_template; - ENV_OBJECT_TEMPLATE(env, wrap, wrapper_template, v8impl::kWrapperFields); - - auto maybe_object = wrapper_template->NewInstance(context); - CHECK_MAYBE_EMPTY(env, maybe_object, napi_generic_failure); - v8::Local wrapper = maybe_object.ToLocalChecked(); - - // Store the pointer as an external in the wrapper. - wrapper->SetInternalField(0, v8::External::New(isolate, native_object)); - wrapper->SetInternalField(1, v8::External::New(isolate, - reinterpret_cast(const_cast(v8impl::napi_wrap_name)))); - - // Insert the wrapper into the object's prototype chain. - v8::Local proto = obj->GetPrototype(); - CHECK(wrapper->SetPrototype(context, proto).FromJust()); - CHECK(obj->SetPrototype(context, wrapper).FromJust()); + RETURN_STATUS_IF_FALSE(env, + !obj->HasPrivate(context, NAPI_PRIVATE_KEY(context, wrapper)).FromJust(), + napi_invalid_arg); v8impl::Reference* reference = nullptr; if (result != nullptr) { @@ -2430,52 +2396,24 @@ napi_status napi_wrap(napi_env env, reference = v8impl::Reference::New( env, obj, 0, false, finalize_cb, native_object, finalize_hint); *result = reinterpret_cast(reference); - } else if (finalize_cb != nullptr) { - // Create a self-deleting reference just for the finalize callback. - reference = v8impl::Reference::New( - env, obj, 0, true, finalize_cb, native_object, finalize_hint); + } else { + // Create a self-deleting reference. + reference = v8impl::Reference::New(env, obj, 0, true, finalize_cb, + native_object, finalize_cb == nullptr ? 
nullptr : finalize_hint); } - if (reference != nullptr) { - wrapper->SetInternalField(2, v8::External::New(isolate, reference)); - } + CHECK(obj->SetPrivate(context, NAPI_PRIVATE_KEY(context, wrapper), + v8::External::New(isolate, reference)).FromJust()); return GET_RETURN_STATUS(env); } napi_status napi_unwrap(napi_env env, napi_value obj, void** result) { - // Omit NAPI_PREAMBLE and GET_RETURN_STATUS because V8 calls here cannot throw - // JS exceptions. - CHECK_ENV(env); - v8::Local wrapper; - return napi_set_last_error(env, v8impl::Unwrap(env, obj, result, &wrapper)); + return v8impl::Unwrap(env, obj, result, v8impl::KeepWrap); } napi_status napi_remove_wrap(napi_env env, napi_value obj, void** result) { - NAPI_PREAMBLE(env); - v8::Local wrapper; - v8::Local parent; - napi_status status = v8impl::Unwrap(env, obj, result, &wrapper, &parent); - if (status != napi_ok) { - return napi_set_last_error(env, status); - } - - v8::Local external = wrapper->GetInternalField(2); - if (external->IsExternal()) { - v8impl::Reference::Delete( - static_cast(external.As()->Value())); - } - - if (!parent.IsEmpty()) { - v8::Maybe maybe = parent->SetPrototype( - env->isolate->GetCurrentContext(), wrapper->GetPrototype()); - CHECK_MAYBE_NOTHING(env, maybe, napi_generic_failure); - if (!maybe.FromMaybe(false)) { - return napi_set_last_error(env, napi_generic_failure); - } - } - - return GET_RETURN_STATUS(env); + return v8impl::Unwrap(env, obj, result, v8impl::RemoveWrap); } napi_status napi_create_external(napi_env env, diff --git a/src/node_api_backport.h b/src/node_api_backport.h index 968169ef3bd05f..1333b33eddfe14 100644 --- a/src/node_api_backport.h +++ b/src/node_api_backport.h @@ -6,6 +6,7 @@ // N-API. #include "node_internals.h" +#include "env-inl.h" namespace node { From c09a7134e78ce435db7aa4f2ef73a71825eac7dd Mon Sep 17 00:00:00 2001 From: Ben Noordhuis Date: Fri, 2 Feb 2018 17:27:01 +0100 Subject: [PATCH 199/227] n-api: wrap control flow macro in do/while Make CHECK_ENV() safe to use in the following context: if (condition) CHECK_ENV(env); else something_else(); Backport-PR-URL: https://github.com/nodejs/node/pull/19447 PR-URL: https://github.com/nodejs/node/pull/18532 Reviewed-By: Michael Dawson Reviewed-By: James M Snell Reviewed-By: Anna Henningsen Reviewed-By: Colin Ihrig Reviewed-By: Ali Ijaz Sheikh Reviewed-By: Ruben Bridgewater Reviewed-By: Tiancheng "Timothy" Gu Reviewed-By: Daniel Bevenius --- src/node_api.cc | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/src/node_api.cc b/src/node_api.cc index dc32986e3adfc3..58803b30bcc96b 100644 --- a/src/node_api.cc +++ b/src/node_api.cc @@ -75,10 +75,12 @@ struct napi_env__ { } \ } while (0) -#define CHECK_ENV(env) \ - if ((env) == nullptr) { \ - return napi_invalid_arg; \ - } +#define CHECK_ENV(env) \ + do { \ + if ((env) == nullptr) { \ + return napi_invalid_arg; \ + } \ + } while (0) #define CHECK_ARG(env, arg) \ RETURN_STATUS_IF_FALSE((env), ((arg) != nullptr), napi_invalid_arg) From 24a27911737658216fc11afa98c22e47a35cfd10 Mon Sep 17 00:00:00 2001 From: Michael Dawson Date: Fri, 2 Feb 2018 10:33:01 -0500 Subject: [PATCH 200/227] doc: remove usage of you in n-api doc We avoid using 'you' in the documentation based on our guidelines. Remove usage in the n-api doc. 
Backport-PR-URL: https://github.com/nodejs/node/pull/19447 PR-URL: https://github.com/nodejs/node/pull/18528 Reviewed-By: James M Snell Reviewed-By: Colin Ihrig Reviewed-By: Ruben Bridgewater Reviewed-By: Gireesh Punathil Reviewed-By: Daniel Bevenius --- doc/api/n-api.md | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/doc/api/n-api.md b/doc/api/n-api.md index c82ace15ed48b7..6084fab87a6709 100644 --- a/doc/api/n-api.md +++ b/doc/api/n-api.md @@ -3159,11 +3159,8 @@ required in order to enable correct proper of the reference. Afterward, additional manipulation of the wrapper's prototype chain may cause `napi_unwrap()` to fail. -*Note*: Calling `napi_wrap()` a second time on an object that already has a -native instance associated with it by virtue of a previous call to -`napi_wrap()` will cause an error to be returned. If you wish to associate -another native instance with the given object, call `napi_remove_wrap()` on it -first. +Calling napi_wrap() a second time on an object will return an error. To associate +another native instance with the object, use napi_remove_wrap() first. ### napi_unwrap +```C +NAPI_EXTERN napi_status napi_open_callback_scope(napi_env env, + napi_value resource_object, + napi_async_context context, + napi_callback_scope* result) +``` +- `[in] env`: The environment that the API is invoked under. +- `[in] resource_object`: An optional object associated with the async work + that will be passed to possible async_hooks [`init` hooks][]. +- `[in] context`: Context for the async operation that is +invoking the callback. This should be a value previously obtained +from [`napi_async_init`][]. +- `[out] result`: The newly created scope. + +There are cases(for example resolving promises) where it is +necessary to have the equivalent of the scope associated with a callback +in place when making certain N-API calls. If there is no other script on +the stack the [`napi_open_callback_scope`][] and +[`napi_close_callback_scope`][] functions can be used to open/close +the required scope. + +### *napi_close_callback_scope* + +```C +NAPI_EXTERN napi_status napi_close_callback_scope(napi_env env, + napi_callback_scope scope) +``` +- `[in] env`: The environment that the API is invoked under. +- `[in] scope`: The scope to be closed. 
+ ## Version Management ### napi_get_node_version @@ -3723,6 +3759,7 @@ NAPI_EXTERN napi_status napi_get_uv_event_loop(napi_env env, [`napi_async_init`]: #n_api_napi_async_init [`napi_cancel_async_work`]: #n_api_napi_cancel_async_work [`napi_close_escapable_handle_scope`]: #n_api_napi_close_escapable_handle_scope +[`napi_close_callback_scope`]: #n_api_napi_close_callback_scope [`napi_close_handle_scope`]: #n_api_napi_close_handle_scope [`napi_create_async_work`]: #n_api_napi_create_async_work [`napi_create_error`]: #n_api_napi_create_error @@ -3749,6 +3786,7 @@ NAPI_EXTERN napi_status napi_get_uv_event_loop(napi_env env, [`napi_get_last_error_info`]: #n_api_napi_get_last_error_info [`napi_get_and_clear_last_exception`]: #n_api_napi_get_and_clear_last_exception [`napi_make_callback`]: #n_api_napi_make_callback +[`napi_open_callback_scope`]: #n_api_napi_open_callback_scope [`napi_open_escapable_handle_scope`]: #n_api_napi_open_escapable_handle_scope [`napi_open_handle_scope`]: #n_api_napi_open_handle_scope [`napi_property_descriptor`]: #n_api_napi_property_descriptor diff --git a/src/env-inl.h b/src/env-inl.h index 45a327de16931d..9b4000d75b180d 100644 --- a/src/env-inl.h +++ b/src/env-inl.h @@ -111,7 +111,7 @@ inline Environment::AsyncCallbackScope::~AsyncCallbackScope() { env_->makecallback_cntr_--; } -inline bool Environment::AsyncCallbackScope::in_makecallback() { +inline bool Environment::AsyncCallbackScope::in_makecallback() const { return env_->makecallback_cntr_ > 1; } diff --git a/src/env.h b/src/env.h index 0045a4fa79c95e..593f38964c7839 100644 --- a/src/env.h +++ b/src/env.h @@ -297,7 +297,7 @@ class Environment { explicit AsyncCallbackScope(Environment* env); ~AsyncCallbackScope(); - inline bool in_makecallback(); + inline bool in_makecallback() const; private: Environment* env_; diff --git a/src/node_api.cc b/src/node_api.cc index 58803b30bcc96b..a5902db0454eba 100644 --- a/src/node_api.cc +++ b/src/node_api.cc @@ -49,6 +49,7 @@ struct napi_env__ { bool has_instance_available; napi_extended_error_info last_error; int open_handle_scopes = 0; + int open_callback_scopes = 0; uv_loop_t* loop = nullptr; }; @@ -261,6 +262,18 @@ V8EscapableHandleScopeFromJsEscapableHandleScope( return reinterpret_cast(s); } +static +napi_callback_scope JsCallbackScopeFromV8CallbackScope( + node::CallbackScope* s) { + return reinterpret_cast(s); +} + +static +node::CallbackScope* V8CallbackScopeFromJsCallbackScope( + napi_callback_scope s) { + return reinterpret_cast(s); +} + //=== Conversion between V8 Handles and napi_value ======================== // This asserts v8::Local<> will always be implemented with a single @@ -552,6 +565,7 @@ class CallbackWrapperBase : public CallbackWrapper { napi_clear_last_error(env); int open_handle_scopes = env->open_handle_scopes; + int open_callback_scopes = env->open_callback_scopes; napi_value result = cb(env, cbinfo_wrapper); @@ -560,6 +574,7 @@ class CallbackWrapperBase : public CallbackWrapper { } CHECK_EQ(env->open_handle_scopes, open_handle_scopes); + CHECK_EQ(env->open_callback_scopes, open_callback_scopes); if (!env->last_exception.IsEmpty()) { isolate->ThrowException( @@ -917,7 +932,8 @@ const char* error_messages[] = {nullptr, "An exception is pending", "The async work item was cancelled", "napi_escape_handle already called on scope", - "Invalid handle scope usage"}; + "Invalid handle scope usage", + "Invalid callback scope usage"}; static inline napi_status napi_clear_last_error(napi_env env) { env->last_error.error_code = napi_ok; @@ -948,9 +964,9 @@ 
napi_status napi_get_last_error_info(napi_env env, // We don't have a napi_status_last as this would result in an ABI // change each time a message was added. static_assert( - node::arraysize(error_messages) == napi_handle_scope_mismatch + 1, + node::arraysize(error_messages) == napi_callback_scope_mismatch + 1, "Count of error messages must match count of error values"); - CHECK_LE(env->last_error.error_code, napi_handle_scope_mismatch); + CHECK_LE(env->last_error.error_code, napi_callback_scope_mismatch); // Wait until someone requests the last error information to fetch the error // message string @@ -2639,6 +2655,46 @@ napi_status napi_escape_handle(napi_env env, return napi_set_last_error(env, napi_escape_called_twice); } +napi_status napi_open_callback_scope(napi_env env, + napi_value resource_object, + napi_async_context async_context_handle, + napi_callback_scope* result) { + // Omit NAPI_PREAMBLE and GET_RETURN_STATUS because V8 calls here cannot throw + // JS exceptions. + CHECK_ENV(env); + CHECK_ARG(env, result); + + v8::Local context = env->isolate->GetCurrentContext(); + + node::async_context* node_async_context = + reinterpret_cast(async_context_handle); + + v8::Local resource; + CHECK_TO_OBJECT(env, context, resource, resource_object); + + *result = v8impl::JsCallbackScopeFromV8CallbackScope( + new node::CallbackScope(env->isolate, + resource, + *node_async_context)); + + env->open_callback_scopes++; + return napi_clear_last_error(env); +} + +napi_status napi_close_callback_scope(napi_env env, napi_callback_scope scope) { + // Omit NAPI_PREAMBLE and GET_RETURN_STATUS because V8 calls here cannot throw + // JS exceptions. + CHECK_ENV(env); + CHECK_ARG(env, scope); + if (env->open_callback_scopes == 0) { + return napi_callback_scope_mismatch; + } + + env->open_callback_scopes--; + delete v8impl::V8CallbackScopeFromJsCallbackScope(scope); + return napi_clear_last_error(env); +} + napi_status napi_new_instance(napi_env env, napi_value constructor, size_t argc, diff --git a/src/node_api.h b/src/node_api.h index 8e5eef8a47728f..3b741c27b976ae 100644 --- a/src/node_api.h +++ b/src/node_api.h @@ -424,6 +424,14 @@ NAPI_EXTERN napi_status napi_escape_handle(napi_env env, napi_value escapee, napi_value* result); +NAPI_EXTERN napi_status napi_open_callback_scope(napi_env env, + napi_value resource_object, + napi_async_context context, + napi_callback_scope* result); + +NAPI_EXTERN napi_status napi_close_callback_scope(napi_env env, + napi_callback_scope scope); + // Methods to support error handling NAPI_EXTERN napi_status napi_throw(napi_env env, napi_value error); NAPI_EXTERN napi_status napi_throw_error(napi_env env, diff --git a/src/node_api_types.h b/src/node_api_types.h index 230c1f4ae3446f..76f38802e83e2e 100644 --- a/src/node_api_types.h +++ b/src/node_api_types.h @@ -15,6 +15,7 @@ typedef struct napi_value__ *napi_value; typedef struct napi_ref__ *napi_ref; typedef struct napi_handle_scope__ *napi_handle_scope; typedef struct napi_escapable_handle_scope__ *napi_escapable_handle_scope; +typedef struct napi_callback_scope__ *napi_callback_scope; typedef struct napi_callback_info__ *napi_callback_info; typedef struct napi_async_context__ *napi_async_context; typedef struct napi_async_work__ *napi_async_work; @@ -70,7 +71,8 @@ typedef enum { napi_pending_exception, napi_cancelled, napi_escape_called_twice, - napi_handle_scope_mismatch + napi_handle_scope_mismatch, + napi_callback_scope_mismatch } napi_status; typedef napi_value (*napi_callback)(napi_env env, diff --git 
a/test/addons-napi/test_callback_scope/binding.cc b/test/addons-napi/test_callback_scope/binding.cc new file mode 100644 index 00000000000000..e6631b6ac7bb52 --- /dev/null +++ b/test/addons-napi/test_callback_scope/binding.cc @@ -0,0 +1,138 @@ +#include "node_api.h" +#include "uv.h" +#include "../common.h" + +namespace { + +// the test needs to fake out the async structure, so we need to use +// the raw structure here and then cast as done behind the scenes +// in napi calls. +struct async_context { + double async_id; + double trigger_async_id; +}; + + +napi_value RunInCallbackScope(napi_env env, napi_callback_info info) { + size_t argc; + napi_value args[4]; + + NAPI_CALL(env, napi_get_cb_info(env, info, &argc, nullptr, nullptr, nullptr)); + NAPI_ASSERT(env, argc == 4 , "Wrong number of arguments"); + + NAPI_CALL(env, napi_get_cb_info(env, info, &argc, args, nullptr, nullptr)); + + napi_valuetype valuetype; + NAPI_CALL(env, napi_typeof(env, args[0], &valuetype)); + NAPI_ASSERT(env, valuetype == napi_object, + "Wrong type of arguments. Expects an object as first argument."); + + NAPI_CALL(env, napi_typeof(env, args[1], &valuetype)); + NAPI_ASSERT(env, valuetype == napi_number, + "Wrong type of arguments. Expects a number as second argument."); + + NAPI_CALL(env, napi_typeof(env, args[2], &valuetype)); + NAPI_ASSERT(env, valuetype == napi_number, + "Wrong type of arguments. Expects a number as third argument."); + + NAPI_CALL(env, napi_typeof(env, args[3], &valuetype)); + NAPI_ASSERT(env, valuetype == napi_function, + "Wrong type of arguments. Expects a function as third argument."); + + struct async_context context; + NAPI_CALL(env, napi_get_value_double(env, args[1], &context.async_id)); + NAPI_CALL(env, + napi_get_value_double(env, args[2], &context.trigger_async_id)); + + napi_callback_scope scope = nullptr; + NAPI_CALL( + env, + napi_open_callback_scope(env, + args[0], + reinterpret_cast(&context), + &scope)); + + // if the function has an exception pending after the call that is ok + // so we don't use NAPI_CALL as we must close the callback scope regardless + napi_value result = nullptr; + napi_status function_call_result = + napi_call_function(env, args[0], args[3], 0, nullptr, &result); + if (function_call_result != napi_ok) { + GET_AND_THROW_LAST_ERROR((env)); + } + + NAPI_CALL(env, napi_close_callback_scope(env, scope)); + + return result; +} + +static napi_env shared_env = nullptr; +static napi_deferred deferred = nullptr; + +static void Callback(uv_work_t* req, int ignored) { + napi_env env = shared_env; + + napi_handle_scope handle_scope = nullptr; + NAPI_CALL_RETURN_VOID(env, napi_open_handle_scope(env, &handle_scope)); + + napi_value resource_name; + NAPI_CALL_RETURN_VOID(env, napi_create_string_utf8( + env, "test", NAPI_AUTO_LENGTH, &resource_name)); + napi_async_context context; + NAPI_CALL_RETURN_VOID(env, + napi_async_init(env, nullptr, resource_name, &context)); + + napi_value resource_object; + NAPI_CALL_RETURN_VOID(env, napi_create_object(env, &resource_object)); + + napi_value undefined_value; + NAPI_CALL_RETURN_VOID(env, napi_get_undefined(env, &undefined_value)); + + napi_callback_scope scope = nullptr; + NAPI_CALL_RETURN_VOID(env, napi_open_callback_scope(env, + resource_object, + context, + &scope)); + + NAPI_CALL_RETURN_VOID(env, + napi_resolve_deferred(env, deferred, undefined_value)); + + NAPI_CALL_RETURN_VOID(env, napi_close_callback_scope(env, scope)); + + NAPI_CALL_RETURN_VOID(env, napi_close_handle_scope(env, handle_scope)); + delete req; +} + +napi_value 
TestResolveAsync(napi_env env, napi_callback_info info) { + napi_value promise = nullptr; + if (deferred == nullptr) { + shared_env = env; + NAPI_CALL(env, napi_create_promise(env, &deferred, &promise)); + + uv_loop_t* loop = nullptr; + NAPI_CALL(env, napi_get_uv_event_loop(env, &loop)); + + uv_work_t* req = new uv_work_t(); + uv_queue_work(loop, + req, + [](uv_work_t*) {}, + Callback); + } + return promise; +} + +napi_value Init(napi_env env, napi_value exports) { + napi_property_descriptor descriptors[] = { + DECLARE_NAPI_PROPERTY("runInCallbackScope", RunInCallbackScope), + DECLARE_NAPI_PROPERTY("testResolveAsync", TestResolveAsync) + }; + + NAPI_CALL(env, napi_define_properties( + env, exports, sizeof(descriptors) / sizeof(*descriptors), descriptors)); + + return exports; +} + +} // anonymous namespace + +NAPI_MODULE(NODE_GYP_MODULE_NAME, Init) diff --git a/test/addons-napi/test_callback_scope/binding.gyp b/test/addons-napi/test_callback_scope/binding.gyp new file mode 100644 index 00000000000000..7ede63d94a0d77 --- /dev/null +++ b/test/addons-napi/test_callback_scope/binding.gyp @@ -0,0 +1,9 @@ +{ + 'targets': [ + { + 'target_name': 'binding', + 'defines': [ 'V8_DEPRECATION_WARNINGS=1' ], + 'sources': [ 'binding.cc' ] + } + ] +} diff --git a/test/addons-napi/test_callback_scope/test-resolve-async.js b/test/addons-napi/test_callback_scope/test-resolve-async.js new file mode 100644 index 00000000000000..e9f4b9044c0154 --- /dev/null +++ b/test/addons-napi/test_callback_scope/test-resolve-async.js @@ -0,0 +1,13 @@ +'use strict'; + +const common = require('../../common'); +const assert = require('assert'); +const { testResolveAsync } = require(`./build/${common.buildType}/binding`); + +let called = false; +testResolveAsync().then(common.mustCall(() => { + called = true; +})); + +setTimeout(common.mustCall(() => { assert(called); }), + common.platformTimeout(20)); diff --git a/test/addons-napi/test_callback_scope/test.js b/test/addons-napi/test_callback_scope/test.js new file mode 100644 index 00000000000000..2f2efe5f47b98a --- /dev/null +++ b/test/addons-napi/test_callback_scope/test.js @@ -0,0 +1,17 @@ +'use strict'; + +const common = require('../../common'); +const assert = require('assert'); +const { runInCallbackScope } = require(`./build/${common.buildType}/binding`); + +assert.strictEqual(runInCallbackScope({}, 0, 0, () => 42), 42); + +{ + process.once('uncaughtException', common.mustCall((err) => { + assert.strictEqual(err.message, 'foo'); + })); + + runInCallbackScope({}, 0, 0, () => { + throw new Error('foo'); + }); +} From 516c287f8ee758aa286fb6e22743fa3316ee92dd Mon Sep 17 00:00:00 2001 From: Gabriel Schulhof Date: Fri, 2 Feb 2018 20:34:19 -0500 Subject: [PATCH 203/227] n-api: remove extra reference from test Backport-PR-URL: https://github.com/nodejs/node/pull/19447 PR-URL: https://github.com/nodejs/node/pull/18542 Reviewed-By: Michael Dawson Reviewed-By: Tiancheng "Timothy" Gu Reviewed-By: Colin Ihrig --- test/addons-napi/test_constructor/test_constructor.c | 3 --- 1 file changed, 3 deletions(-) diff --git a/test/addons-napi/test_constructor/test_constructor.c b/test/addons-napi/test_constructor/test_constructor.c index 43623d8b60a2ef..bed61cc55cd5e9 100644 --- a/test/addons-napi/test_constructor/test_constructor.c +++ b/test/addons-napi/test_constructor/test_constructor.c @@ -3,7 +3,6 @@ static double value_ = 1; static double static_value_ = 10; -napi_ref constructor_; napi_value GetValue(napi_env env, napi_callback_info info) { size_t argc = 
0; @@ -80,8 +79,6 @@ napi_value Init(napi_env env, napi_value exports) { NAPI_CALL(env, napi_define_class(env, "MyObject", NAPI_AUTO_LENGTH, New, NULL, sizeof(properties)/sizeof(*properties), properties, &cons)); - NAPI_CALL(env, napi_create_reference(env, cons, 1, &constructor_)); - return cons; } From 9719b831a3bdf6f92ed7de18ec1677a5a20c75a4 Mon Sep 17 00:00:00 2001 From: Vse Mozhet Byt Date: Tue, 6 Feb 2018 02:00:12 +0200 Subject: [PATCH 204/227] doc: fix typo in n-api.md Backport-PR-URL: https://github.com/nodejs/node/pull/19447 PR-URL: https://github.com/nodejs/node/pull/18590 Reviewed-By: Jon Moss Reviewed-By: Weijia Wang Reviewed-By: Colin Ihrig Reviewed-By: Rich Trott Reviewed-By: Daniel Bevenius Reviewed-By: Yuta Hiroto --- doc/api/n-api.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/api/n-api.md b/doc/api/n-api.md index 388fa0f95d0086..d965f3a4693884 100644 --- a/doc/api/n-api.md +++ b/doc/api/n-api.md @@ -3462,7 +3462,7 @@ invoking the callback. This should be a value previously obtained from [`napi_async_init`][]. - `[out] result`: The newly created scope. -There are cases(for example resolving promises) where it is +There are cases (for example resolving promises) where it is necessary to have the equivalent of the scope associated with a callback in place when making certain N-API calls. If there is no other script on the stack the [`napi_open_callback_scope`][] and From 7f37dc9c48513d2bb8d394705317b5a2e41cb2cb Mon Sep 17 00:00:00 2001 From: Bhavani Shankar Date: Thu, 1 Feb 2018 15:32:41 +0530 Subject: [PATCH 205/227] test: improve error message output Backport-PR-URL: https://github.com/nodejs/node/pull/19447 PR-URL: https://github.com/nodejs/node/pull/18498 Reviewed-By: Ruben Bridgewater Reviewed-By: Anatoli Papirovski Reviewed-By: Luigi Pinca Reviewed-By: James M Snell Reviewed-By: Gireesh Punathil Reviewed-By: Michael Dawson --- test/addons-napi/test_general/test.js | 22 ++++++++++++++-------- 1 file changed, 14 insertions(+), 8 deletions(-) diff --git a/test/addons-napi/test_general/test.js b/test/addons-napi/test_general/test.js index 15461cef2f6a36..d43a1c69ba0c43 100644 --- a/test/addons-napi/test_general/test.js +++ b/test/addons-napi/test_general/test.js @@ -63,22 +63,25 @@ let w = {}; test_general.wrap(w); w = null; global.gc(); -assert.strictEqual(test_general.derefItemWasCalled(), true, +const derefItemWasCalled = test_general.derefItemWasCalled(); +assert.strictEqual(derefItemWasCalled, true, 'deref_item() was called upon garbage collecting a ' + - 'wrapped object'); + 'wrapped object. test_general.derefItemWasCalled() ' + + `returned ${derefItemWasCalled}`); + // Assert that wrapping twice fails. const x = {}; test_general.wrap(x); -assert.throws(() => test_general.wrap(x), Error); +common.expectsError(() => test_general.wrap(x), + { type: Error, message: 'Invalid argument' }); // Ensure that wrapping, removing the wrap, and then wrapping again works. const y = {}; test_general.wrap(y); test_general.removeWrap(y); -assert.doesNotThrow(() => test_general.wrap(y), Error, - 'Wrapping twice succeeds if a remove_wrap()' + - ' separates the instances'); +// Wrapping twice succeeds if a remove_wrap() separates the instances +assert.doesNotThrow(() => test_general.wrap(y)); // Ensure that removing a wrap and garbage collecting does not fire the // finalize callback. 
@@ -87,8 +90,11 @@ test_general.testFinalizeWrap(z); test_general.removeWrap(z); z = null; global.gc(); -assert.strictEqual(test_general.finalizeWasCalled(), false, - 'finalize callback was not called upon garbage collection'); +const finalizeWasCalled = test_general.finalizeWasCalled(); +assert.strictEqual(finalizeWasCalled, false, + 'finalize callback was not called upon garbage collection.' + + ' test_general.finalizeWasCalled() ' + + `returned ${finalizeWasCalled}`); // test napi_adjust_external_memory const adjustedValue = test_general.testAdjustExternalMemory(); From c90b77ed5d3c83e8d02e37e6a158c91644d156d9 Mon Sep 17 00:00:00 2001 From: Jack Horton Date: Mon, 5 Feb 2018 11:00:33 -0800 Subject: [PATCH 206/227] test: convert new tests to use error types Backport-PR-URL: https://github.com/nodejs/node/pull/19447 PR-URL: https://github.com/nodejs/node/pull/18581 Reviewed-By: Ruben Bridgewater Reviewed-By: Colin Ihrig Reviewed-By: Luigi Pinca --- test/addons-napi/test_typedarray/test.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/test/addons-napi/test_typedarray/test.js b/test/addons-napi/test_typedarray/test.js index 4a4e79ebe7bcdb..4c139d92200fe3 100644 --- a/test/addons-napi/test_typedarray/test.js +++ b/test/addons-napi/test_typedarray/test.js @@ -60,7 +60,7 @@ arrayTypes.forEach((currentType) => { const template = Reflect.construct(currentType, buffer); assert.throws(() => { test_typedarray.CreateTypedArray(template, buffer, 0, 136); - }, /Invalid typed array length/); + }, RangeError); }); const nonByteArrayTypes = [ Int16Array, Uint16Array, Int32Array, Uint32Array, @@ -71,5 +71,5 @@ nonByteArrayTypes.forEach((currentType) => { test_typedarray.CreateTypedArray(template, buffer, currentType.BYTES_PER_ELEMENT + 1, 1); console.log(`start of offset ${currentType}`); - }, /start offset of/); + }, RangeError); }); From e9e5d56121f33f9941ea9a1a77cf1f414fcaa652 Mon Sep 17 00:00:00 2001 From: Aonghus O Nia Date: Thu, 8 Feb 2018 17:01:23 -0500 Subject: [PATCH 207/227] doc: fix exporting a function example MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Missing the length argument in napi_create_function. 
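For reference, a self-contained version of the corrected example reads as follows; the body of `Method` is an illustrative stand-in rather than code taken from the original document:

```C
#include <node_api.h>

// Illustrative native callback; the documentation defines its own Method.
static napi_value Method(napi_env env, napi_callback_info info) {
  napi_value result;
  napi_status status =
      napi_create_string_utf8(env, "hello", NAPI_AUTO_LENGTH, &result);
  if (status != napi_ok) return NULL;
  return result;
}

// Corrected Init: napi_create_function takes the name length
// (or NAPI_AUTO_LENGTH) before the callback pointer.
static napi_value Init(napi_env env, napi_value exports) {
  napi_value method;
  napi_status status = napi_create_function(
      env, "exports", NAPI_AUTO_LENGTH, Method, NULL, &method);
  if (status != napi_ok) return NULL;
  return method;
}

NAPI_MODULE(NODE_GYP_MODULE_NAME, Init)
```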
Backport-PR-URL: https://github.com/nodejs/node/pull/19447 PR-URL: https://github.com/nodejs/node/pull/18661 Reviewed-By: Michael Dawson Reviewed-By: Ruben Bridgewater Reviewed-By: Colin Ihrig Reviewed-By: James M Snell Reviewed-By: Tobias Nießen --- doc/api/n-api.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/api/n-api.md b/doc/api/n-api.md index d965f3a4693884..650c4f96baf9eb 100644 --- a/doc/api/n-api.md +++ b/doc/api/n-api.md @@ -916,7 +916,7 @@ For example, to set a function to be returned by the `require()` for the addon: napi_value Init(napi_env env, napi_value exports) { napi_value method; napi_status status; - status = napi_create_function(env, "exports", Method, NULL, &method); + status = napi_create_function(env, "exports", NAPI_AUTO_LENGTH, Method, NULL, &method); if (status != napi_ok) return NULL; return method; } From 8826f185b05a02013c4ae5ce1f317936b3c998f9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tobias=20Nie=C3=9Fen?= Date: Sat, 10 Feb 2018 16:32:01 +0100 Subject: [PATCH 208/227] doc: mark NAPI_AUTO_LENGTH as code Backport-PR-URL: https://github.com/nodejs/node/pull/19447 PR-URL: https://github.com/nodejs/node/pull/18697 Reviewed-By: Ruben Bridgewater Reviewed-By: Colin Ihrig Reviewed-By: Jeremiah Senkpiel Reviewed-By: Luigi Pinca --- doc/api/n-api.md | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/doc/api/n-api.md b/doc/api/n-api.md index 650c4f96baf9eb..cae1dab65ded70 100644 --- a/doc/api/n-api.md +++ b/doc/api/n-api.md @@ -552,10 +552,10 @@ NAPI_NO_RETURN void napi_fatal_error(const char* location, - `[in] location`: Optional location at which the error occurred. - `[in] location_len`: The length of the location in bytes, or -NAPI_AUTO_LENGTH if it is null-terminated. +`NAPI_AUTO_LENGTH` if it is null-terminated. - `[in] message`: The message associated with the error. - `[in] message_len`: The length of the message in bytes, or -NAPI_AUTO_LENGTH if it is +`NAPI_AUTO_LENGTH` if it is null-terminated. The function call does not return, the process will be terminated. @@ -1258,7 +1258,7 @@ napi_status napi_create_function(napi_env env, - `[in] utf8name`: A string representing the name of the function encoded as UTF8. - `[in] length`: The length of the utf8name in bytes, or -NAPI_AUTO_LENGTH if it is null-terminated. +`NAPI_AUTO_LENGTH` if it is null-terminated. - `[in] cb`: A function pointer to the native function to be invoked when the created function is invoked from JavaScript. - `[in] data`: Optional arbitrary context data to be passed into the native @@ -1491,7 +1491,7 @@ napi_status napi_create_string_latin1(napi_env env, - `[in] env`: The environment that the API is invoked under. - `[in] str`: Character buffer representing a ISO-8859-1-encoded string. - `[in] length`: The length of the string in bytes, or -NAPI_AUTO_LENGTH if it is null-terminated. +`NAPI_AUTO_LENGTH` if it is null-terminated. - `[out] result`: A `napi_value` representing a JavaScript String. Returns `napi_ok` if the API succeeded. @@ -1516,7 +1516,7 @@ napi_status napi_create_string_utf16(napi_env env, - `[in] env`: The environment that the API is invoked under. - `[in] str`: Character buffer representing a UTF16-LE-encoded string. - `[in] length`: The length of the string in two-byte code units, or -NAPI_AUTO_LENGTH if it is null-terminated. +`NAPI_AUTO_LENGTH` if it is null-terminated. - `[out] result`: A `napi_value` representing a JavaScript String. 
Returns `napi_ok` if the API succeeded. @@ -1540,7 +1540,7 @@ napi_status napi_create_string_utf8(napi_env env, - `[in] env`: The environment that the API is invoked under. - `[in] str`: Character buffer representing a UTF8-encoded string. -- `[in] length`: The length of the string in bytes, or NAPI_AUTO_LENGTH +- `[in] length`: The length of the string in bytes, or `NAPI_AUTO_LENGTH` if it is null-terminated. - `[out] result`: A `napi_value` representing a JavaScript String. @@ -3066,7 +3066,7 @@ napi_status napi_define_class(napi_env env, - `[in] utf8name`: Name of the JavaScript constructor function; this is not required to be the same as the C++ class name, though it is recommended for clarity. - - `[in] length`: The length of the utf8name in bytes, or NAPI_AUTO_LENGTH + - `[in] length`: The length of the utf8name in bytes, or `NAPI_AUTO_LENGTH` if it is null-terminated. - `[in] constructor`: Callback function that handles constructing instances of the class. (This should be a static method on the class, not an actual From 7c58045470bb27aef9df7d6fe9163bd5efe52397 Mon Sep 17 00:00:00 2001 From: cjihrig Date: Sun, 11 Feb 2018 17:07:39 -0500 Subject: [PATCH 209/227] test: remove unnecessary timer The timer in NAPI's test_callback_scope/test-resolve-async.js can be removed. If the test fails, it will timeout on its own. The extra timer increases the chances of the test being flaky. Backport-PR-URL: https://github.com/nodejs/node/pull/19447 PR-URL: https://github.com/nodejs/node/pull/18719 Fixes: https://github.com/nodejs/node/issues/18702 Reviewed-By: Ruben Bridgewater Reviewed-By: Santiago Gimeno Reviewed-By: Luigi Pinca Reviewed-By: James M Snell --- .../test_callback_scope/test-resolve-async.js | 9 +-------- 1 file changed, 1 insertion(+), 8 deletions(-) diff --git a/test/addons-napi/test_callback_scope/test-resolve-async.js b/test/addons-napi/test_callback_scope/test-resolve-async.js index e9f4b9044c0154..77f25c9dde533f 100644 --- a/test/addons-napi/test_callback_scope/test-resolve-async.js +++ b/test/addons-napi/test_callback_scope/test-resolve-async.js @@ -1,13 +1,6 @@ 'use strict'; const common = require('../../common'); -const assert = require('assert'); const { testResolveAsync } = require(`./build/${common.buildType}/binding`); -let called = false; -testResolveAsync().then(common.mustCall(() => { - called = true; -})); - -setTimeout(common.mustCall(() => { assert(called); }), - common.platformTimeout(20)); +testResolveAsync().then(common.mustCall()); From d2463745a705a941db26429b89fae1607bca70c1 Mon Sep 17 00:00:00 2001 From: Gabriel Schulhof Date: Tue, 27 Feb 2018 13:02:24 -0500 Subject: [PATCH 210/227] n-api: fix object test Passing a pointer to a static integer is sufficient for the test. 
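In other words, rather than heap-allocating a copy of the value for every wrap (which nothing ever freed), the test can wrap the address of the existing static. A condensed sketch of the change follows; the function name is illustrative, and the actual test change is in the diff below:

```C
#include <node_api.h>

static int test_value = 3;

// Before: a heap copy was wrapped with no finalizer, so it was never freed.
//   int32_t* data = malloc(sizeof(int32_t));
//   *data = test_value;
//   napi_wrap(env, arg, data, NULL, NULL, NULL);
//
// After: wrapping a pointer to the static value is sufficient for the test.
static napi_value WrapStatic(napi_env env, napi_value arg) {
  napi_status status = napi_wrap(env, arg, &test_value, NULL, NULL, NULL);
  if (status != napi_ok) return NULL;
  return arg;
}
```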
Backport-PR-URL: https://github.com/nodejs/node/pull/19447 PR-URL: https://github.com/nodejs/node/pull/19039 Reviewed-By: Colin Ihrig Reviewed-By: Michael Dawson Reviewed-By: Hitesh Kanwathirtha --- test/addons-napi/test_object/test_object.c | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/test/addons-napi/test_object/test_object.c b/test/addons-napi/test_object/test_object.c index 75e0ee4c30f85c..a84da27365ff2e 100644 --- a/test/addons-napi/test_object/test_object.c +++ b/test/addons-napi/test_object/test_object.c @@ -1,7 +1,6 @@ #include #include "../common.h" #include -#include static int test_value = 3; @@ -199,9 +198,7 @@ napi_value Wrap(napi_env env, napi_callback_info info) { napi_value arg; NAPI_CALL(env, napi_get_cb_info(env, info, &argc, &arg, NULL, NULL)); - int32_t* data = malloc(sizeof(int32_t)); - *data = test_value; - NAPI_CALL(env, napi_wrap(env, arg, data, NULL, NULL, NULL)); + NAPI_CALL(env, napi_wrap(env, arg, &test_value, NULL, NULL, NULL)); return NULL; } From 10fe65a0d528b0781c9fa7637d81c3ff5698ce65 Mon Sep 17 00:00:00 2001 From: Eric Bickle Date: Thu, 1 Mar 2018 09:27:16 -0800 Subject: [PATCH 211/227] doc: fix n-api asynchronous threading docs Documentation for N-API Custom Asynchronous Operations incorrectly stated that async execution happens on the main event loop. Added details to napi_create_async_work about which threads are used to invoke the execute and complete callbacks. Changed 'async' to 'asynchronous' in the documentation for Custom Asynchronous Operations. Changed "executes in parallel" to "can execute in parallel" for the documentation of napi_create_async_work execute parameter. Backport-PR-URL: https://github.com/nodejs/node/pull/19447 PR-URL: https://github.com/nodejs/node/pull/19073 Fixes: https://github.com/nodejs/node/issues/19071 Reviewed-By: Michael Dawson Reviewed-By: James M Snell Reviewed-By: Ruben Bridgewater --- doc/api/n-api.md | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/doc/api/n-api.md b/doc/api/n-api.md index cae1dab65ded70..4c555554405331 100644 --- a/doc/api/n-api.md +++ b/doc/api/n-api.md @@ -3281,9 +3281,11 @@ napi_status napi_create_async_work(napi_env env, - `[in] async_resource_name`: Identifier for the kind of resource that is being provided for diagnostic information exposed by the `async_hooks` API. - `[in] execute`: The native function which should be called to excute -the logic asynchronously. +the logic asynchronously. The given function is called from a worker pool +thread and can execute in parallel with the main event loop thread. - `[in] complete`: The native function which will be called when the -asynchronous logic is comple or is cancelled. +asynchronous logic is completed or is cancelled. The given function is called +from the main event loop thread. - `[in] data`: User-provided data context. This will be passed back into the execute and complete functions. - `[out] result`: `napi_async_work*` which is the handle to the newly created @@ -3359,9 +3361,9 @@ callback invocation, even if it has been successfully cancelled. ## Custom Asynchronous Operations The simple asynchronous work APIs above may not be appropriate for every -scenario, because with those the async execution still happens on the main -event loop. When using any other async mechanism, the following APIs are -necessary to ensure an async operation is properly tracked by the runtime. +scenario. 
When using any other asynchronous mechanism, the following APIs +are necessary to ensure an asynchronous operation is properly tracked by +the runtime. ### napi_async_init +```C +napi_status napi_fatal_exception(napi_env env, napi_value err); +``` + +- `[in] env`: The environment that the API is invoked under. +- `[in] err`: The error you want to pass to `uncaughtException`. + +Trigger an `uncaughtException` in JavaScript. Useful if an async +callback throws an exception with no way to recover. + ### Fatal Errors In the event of an unrecoverable error in a native module, a fatal error can be diff --git a/src/node_api.cc b/src/node_api.cc index b153aa5f3c3bf6..a52ecef27c4c27 100644 --- a/src/node_api.cc +++ b/src/node_api.cc @@ -293,6 +293,13 @@ v8::Local V8LocalValueFromJsValue(napi_value v) { return local; } +static inline void trigger_fatal_exception( + napi_env env, v8::Local local_err) { + v8::Local local_msg = + v8::Exception::CreateMessage(env->isolate, local_err); + node::FatalException(env->isolate, local_err, local_msg); +} + static inline napi_status V8NameFromPropertyDescriptor(napi_env env, const napi_property_descriptor* p, v8::Local* result) { @@ -967,6 +974,16 @@ napi_status napi_get_last_error_info(napi_env env, return napi_ok; } +napi_status napi_fatal_exception(napi_env env, napi_value err) { + NAPI_PREAMBLE(env); + CHECK_ARG(env, err); + + v8::Local local_err = v8impl::V8LocalValueFromJsValue(err); + v8impl::trigger_fatal_exception(env, local_err); + + return napi_clear_last_error(env); +} + NAPI_NO_RETURN void napi_fatal_error(const char* location, size_t location_len, const char* message, @@ -3457,10 +3474,9 @@ class Work : public node::AsyncResource { // report it as a fatal exception. (There is no JavaScript on the // callstack that can possibly handle it.) 
if (!env->last_exception.IsEmpty()) { - v8::TryCatch try_catch(env->isolate); - env->isolate->ThrowException( - v8::Local::New(env->isolate, env->last_exception)); - node::FatalException(env->isolate, try_catch); + v8::Local local_err = v8::Local::New( + env->isolate, env->last_exception); + v8impl::trigger_fatal_exception(env, local_err); } } } diff --git a/src/node_api.h b/src/node_api.h index c5b8a2a30cfe15..a2a73d5bcfd8f5 100644 --- a/src/node_api.h +++ b/src/node_api.h @@ -103,6 +103,8 @@ NAPI_EXTERN napi_status napi_get_last_error_info(napi_env env, const napi_extended_error_info** result); +NAPI_EXTERN napi_status napi_fatal_exception(napi_env env, napi_value err); + NAPI_EXTERN NAPI_NO_RETURN void napi_fatal_error(const char* location, size_t location_len, const char* message, diff --git a/src/node_internals.h b/src/node_internals.h index de1b8adc87f4d3..bfb9bf296d7285 100644 --- a/src/node_internals.h +++ b/src/node_internals.h @@ -118,6 +118,11 @@ void GetSockOrPeerName(const v8::FunctionCallbackInfo& args) { args.GetReturnValue().Set(err); } +void FatalException(v8::Isolate* isolate, + v8::Local error, + v8::Local message); + + void SignalExit(int signo); #ifdef __POSIX__ void RegisterSignalHandler(int signal, diff --git a/test/addons-napi/test_async/test-uncaught.js b/test/addons-napi/test_async/test-uncaught.js new file mode 100644 index 00000000000000..fdcb3203f54410 --- /dev/null +++ b/test/addons-napi/test_async/test-uncaught.js @@ -0,0 +1,18 @@ +'use strict'; +const common = require('../../common'); +const assert = require('assert'); +const test_async = require(`./build/${common.buildType}/test_async`); + +process.on('uncaughtException', common.mustCall(function(err) { + try { + throw new Error('should not fail'); + } catch (err) { + assert.strictEqual(err.message, 'should not fail'); + } + assert.strictEqual(err.message, 'uncaught'); +})); + +// Successful async execution and completion callback. 
+test_async.Test(5, {}, common.mustCall(function() { + throw new Error('uncaught'); +})); diff --git a/test/addons-napi/test_fatal_exception/binding.gyp b/test/addons-napi/test_fatal_exception/binding.gyp new file mode 100644 index 00000000000000..f4dc0a71ea2817 --- /dev/null +++ b/test/addons-napi/test_fatal_exception/binding.gyp @@ -0,0 +1,8 @@ +{ + "targets": [ + { + "target_name": "test_fatal_exception", + "sources": [ "test_fatal_exception.c" ] + } + ] +} diff --git a/test/addons-napi/test_fatal_exception/test.js b/test/addons-napi/test_fatal_exception/test.js new file mode 100644 index 00000000000000..f02b9bce1e8169 --- /dev/null +++ b/test/addons-napi/test_fatal_exception/test.js @@ -0,0 +1,11 @@ +'use strict'; +const common = require('../../common'); +const assert = require('assert'); +const test_fatal = require(`./build/${common.buildType}/test_fatal_exception`); + +process.on('uncaughtException', common.mustCall(function(err) { + assert.strictEqual(err.message, 'fatal error'); +})); + +const err = new Error('fatal error'); +test_fatal.Test(err); diff --git a/test/addons-napi/test_fatal_exception/test_fatal_exception.c b/test/addons-napi/test_fatal_exception/test_fatal_exception.c new file mode 100644 index 00000000000000..fd81c56d856db8 --- /dev/null +++ b/test/addons-napi/test_fatal_exception/test_fatal_exception.c @@ -0,0 +1,26 @@ +#include +#include "../common.h" + +napi_value Test(napi_env env, napi_callback_info info) { + napi_value err; + size_t argc = 1; + + NAPI_CALL(env, napi_get_cb_info(env, info, &argc, &err, NULL, NULL)); + + NAPI_CALL(env, napi_fatal_exception(env, err)); + + return NULL; +} + +napi_value Init(napi_env env, napi_value exports) { + napi_property_descriptor properties[] = { + DECLARE_NAPI_PROPERTY("Test", Test), + }; + + NAPI_CALL(env, napi_define_properties( + env, exports, sizeof(properties) / sizeof(*properties), properties)); + + return exports; +} + +NAPI_MODULE(NODE_GYP_MODULE_NAME, Init) From c16a705416c216ee9bb90add9dff08d502e794f6 Mon Sep 17 00:00:00 2001 From: Gabriel Schulhof Date: Mon, 19 Mar 2018 11:59:41 -0400 Subject: [PATCH 221/227] n-api: re-write test_make_callback This re-writes the test in C by dropping std::vector in favour of a C99 variable length array, and by dropping the anonymous namespace in favour of static function declarations. 
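For context, the shape the rewritten test takes can be sketched in plain C as below: a fixed-size argument array instead of a `std::vector`, and `static` functions instead of an anonymous namespace. The names (`MAX_ARGUMENTS`, `ForwardCall`) are illustrative, and `napi_call_function` is used here only to keep the sketch self-contained; the test itself forwards through `napi_make_callback` with an async context.

```C
#include <node_api.h>

#define MAX_ARGUMENTS 10

// Forward every argument after (recv, func) to the callback, collecting the
// trailing arguments into a plain C array.
static napi_value ForwardCall(napi_env env, napi_callback_info info) {
  size_t argc = MAX_ARGUMENTS;
  napi_value args[MAX_ARGUMENTS];
  if (napi_get_cb_info(env, info, &argc, args, NULL, NULL) != napi_ok ||
      argc < 2) {
    return NULL;
  }

  napi_value argv[MAX_ARGUMENTS - 2];
  for (size_t n = 2; n < argc; n += 1) {
    argv[n - 2] = args[n];  // shift the trailing arguments down by two slots
  }

  napi_value result;
  if (napi_call_function(env, args[0], args[1], argc - 2, argv,
                         &result) != napi_ok) {
    return NULL;
  }
  return result;
}
```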
Backport-PR-URL: https://github.com/nodejs/node/pull/19447 PR-URL: https://github.com/nodejs/node/pull/19448 Reviewed-By: James M Snell Reviewed-By: Luigi Pinca Reviewed-By: Michael Dawson --- .../{binding.cc => binding.c} | 21 +++++++++---------- .../test_make_callback/binding.gyp | 2 +- 2 files changed, 11 insertions(+), 12 deletions(-) rename test/addons-napi/test_make_callback/{binding.cc => binding.c} (79%) diff --git a/test/addons-napi/test_make_callback/binding.cc b/test/addons-napi/test_make_callback/binding.c similarity index 79% rename from test/addons-napi/test_make_callback/binding.cc rename to test/addons-napi/test_make_callback/binding.c index 18e558aecf8d33..f893319987cb89 100644 --- a/test/addons-napi/test_make_callback/binding.cc +++ b/test/addons-napi/test_make_callback/binding.c @@ -1,13 +1,13 @@ #include #include "../common.h" -#include -namespace { +#define MAX_ARGUMENTS 10 +static napi_value MakeCallback(napi_env env, napi_callback_info info) { - const int kMaxArgs = 10; - size_t argc = kMaxArgs; - napi_value args[kMaxArgs]; + size_t argc = MAX_ARGUMENTS; + size_t n; + napi_value args[MAX_ARGUMENTS]; NAPI_CALL(env, napi_get_cb_info(env, info, &argc, args, NULL, NULL)); NAPI_ASSERT(env, argc > 0, "Wrong number of arguments"); @@ -15,9 +15,9 @@ napi_value MakeCallback(napi_env env, napi_callback_info info) { napi_value recv = args[0]; napi_value func = args[1]; - std::vector argv; - for (size_t n = 2; n < argc; n += 1) { - argv.push_back(args[n]); + napi_value argv[MAX_ARGUMENTS - 2]; + for (n = 2; n < argc; n += 1) { + argv[n - 2] = args[n]; } napi_valuetype func_type; @@ -34,7 +34,7 @@ napi_value MakeCallback(napi_env env, napi_callback_info info) { napi_value result; if (func_type == napi_function) { NAPI_CALL(env, napi_make_callback( - env, context, recv, func, argv.size(), argv.data(), &result)); + env, context, recv, func, argc - 2, argv, &result)); } else { NAPI_ASSERT(env, false, "Unexpected argument type"); } @@ -44,6 +44,7 @@ napi_value MakeCallback(napi_env env, napi_callback_info info) { return result; } +static napi_value Init(napi_env env, napi_value exports) { napi_value fn; NAPI_CALL(env, napi_create_function( @@ -52,6 +53,4 @@ napi_value Init(napi_env env, napi_value exports) { return exports; } -} // namespace - NAPI_MODULE(binding, Init) diff --git a/test/addons-napi/test_make_callback/binding.gyp b/test/addons-napi/test_make_callback/binding.gyp index 7ede63d94a0d77..23daf507916ff6 100644 --- a/test/addons-napi/test_make_callback/binding.gyp +++ b/test/addons-napi/test_make_callback/binding.gyp @@ -3,7 +3,7 @@ { 'target_name': 'binding', 'defines': [ 'V8_DEPRECATION_WARNINGS=1' ], - 'sources': [ 'binding.cc' ] + 'sources': [ 'binding.c' ] } ] } From 71acb5205aebad0f03d00af1f7f9daaf5b780aba Mon Sep 17 00:00:00 2001 From: jiangq Date: Fri, 23 Mar 2018 22:28:39 +0800 Subject: [PATCH 222/227] doc: Add a missing comma Backport-PR-URL: https://github.com/nodejs/node/pull/19447 PR-URL: https://github.com/nodejs/node/pull/19555 Reviewed-By: Colin Ihrig Reviewed-By: Trivikram Kamat --- doc/api/n-api.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/api/n-api.md b/doc/api/n-api.md index 4245c28fb046c0..e176cbdf1ef9cd 100644 --- a/doc/api/n-api.md +++ b/doc/api/n-api.md @@ -608,7 +608,7 @@ that has a loop which iterates through the elements in a large array: ```C for (int i = 0; i < 1000000; i++) { napi_value result; - napi_status status = napi_get_element(e object, i, 
&result); + napi_status status = napi_get_element(e, object, i, &result); if (status != napi_ok) { break; } @@ -645,7 +645,7 @@ for (int i = 0; i < 1000000; i++) { break; } napi_value result; - status = napi_get_element(e object, i, &result); + status = napi_get_element(e, object, i, &result); if (status != napi_ok) { break; } From c6d0a66ef2f74b3d76b6c738f7fb7ac8710c6b9d Mon Sep 17 00:00:00 2001 From: Michael Dawson Date: Tue, 20 Mar 2018 17:14:24 -0400 Subject: [PATCH 223/227] n-api: bump version of n-api supported Bump the version due to additions to the api. Backport-PR-URL: https://github.com/nodejs/node/pull/19447 PR-URL: https://github.com/nodejs/node/pull/19497 Reviewed-By: Colin Ihrig Reviewed-By: Trivikram Kamat Reviewed-By: James M Snell Reviewed-By: Ruben Bridgewater --- src/node_version.h | 2 +- test/addons-napi/test_general/test.js | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/src/node_version.h b/src/node_version.h index 843e5b524d533f..e71ed35478e3b7 100644 --- a/src/node_version.h +++ b/src/node_version.h @@ -62,6 +62,6 @@ #define NODE_MODULE_VERSION 48 /* Node.js v6.0.0 */ // the NAPI_VERSION provided by this version of the runtime -#define NAPI_VERSION 2 +#define NAPI_VERSION 3 #endif // SRC_NODE_VERSION_H_ diff --git a/test/addons-napi/test_general/test.js b/test/addons-napi/test_general/test.js index d43a1c69ba0c43..4faf508d5db145 100644 --- a/test/addons-napi/test_general/test.js +++ b/test/addons-napi/test_general/test.js @@ -32,9 +32,9 @@ assert.strictEqual(test_general.testGetPrototype(extendedObject), assert.notStrictEqual(test_general.testGetPrototype(baseObject), test_general.testGetPrototype(extendedObject)); -// test version management funcitons -// expected version is currently 1 -assert.strictEqual(test_general.testGetVersion(), 2); +// test version management functions +// expected version is currently 3 +assert.strictEqual(test_general.testGetVersion(), 3); const [ major, minor, patch, release ] = test_general.testGetNodeVersion(); assert.strictEqual(process.version.split('-')[0], From e6ccdfbde3735b4af6049aff31c2ca4f3a4eb369 Mon Sep 17 00:00:00 2001 From: Gabriel Schulhof Date: Thu, 22 Mar 2018 17:01:37 -0400 Subject: [PATCH 224/227] n-api: ensure in-module exceptions are propagated Whenever we call into an addon, whether it is for a callback, for module init, or for async work-related reasons, we should make sure that * the last error is cleared, * the scopes before the call are the same as after, and * if an exception was thrown and captured inside the module, then it is re-thrown after the call. Therefore we should call into the module in a unified fashion. This change introduces the macro NAPI_CALL_INTO_MODULE() which should be used whenever invoking a callback provided by the module. 
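From the addon author's side, the behaviour this change guarantees can be sketched as follows (the names are illustrative): a callback signals failure by leaving an exception pending and returning `NULL`, and the runtime, via the `NAPI_CALL_INTO_MODULE()` macro introduced in the diff below, re-throws that pending exception into JavaScript once the call returns, whether the entry point was module init, a regular callback, or async work.

```C
#include <node_api.h>

// A native callback that fails: it records a pending exception and returns
// NULL. Propagating that exception back into JavaScript is the runtime's job.
static napi_value AlwaysFails(napi_env env, napi_callback_info info) {
  (void) info;  // unused in this sketch
  napi_throw_error(env, NULL, "native failure");
  return NULL;
}
```

With that in place, calling the exported function from JavaScript behaves like any other throwing call and can be caught with an ordinary `try`/`catch`.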
Fixes: https://github.com/nodejs/node/issues/19437 Backport-PR-URL: https://github.com/nodejs/node/pull/19447 PR-URL: https://github.com/nodejs/node/pull/19537 Reviewed-By: James M Snell Reviewed-By: Michael Dawson --- src/node_api.cc | 79 ++++++++++--------- test/addons-napi/test_exception/test.js | 30 ++++++- .../test_exception/test_exception.c | 42 ++++++++-- 3 files changed, 108 insertions(+), 43 deletions(-) diff --git a/src/node_api.cc b/src/node_api.cc index a52ecef27c4c27..e4638e4559af97 100644 --- a/src/node_api.cc +++ b/src/node_api.cc @@ -168,6 +168,24 @@ struct napi_env__ { (out) = v8::type::New((buffer), (byte_offset), (length)); \ } while (0) +#define NAPI_CALL_INTO_MODULE(env, call, handle_exception) \ + do { \ + int open_handle_scopes = (env)->open_handle_scopes; \ + int open_callback_scopes = (env)->open_callback_scopes; \ + napi_clear_last_error((env)); \ + call; \ + CHECK_EQ((env)->open_handle_scopes, open_handle_scopes); \ + CHECK_EQ((env)->open_callback_scopes, open_callback_scopes); \ + if (!(env)->last_exception.IsEmpty()) { \ + handle_exception( \ + v8::Local::New((env)->isolate, (env)->last_exception)); \ + (env)->last_exception.Reset(); \ + } \ + } while (0) + +#define NAPI_CALL_INTO_MODULE_THROW(env, call) \ + NAPI_CALL_INTO_MODULE((env), call, (env)->isolate->ThrowException) + namespace v8impl { // convert from n-api property attributes to v8::PropertyAttribute @@ -349,10 +367,11 @@ class Finalizer { static void FinalizeBufferCallback(char* data, void* hint) { Finalizer* finalizer = static_cast(hint); if (finalizer->_finalize_callback != nullptr) { - finalizer->_finalize_callback( - finalizer->_env, - data, - finalizer->_finalize_hint); + NAPI_CALL_INTO_MODULE_THROW(finalizer->_env, + finalizer->_finalize_callback( + finalizer->_env, + data, + finalizer->_finalize_hint)); } Delete(finalizer); @@ -464,10 +483,11 @@ class Reference : private Finalizer { bool delete_self = reference->_delete_self; if (reference->_finalize_callback != nullptr) { - reference->_finalize_callback( - reference->_env, - reference->_finalize_data, - reference->_finalize_hint); + NAPI_CALL_INTO_MODULE_THROW(reference->_env, + reference->_finalize_callback( + reference->_env, + reference->_finalize_data, + reference->_finalize_hint)); } if (delete_self) { @@ -552,32 +572,17 @@ class CallbackWrapperBase : public CallbackWrapper { napi_callback cb = reinterpret_cast( v8::Local::Cast( _cbdata->GetInternalField(kInternalFieldIndex))->Value()); - v8::Isolate* isolate = _cbinfo.GetIsolate(); napi_env env = static_cast( v8::Local::Cast( _cbdata->GetInternalField(kEnvIndex))->Value()); - // Make sure any errors encountered last time we were in N-API are gone. - napi_clear_last_error(env); - - int open_handle_scopes = env->open_handle_scopes; - int open_callback_scopes = env->open_callback_scopes; - - napi_value result = cb(env, cbinfo_wrapper); + napi_value result; + NAPI_CALL_INTO_MODULE_THROW(env, result = cb(env, cbinfo_wrapper)); if (result != nullptr) { this->SetReturnValue(result); } - - CHECK_EQ(env->open_handle_scopes, open_handle_scopes); - CHECK_EQ(env->open_callback_scopes, open_callback_scopes); - - if (!env->last_exception.IsEmpty()) { - isolate->ThrowException( - v8::Local::New(isolate, env->last_exception)); - env->last_exception.Reset(); - } } const Info& _cbinfo; @@ -885,8 +890,10 @@ void napi_module_register_cb(v8::Local exports, // one is found. 
napi_env env = v8impl::GetEnv(context); - napi_value _exports = - mod->nm_register_func(env, v8impl::JsValueFromV8LocalValue(exports)); + napi_value _exports; + NAPI_CALL_INTO_MODULE_THROW(env, + _exports = mod->nm_register_func(env, + v8impl::JsValueFromV8LocalValue(exports))); // If register function returned a non-null exports object different from // the exports object we passed it, set that as the "exports" property of @@ -3465,19 +3472,17 @@ class Work : public node::AsyncResource { v8::HandleScope scope(env->isolate); CallbackScope callback_scope(work); - work->_complete(env, ConvertUVErrorCode(status), work->_data); + NAPI_CALL_INTO_MODULE(env, + work->_complete(env, ConvertUVErrorCode(status), work->_data), + [env] (v8::Local local_err) { + // If there was an unhandled exception in the complete callback, + // report it as a fatal exception. (There is no JavaScript on the + // callstack that can possibly handle it.) + v8impl::trigger_fatal_exception(env, local_err); + }); // Note: Don't access `work` after this point because it was // likely deleted by the complete callback. - - // If there was an unhandled exception in the complete callback, - // report it as a fatal exception. (There is no JavaScript on the - // callstack that can possibly handle it.) - if (!env->last_exception.IsEmpty()) { - v8::Local local_err = v8::Local::New( - env->isolate, env->last_exception); - v8impl::trigger_fatal_exception(env, local_err); - } } } diff --git a/test/addons-napi/test_exception/test.js b/test/addons-napi/test_exception/test.js index 787b7d78b1c72b..b9311add6c92d7 100644 --- a/test/addons-napi/test_exception/test.js +++ b/test/addons-napi/test_exception/test.js @@ -1,10 +1,26 @@ 'use strict'; +// Flags: --expose-gc const common = require('../../common'); -const test_exception = require(`./build/${common.buildType}/test_exception`); const assert = require('assert'); const theError = new Error('Some error'); +// The test module throws an error during Init, but in order for its exports to +// not be lost, it attaches them to the error's "bindings" property. This way, +// we can make sure that exceptions thrown during the module initialization +// phase are propagated through require() into JavaScript. +// https://github.com/nodejs/node/issues/19437 +const test_exception = (function() { + let resultingException; + try { + require(`./build/${common.buildType}/test_exception`); + } catch (anException) { + resultingException = anException; + } + assert.strictEqual(resultingException.message, 'Error during Init'); + return resultingException.binding; +})(); + { const throwTheError = () => { throw theError; }; @@ -50,3 +66,15 @@ const theError = new Error('Some error'); 'Exception state did not remain clear as expected,' + ` .wasPending() returned ${exception_pending}`); } + +// Make sure that exceptions that occur during finalization are propagated. +function testFinalize(binding) { + let x = test_exception[binding](); + x = null; + assert.throws(() => { global.gc(); }, /Error during Finalize/); + + // To assuage the linter's concerns. 
+ (function() {})(x); +} +testFinalize('createExternal'); +testFinalize('createExternalBuffer'); diff --git a/test/addons-napi/test_exception/test_exception.c b/test/addons-napi/test_exception/test_exception.c index 2ab01b653becfb..c83fd5ebcade19 100644 --- a/test/addons-napi/test_exception/test_exception.c +++ b/test/addons-napi/test_exception/test_exception.c @@ -3,7 +3,7 @@ static bool exceptionWasPending = false; -napi_value returnException(napi_env env, napi_callback_info info) { +static napi_value returnException(napi_env env, napi_callback_info info) { size_t argc = 1; napi_value args[1]; NAPI_CALL(env, napi_get_cb_info(env, info, &argc, args, NULL, NULL)); @@ -22,7 +22,7 @@ napi_value returnException(napi_env env, napi_callback_info info) { return NULL; } -napi_value allowException(napi_env env, napi_callback_info info) { +static napi_value allowException(napi_env env, napi_callback_info info) { size_t argc = 1; napi_value args[1]; NAPI_CALL(env, napi_get_cb_info(env, info, &argc, args, NULL, NULL)); @@ -38,23 +38,55 @@ napi_value allowException(napi_env env, napi_callback_info info) { return NULL; } -napi_value wasPending(napi_env env, napi_callback_info info) { +static napi_value wasPending(napi_env env, napi_callback_info info) { napi_value result; NAPI_CALL(env, napi_get_boolean(env, exceptionWasPending, &result)); return result; } -napi_value Init(napi_env env, napi_value exports) { +static void finalizer(napi_env env, void *data, void *hint) { + NAPI_CALL_RETURN_VOID(env, + napi_throw_error(env, NULL, "Error during Finalize")); +} + +static napi_value createExternal(napi_env env, napi_callback_info info) { + napi_value external; + + NAPI_CALL(env, + napi_create_external(env, NULL, finalizer, NULL, &external)); + + return external; +} + +static char buffer_data[12]; + +static napi_value createExternalBuffer(napi_env env, napi_callback_info info) { + napi_value buffer; + NAPI_CALL(env, napi_create_external_buffer(env, sizeof(buffer_data), + buffer_data, finalizer, NULL, &buffer)); + return buffer; +} + +static napi_value Init(napi_env env, napi_value exports) { napi_property_descriptor descriptors[] = { DECLARE_NAPI_PROPERTY("returnException", returnException), DECLARE_NAPI_PROPERTY("allowException", allowException), DECLARE_NAPI_PROPERTY("wasPending", wasPending), + DECLARE_NAPI_PROPERTY("createExternal", createExternal), + DECLARE_NAPI_PROPERTY("createExternalBuffer", createExternalBuffer), }; - NAPI_CALL(env, napi_define_properties( env, exports, sizeof(descriptors) / sizeof(*descriptors), descriptors)); + napi_value error, code, message; + NAPI_CALL(env, napi_create_string_utf8(env, "Error during Init", + NAPI_AUTO_LENGTH, &message)); + NAPI_CALL(env, napi_create_string_utf8(env, "", NAPI_AUTO_LENGTH, &code)); + NAPI_CALL(env, napi_create_error(env, code, message, &error)); + NAPI_CALL(env, napi_set_named_property(env, error, "binding", exports)); + NAPI_CALL(env, napi_throw(env, error)); + return exports; } From abd9fd67976e1da0ffc7deb90832a5d5b7511fe0 Mon Sep 17 00:00:00 2001 From: Gabriel Schulhof Date: Sat, 31 Mar 2018 21:37:17 -0400 Subject: [PATCH 225/227] n-api: back up env before finalize Heed the comment to not use fields of a Reference after calling its finalize callback, because such a call may destroy the Reference. 
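The underlying hazard is a plain use-after-free, independent of N-API. A sketch of the defensive pattern the fix applies is below; the types and the `handle_pending_exception` hook are hypothetical, standing in for the real `napi_env` and exception handling. The rule: copy any field you still need into a local before invoking a callback that may destroy the owning object.

```C
#include <stddef.h>

struct env_s;  /* opaque environment handle, illustrative only */

typedef void (*finalize_cb)(struct env_s* env, void* data, void* hint);

struct reference {
  struct env_s* env;
  void* data;
  void* hint;
  finalize_cb finalize;
};

/* Hypothetical stand-in for "re-throw whatever the callback left pending". */
static void handle_pending_exception(struct env_s* env) { (void) env; }

static void finalize_reference(struct reference* ref) {
  struct env_s* env = ref->env;  /* back up before calling into the module */

  if (ref->finalize != NULL)
    ref->finalize(ref->env, ref->data, ref->hint);  /* may delete `ref` */

  /* Only the local copy is used from here on; `ref` may be dangling. */
  handle_pending_exception(env);
}
```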
Fixes: https://github.com/nodejs/node/issues/19673 Backport-PR-URL: https://github.com/nodejs/node/pull/19447 PR-URL: https://github.com/nodejs/node/pull/19718 Reviewed-By: James M Snell Reviewed-By: Colin Ihrig --- src/node_api.cc | 3 ++- test/addons-napi/8_passing_wrapped/binding.cc | 15 ++++++++++++--- test/addons-napi/8_passing_wrapped/myobject.cc | 12 +++++++++++- test/addons-napi/8_passing_wrapped/test.js | 9 ++++++++- 4 files changed, 33 insertions(+), 6 deletions(-) diff --git a/src/node_api.cc b/src/node_api.cc index e4638e4559af97..a13398006b652d 100644 --- a/src/node_api.cc +++ b/src/node_api.cc @@ -481,9 +481,10 @@ class Reference : private Finalizer { // Check before calling the finalize callback, because the callback might // delete it. bool delete_self = reference->_delete_self; + napi_env env = reference->_env; if (reference->_finalize_callback != nullptr) { - NAPI_CALL_INTO_MODULE_THROW(reference->_env, + NAPI_CALL_INTO_MODULE_THROW(env, reference->_finalize_callback( reference->_env, reference->_finalize_data, diff --git a/test/addons-napi/8_passing_wrapped/binding.cc b/test/addons-napi/8_passing_wrapped/binding.cc index 86a6e4d739f03d..f38861d54d8dc2 100644 --- a/test/addons-napi/8_passing_wrapped/binding.cc +++ b/test/addons-napi/8_passing_wrapped/binding.cc @@ -1,7 +1,9 @@ #include "myobject.h" #include "../common.h" -napi_value CreateObject(napi_env env, napi_callback_info info) { +extern size_t finalize_count; + +static napi_value CreateObject(napi_env env, napi_callback_info info) { size_t argc = 1; napi_value args[1]; NAPI_CALL(env, napi_get_cb_info(env, info, &argc, args, nullptr, nullptr)); @@ -12,7 +14,7 @@ napi_value CreateObject(napi_env env, napi_callback_info info) { return instance; } -napi_value Add(napi_env env, napi_callback_info info) { +static napi_value Add(napi_env env, napi_callback_info info) { size_t argc = 2; napi_value args[2]; NAPI_CALL(env, napi_get_cb_info(env, info, &argc, args, nullptr, nullptr)); @@ -29,12 +31,19 @@ napi_value Add(napi_env env, napi_callback_info info) { return sum; } -napi_value Init(napi_env env, napi_value exports) { +static napi_value FinalizeCount(napi_env env, napi_callback_info info) { + napi_value return_value; + NAPI_CALL(env, napi_create_uint32(env, finalize_count, &return_value)); + return return_value; +} + +static napi_value Init(napi_env env, napi_value exports) { MyObject::Init(env); napi_property_descriptor desc[] = { DECLARE_NAPI_PROPERTY("createObject", CreateObject), DECLARE_NAPI_PROPERTY("add", Add), + DECLARE_NAPI_PROPERTY("finalizeCount", FinalizeCount), }; NAPI_CALL(env, diff --git a/test/addons-napi/8_passing_wrapped/myobject.cc b/test/addons-napi/8_passing_wrapped/myobject.cc index 19cc7dd2a29493..0c9ca90f52f8f3 100644 --- a/test/addons-napi/8_passing_wrapped/myobject.cc +++ b/test/addons-napi/8_passing_wrapped/myobject.cc @@ -1,9 +1,14 @@ #include "myobject.h" #include "../common.h" +size_t finalize_count = 0; + MyObject::MyObject() : env_(nullptr), wrapper_(nullptr) {} -MyObject::~MyObject() { napi_delete_reference(env_, wrapper_); } +MyObject::~MyObject() { + finalize_count++; + napi_delete_reference(env_, wrapper_); +} void MyObject::Destructor( napi_env env, void* nativeObject, void* /*finalize_hint*/) { @@ -45,6 +50,11 @@ napi_value MyObject::New(napi_env env, napi_callback_info info) { } obj->env_ = env; + + // It is important that the below call to napi_wrap() be such that we request + // a reference to the wrapped object via the 
out-parameter, because this + // ensures that we test the code path that deals with a reference that is + // destroyed from its own finalizer. NAPI_CALL(env, napi_wrap(env, _this, obj, diff --git a/test/addons-napi/8_passing_wrapped/test.js b/test/addons-napi/8_passing_wrapped/test.js index 3d24fa5d9fdaa7..7793133f7750ba 100644 --- a/test/addons-napi/8_passing_wrapped/test.js +++ b/test/addons-napi/8_passing_wrapped/test.js @@ -1,9 +1,16 @@ 'use strict'; +// Flags: --expose-gc + const common = require('../../common'); const assert = require('assert'); const addon = require(`./build/${common.buildType}/binding`); -const obj1 = addon.createObject(10); +let obj1 = addon.createObject(10); const obj2 = addon.createObject(20); const result = addon.add(obj1, obj2); assert.strictEqual(result, 30); + +// Make sure the native destructor gets called. +obj1 = null; +global.gc(); +assert.strictEqual(addon.finalizeCount(), 1); From 3aa5b7d939409bb5abab0fd4478e44f4e643e297 Mon Sep 17 00:00:00 2001 From: Kyle Farnung Date: Thu, 15 Mar 2018 17:22:30 -0700 Subject: [PATCH 226/227] n-api: add more `int64_t` tests * Updated tests for `Number` and `int32_t` * Added new tests for `int64_t` * Updated N-API `int64_t` behavior to return zero for all non-finite numbers * Clarified the documentation for these calls. Backport-PR-URL: https://github.com/nodejs/node/pull/19447 PR-URL: https://github.com/nodejs/node/pull/19402 Refs: https://github.com/nodejs/node-chakracore/pull/500 Reviewed-By: Michael Dawson Reviewed-By: James M Snell --- doc/api/n-api.md | 27 ++++-- src/node_api.cc | 23 +++-- test/addons-napi/test_number/test.js | 135 ++++++++++++++++++++------- 3 files changed, 135 insertions(+), 50 deletions(-) diff --git a/doc/api/n-api.md b/doc/api/n-api.md index e176cbdf1ef9cd..38d7b22f558ec2 100644 --- a/doc/api/n-api.md +++ b/doc/api/n-api.md @@ -1806,13 +1806,17 @@ napi_status napi_get_value_int32(napi_env env, - `[out] result`: C int32 primitive equivalent of the given JavaScript Number. Returns `napi_ok` if the API succeeded. If a non-number `napi_value` -is passed in `napi_number_expected . +is passed in `napi_number_expected`. This API returns the C int32 primitive equivalent -of the given JavaScript Number. If the number exceeds the range of the -32 bit integer, then the result is truncated to the equivalent of the -bottom 32 bits. This can result in a large positive number becoming -a negative number if the value is > 2^31 -1. +of the given JavaScript Number. + +If the number exceeds the range of the 32 bit integer, then the result is +truncated to the equivalent of the bottom 32 bits. This can result in a large +positive number becoming a negative number if the value is > 2^31 -1. + +Non-finite number values (NaN, positive infinity, or negative infinity) set the +result to zero. #### napi_get_value_int64 ```C napi_status napi_fatal_exception(napi_env env, napi_value err); @@ -560,7 +560,7 @@ thrown to immediately terminate the process. #### napi_fatal_error ```C NAPI_NO_RETURN void napi_fatal_error(const char* location, @@ -1383,7 +1383,7 @@ of the ECMAScript Language Specification. #### napi_create_dataview ```C @@ -1417,7 +1417,7 @@ JavaScript DataView Objects are described in ### Functions to convert from C types to N-API #### napi_create_int32 ```C napi_status napi_create_int32(napi_env env, int32_t value, napi_value* result) @@ -1438,7 +1438,7 @@ of the ECMAScript Language Specification. 
#### napi_create_uint32 ```C napi_status napi_create_uint32(napi_env env, uint32_t value, napi_value* result) @@ -1459,7 +1459,7 @@ of the ECMAScript Language Specification. #### napi_create_int64 ```C napi_status napi_create_int64(napi_env env, int64_t value, napi_value* result) @@ -1486,7 +1486,7 @@ outside the range of #### napi_create_double ```C napi_status napi_create_double(napi_env env, double value, napi_value* result) @@ -1706,7 +1706,7 @@ is managed by the VM #### napi_get_dataview_info ```C @@ -2239,7 +2239,7 @@ This API checks if the Object passed in is a typed array. ### napi_is_dataview ```C @@ -2573,7 +2573,7 @@ This API checks if the Object passed in has the named property. #### napi_delete_property ```C napi_status napi_delete_property(napi_env env, @@ -2595,7 +2595,7 @@ This API attempts to delete the `key` own property from `object`. #### napi_has_own_property ```C napi_status napi_has_own_property(napi_env env, @@ -2742,7 +2742,7 @@ requested index. #### napi_delete_element ```C napi_status napi_delete_element(napi_env env, @@ -2970,7 +2970,7 @@ call like the arguments and the `this` pointer from a given callback info. ### napi_get_new_target ```C napi_status napi_get_new_target(napi_env env, @@ -3224,7 +3224,7 @@ then by calling `napi_unwrap()` on the wrapper object. ### napi_remove_wrap ```C napi_status napi_remove_wrap(napi_env env, @@ -3296,7 +3296,7 @@ callback invocation, even when it was cancelled. @@ -3406,7 +3406,7 @@ the runtime. ### napi_async_init ```C napi_status napi_async_init(napi_env env, @@ -3427,7 +3427,7 @@ Returns `napi_ok` if the API succeeded. ### napi_async_destroy ```C napi_status napi_async_destroy(napi_env env, @@ -3445,7 +3445,7 @@ This API can be called even if there is a pending JavaScript exception. ```C @@ -3489,7 +3489,7 @@ may be required when implementing custom async behavior that does not use ### *napi_open_callback_scope* ```C NAPI_EXTERN napi_status napi_open_callback_scope(napi_env env, @@ -3514,7 +3514,7 @@ the required scope. ### *napi_close_callback_scope* ```C NAPI_EXTERN napi_status napi_close_callback_scope(napi_env env, @@ -3529,7 +3529,7 @@ This API can be called even if there is a pending JavaScript exception. ### napi_get_node_version ```C @@ -3557,7 +3557,7 @@ The returned buffer is statically allocated and does not need to be freed. ### napi_get_version ```C napi_status napi_get_version(napi_env env, @@ -3587,7 +3587,7 @@ support it: ### napi_adjust_external_memory ```C NAPI_EXTERN napi_status napi_adjust_external_memory(napi_env env, @@ -3666,7 +3666,7 @@ deferred = NULL; ### napi_create_promise ```C napi_status napi_create_promise(napi_env env, @@ -3686,7 +3686,7 @@ This API creates a deferred object and a JavaScript promise. ### napi_resolve_deferred ```C napi_status napi_resolve_deferred(napi_env env, @@ -3709,7 +3709,7 @@ The deferred object is freed upon successful completion. ### napi_reject_deferred ```C napi_status napi_reject_deferred(napi_env env, @@ -3732,7 +3732,7 @@ The deferred object is freed upon successful completion. ### napi_is_promise ```C napi_status napi_is_promise(napi_env env, @@ -3752,7 +3752,7 @@ underlying JavaScript engine. ### napi_run_script ```C NAPI_EXTERN napi_status napi_run_script(napi_env env, @@ -3771,7 +3771,7 @@ a specific `napi_env`. 
### napi_get_uv_event_loop ```C NAPI_EXTERN napi_status napi_get_uv_event_loop(napi_env env, diff --git a/doc/changelogs/CHANGELOG_V6.md b/doc/changelogs/CHANGELOG_V6.md index f84e43d8398c85..5b1a998e3f6034 100644 --- a/doc/changelogs/CHANGELOG_V6.md +++ b/doc/changelogs/CHANGELOG_V6.md @@ -7,6 +7,7 @@ +6.14.2
6.14.1
6.14.0
6.13.1
@@ -62,6 +63,244 @@ [Node.js Long Term Support Plan](https://github.com/nodejs/LTS) and will be supported actively until April 2018 and maintained until April 2019. + +## 2018-04-30, Version 6.14.2 'Boron' (LTS), @MylesBorins + +### Notable Changes + +* **n-api**: + - n-api has been backported to v6.x. It is being landed as an experimental interface, + and as such is landing in a Semver-Patch release. (Gabriel Schulhof) [#19447](https://github.com/nodejs/node/pull/19447) + +### Commits + +* [[`6ba38e8c2b`](https://github.com/nodejs/node/commit/6ba38e8c2b)] - **N-API**: Reuse ObjectTemplate instances (Gabriel Schulhof) [#13999](https://github.com/nodejs/node/pull/13999) +* [[`49d8c2e8ae`](https://github.com/nodejs/node/commit/49d8c2e8ae)] - **build**: refine static and shared lib build (Yihong Wang) [#17604](https://github.com/nodejs/node/pull/17604) +* [[`cc7469eec8`](https://github.com/nodejs/node/commit/cc7469eec8)] - **build**: allow x86\_64 as a dest\_cpu alias for x64 (Rod Vagg) [#18052](https://github.com/nodejs/node/pull/18052) +* [[`969398d08e`](https://github.com/nodejs/node/commit/969398d08e)] - **crypto**: reuse variable instead of reevaluation (Tobias Nießen) [#17735](https://github.com/nodejs/node/pull/17735) +* [[`71acb5205a`](https://github.com/nodejs/node/commit/71acb5205a)] - **doc**: Add a missing comma (jiangq) [#19555](https://github.com/nodejs/node/pull/19555) +* [[`b9b752ef07`](https://github.com/nodejs/node/commit/b9b752ef07)] - **doc**: fix typos on n-api (Kyle Robinson Young) [#19385](https://github.com/nodejs/node/pull/19385) +* [[`10fe65a0d5`](https://github.com/nodejs/node/commit/10fe65a0d5)] - **doc**: fix n-api asynchronous threading docs (Eric Bickle) [#19073](https://github.com/nodejs/node/pull/19073) +* [[`8826f185b0`](https://github.com/nodejs/node/commit/8826f185b0)] - **doc**: mark NAPI\_AUTO\_LENGTH as code (Tobias Nießen) [#18697](https://github.com/nodejs/node/pull/18697) +* [[`e9e5d56121`](https://github.com/nodejs/node/commit/e9e5d56121)] - **doc**: fix exporting a function example (Aonghus O Nia) [#18661](https://github.com/nodejs/node/pull/18661) +* [[`9719b831a3`](https://github.com/nodejs/node/commit/9719b831a3)] - **doc**: fix typo in n-api.md (Vse Mozhet Byt) [#18590](https://github.com/nodejs/node/pull/18590) +* [[`fdd50fb35f`](https://github.com/nodejs/node/commit/fdd50fb35f)] - **doc**: small typo in n-api.md (iskore) [#18555](https://github.com/nodejs/node/pull/18555) +* [[`24a2791173`](https://github.com/nodejs/node/commit/24a2791173)] - **doc**: remove usage of you in n-api doc (Michael Dawson) [#18528](https://github.com/nodejs/node/pull/18528) +* [[`74086e19f2`](https://github.com/nodejs/node/commit/74086e19f2)] - **doc**: remove uannecessary Require (Michael Dawson) [#18184](https://github.com/nodejs/node/pull/18184) +* [[`fed2136857`](https://github.com/nodejs/node/commit/fed2136857)] - **doc**: napi: make header style consistent (Ali Ijaz Sheikh) [#18122](https://github.com/nodejs/node/pull/18122) +* 
[[`e04386a363`](https://github.com/nodejs/node/commit/e04386a363)] - **doc**: napi: fix unbalanced emphasis (Ali Ijaz Sheikh) [#18122](https://github.com/nodejs/node/pull/18122) +* [[`3d8e1aaf48`](https://github.com/nodejs/node/commit/3d8e1aaf48)] - **doc**: updates examples to use NULL (Michael Dawson) [#18008](https://github.com/nodejs/node/pull/18008) +* [[`173f29763e`](https://github.com/nodejs/node/commit/173f29763e)] - **doc**: update example in module registration (Franziska Hinkelmann) [#17424](https://github.com/nodejs/node/pull/17424) +* [[`c6852126fd`](https://github.com/nodejs/node/commit/c6852126fd)] - **doc**: use "JavaScript" instead of "Javascript" (Rich Trott) [#17163](https://github.com/nodejs/node/pull/17163) +* [[`35dc8bab9e`](https://github.com/nodejs/node/commit/35dc8bab9e)] - **doc**: document common pattern for instanceof checks (Michael Dawson) [#16699](https://github.com/nodejs/node/pull/16699) +* [[`22490dcb91`](https://github.com/nodejs/node/commit/22490dcb91)] - **doc**: fix typos in N-API (Swathi Kalahastri) [#16911](https://github.com/nodejs/node/pull/16911) +* [[`55fabd7337`](https://github.com/nodejs/node/commit/55fabd7337)] - **doc**: fix a typo in n-api documentation (Vipin Menon) [#16879](https://github.com/nodejs/node/pull/16879) +* [[`0c67f21bcf`](https://github.com/nodejs/node/commit/0c67f21bcf)] - **doc**: update to use NAPI\_AUTO\_LENGTH (Michael Dawson) [#16187](https://github.com/nodejs/node/pull/16187) +* [[`5c2bba0931`](https://github.com/nodejs/node/commit/5c2bba0931)] - **doc**: fix some links (Vse Mozhet Byt) [#16202](https://github.com/nodejs/node/pull/16202) +* [[`e9a6dffc65`](https://github.com/nodejs/node/commit/e9a6dffc65)] - **doc**: fix outdated code sample in n-api.md (rebornix) [#15581](https://github.com/nodejs/node/pull/15581) +* [[`ca69f1dfe7`](https://github.com/nodejs/node/commit/ca69f1dfe7)] - **doc**: fix new nits in links (Vse Mozhet Byt) [#15449](https://github.com/nodejs/node/pull/15449) +* [[`a766802bee`](https://github.com/nodejs/node/commit/a766802bee)] - **doc**: fix doc for napi\_get\_value\_string\_utf8 (Daniel Taveras) [#14529](https://github.com/nodejs/node/pull/14529) +* [[`b0f09a2ee6`](https://github.com/nodejs/node/commit/b0f09a2ee6)] - **doc**: added napi\_get\_value\_string\_latin1 (Kyle Farnung) [#14678](https://github.com/nodejs/node/pull/14678) +* [[`fbcc962727`](https://github.com/nodejs/node/commit/fbcc962727)] - **doc**: delint (Refael Ackermann) [#14707](https://github.com/nodejs/node/pull/14707) +* [[`831de617b0`](https://github.com/nodejs/node/commit/831de617b0)] - **doc**: document napi\_finalize() signature (cjihrig) [#14230](https://github.com/nodejs/node/pull/14230) +* [[`4b9773effa`](https://github.com/nodejs/node/commit/4b9773effa)] - **doc**: fix some links (Vse Mozhet Byt) [#14400](https://github.com/nodejs/node/pull/14400) +* [[`36185b343b`](https://github.com/nodejs/node/commit/36185b343b)] - **doc**: doc lifetime of n-api last error info (Michael Dawson) 
[#13939](https://github.com/nodejs/node/pull/13939) +* [[`cc3a4af7c8`](https://github.com/nodejs/node/commit/cc3a4af7c8)] - **doc**: fix a few n-api doc issues (Michael Dawson) [#13650](https://github.com/nodejs/node/pull/13650) +* [[`1e91d5804d`](https://github.com/nodejs/node/commit/1e91d5804d)] - **doc**: fix out of date napi\_callback doc (XadillaX) [#13570](https://github.com/nodejs/node/pull/13570) +* [[`c5ae39e401`](https://github.com/nodejs/node/commit/c5ae39e401)] - **doc**: fix napi\_create\_\*\_error signatures in n-api (Jamen Marzonie) [#13544](https://github.com/nodejs/node/pull/13544) +* [[`35a3cbb5dd`](https://github.com/nodejs/node/commit/35a3cbb5dd)] - **doc**: fix out of date sections in n-api doc (Michael Dawson) [#13508](https://github.com/nodejs/node/pull/13508) +* [[`a06cc4684f`](https://github.com/nodejs/node/commit/a06cc4684f)] - **doc**: fix typo "ndapi" in n-api.md (Jamen Marz) [#13484](https://github.com/nodejs/node/pull/13484) +* [[`82f31ff4af`](https://github.com/nodejs/node/commit/82f31ff4af)] - **doc**: add ref to option to enable n-api (Michael Dawson) [#13406](https://github.com/nodejs/node/pull/13406) +* [[`17fe21e83d`](https://github.com/nodejs/node/commit/17fe21e83d)] - **doc**: fix typo in n-api.md (JongChan Choi) [#13323](https://github.com/nodejs/node/pull/13323) +* [[`2e2905266e`](https://github.com/nodejs/node/commit/2e2905266e)] - **doc**: fix title/function name mismatch (Michael Dawson) [#13123](https://github.com/nodejs/node/pull/13123) +* [[`75e91fe5c8`](https://github.com/nodejs/node/commit/75e91fe5c8)] - **doc**: add reference to node\_api.h in docs (Michael Dawson) [#13084](https://github.com/nodejs/node/pull/13084) +* [[`0f74ee5cbf`](https://github.com/nodejs/node/commit/0f74ee5cbf)] - **doc**: clarify operation of napi\_cancel\_async\_work (Michael Dawson) [#12974](https://github.com/nodejs/node/pull/12974) +* [[`5b045374ed`](https://github.com/nodejs/node/commit/5b045374ed)] - **doc**: clarify node.js addons are c++ (Beth Griggs) [#12898](https://github.com/nodejs/node/pull/12898) +* [[`6bcd6d49d5`](https://github.com/nodejs/node/commit/6bcd6d49d5)] - **doc**: fix broken links in n-api doc (Michael Dawson) [#12889](https://github.com/nodejs/node/pull/12889) +* [[`3e388cf819`](https://github.com/nodejs/node/commit/3e388cf819)] - **doc**: Add initial documentation for N-API (Michael Dawson) [#12549](https://github.com/nodejs/node/pull/12549) +* [[`4d67369c1b`](https://github.com/nodejs/node/commit/4d67369c1b)] - **doc**: fix various nits (Vse Mozhet Byt) [#19743](https://github.com/nodejs/node/pull/19743) +* [[`057c80b088`](https://github.com/nodejs/node/commit/057c80b088)] - **doc**: move Fedor to TSC Emeritus (Myles Borins) [#18752](https://github.com/nodejs/node/pull/18752) +* [[`bf72ee667e`](https://github.com/nodejs/node/commit/bf72ee667e)] - **doc**: add mmarchini to collaborators (Matheus Marchini) [#18740](https://github.com/nodejs/node/pull/18740) +* 
[[`280af052d8`](https://github.com/nodejs/node/commit/280af052d8)] - **doc**: add history for url.parse (Steven) [#18685](https://github.com/nodejs/node/pull/18685) +* [[`29b0d3b104`](https://github.com/nodejs/node/commit/29b0d3b104)] - **doc**: add devsnek to collaborators (Gus Caplan) [#18679](https://github.com/nodejs/node/pull/18679) +* [[`dc6dc8232f`](https://github.com/nodejs/node/commit/dc6dc8232f)] - **doc**: add section for strategic initiatives (Michael Dawson) [#17104](https://github.com/nodejs/node/pull/17104) +* [[`6b348d4483`](https://github.com/nodejs/node/commit/6b348d4483)] - **doc**: modify the return value of request.write() (陈刚) [#18526](https://github.com/nodejs/node/pull/18526) +* [[`dd4d075e51`](https://github.com/nodejs/node/commit/dd4d075e51)] - **doc**: be more explicit in the sypnosis (Tim O. Peters) [#17977](https://github.com/nodejs/node/pull/17977) +* [[`0067bccf6f`](https://github.com/nodejs/node/commit/0067bccf6f)] - **doc**: fix description of createDecipheriv (Tobias Nießen) [#18651](https://github.com/nodejs/node/pull/18651) +* [[`bc2f0a5120`](https://github.com/nodejs/node/commit/bc2f0a5120)] - **doc**: linkify missing types (Vse Mozhet Byt) [#18444](https://github.com/nodejs/node/pull/18444) +* [[`32089bcbc1`](https://github.com/nodejs/node/commit/32089bcbc1)] - **doc**: streamline README intro (Rich Trott) [#18483](https://github.com/nodejs/node/pull/18483) +* [[`43839f1601`](https://github.com/nodejs/node/commit/43839f1601)] - **doc**: move Brian White to TSC Emeriti list (Rich Trott) [#18482](https://github.com/nodejs/node/pull/18482) +* [[`27d3c1a0f4`](https://github.com/nodejs/node/commit/27d3c1a0f4)] - **doc**: add Gibson Fahnestock to TSC (Rich Trott) [#18481](https://github.com/nodejs/node/pull/18481) +* [[`67fd520539`](https://github.com/nodejs/node/commit/67fd520539)] - **doc**: reorder section on updating PR branch (Ali Ijaz Sheikh) [#18355](https://github.com/nodejs/node/pull/18355) +* [[`f81a69aefe`](https://github.com/nodejs/node/commit/f81a69aefe)] - **fs**: fix `createReadStream(…, {end: n})` for non-seekable fds (Anna Henningsen) [#19329](https://github.com/nodejs/node/pull/19329) +* [[`18acad349c`](https://github.com/nodejs/node/commit/18acad349c)] - **http**: make socketPath work with no agent (Luigi Pinca) [#19425](https://github.com/nodejs/node/pull/19425) +* [[`1edadebaa0`](https://github.com/nodejs/node/commit/1edadebaa0)] - **http**: allow \_httpMessage to be GC'ed (Luigi Pinca) [#18865](https://github.com/nodejs/node/pull/18865) +* [[`dbe70b744c`](https://github.com/nodejs/node/commit/dbe70b744c)] - **http**: free the parser before emitting 'upgrade' (Luigi Pinca) [#18209](https://github.com/nodejs/node/pull/18209) +* [[`77a405b92f`](https://github.com/nodejs/node/commit/77a405b92f)] - **lib**: set process.execPath on OpenBSD (Aaron Bieber) [#18543](https://github.com/nodejs/node/pull/18543) +* [[`3aa5b7d939`](https://github.com/nodejs/node/commit/3aa5b7d939)] - **n-api**: add more `int64\_t` tests (Kyle 
Farnung) [#19402](https://github.com/nodejs/node/pull/19402) +* [[`abd9fd6797`](https://github.com/nodejs/node/commit/abd9fd6797)] - **n-api**: back up env before finalize (Gabriel Schulhof) [#19718](https://github.com/nodejs/node/pull/19718) +* [[`e6ccdfbde3`](https://github.com/nodejs/node/commit/e6ccdfbde3)] - **n-api**: ensure in-module exceptions are propagated (Gabriel Schulhof) [#19537](https://github.com/nodejs/node/pull/19537) +* [[`c6d0a66ef2`](https://github.com/nodejs/node/commit/c6d0a66ef2)] - **n-api**: bump version of n-api supported (Michael Dawson) [#19497](https://github.com/nodejs/node/pull/19497) +* [[`c16a705416`](https://github.com/nodejs/node/commit/c16a705416)] - **n-api**: re-write test\_make\_callback (Gabriel Schulhof) [#19448](https://github.com/nodejs/node/pull/19448) +* [[`49cd4fad89`](https://github.com/nodejs/node/commit/49cd4fad89)] - **n-api**: add napi\_fatal\_exception (Mathias Buus) [#19337](https://github.com/nodejs/node/pull/19337) +* [[`eb29266878`](https://github.com/nodejs/node/commit/eb29266878)] - **n-api**: add missing exception checking (Michael Dawson) [#19362](https://github.com/nodejs/node/pull/19362) +* [[`2c1190a93d`](https://github.com/nodejs/node/commit/2c1190a93d)] - **n-api**: take n-api out of experimental (Michael Dawson) [#19262](https://github.com/nodejs/node/pull/19262) +* [[`ce1447920e`](https://github.com/nodejs/node/commit/ce1447920e)] - **n-api**: resolve promise in test (Gabriel Schulhof) [#19245](https://github.com/nodejs/node/pull/19245) +* [[`a8237efaf1`](https://github.com/nodejs/node/commit/a8237efaf1)] - **n-api**: update documentation (Gabriel Schulhof) [#19078](https://github.com/nodejs/node/pull/19078) +* [[`af62c8fff7`](https://github.com/nodejs/node/commit/af62c8fff7)] - **n-api**: update reference test (Gabriel Schulhof) [#19086](https://github.com/nodejs/node/pull/19086) +* [[`d2463745a7`](https://github.com/nodejs/node/commit/d2463745a7)] - **n-api**: fix object test (Gabriel Schulhof) [#19039](https://github.com/nodejs/node/pull/19039) +* [[`516c287f8e`](https://github.com/nodejs/node/commit/516c287f8e)] - **n-api**: remove extra reference from test (Gabriel Schulhof) [#18542](https://github.com/nodejs/node/pull/18542) +* [[`a8dec487f7`](https://github.com/nodejs/node/commit/a8dec487f7)] - **n-api**: add methods to open/close callback scope (Michael Dawson) [#18089](https://github.com/nodejs/node/pull/18089) +* [[`c09a7134e7`](https://github.com/nodejs/node/commit/c09a7134e7)] - **n-api**: wrap control flow macro in do/while (Ben Noordhuis) [#18532](https://github.com/nodejs/node/pull/18532) +* [[`b565ba2d82`](https://github.com/nodejs/node/commit/b565ba2d82)] - **n-api**: implement wrapping using private properties (Gabriel Schulhof) [#18311](https://github.com/nodejs/node/pull/18311) +* [[`d9df8cfe77`](https://github.com/nodejs/node/commit/d9df8cfe77)] - **n-api**: change assert ok check to notStrictEqual. 
(Aaron Kau) [#18414](https://github.com/nodejs/node/pull/18414) +* [[`2e24a0bfe7`](https://github.com/nodejs/node/commit/2e24a0bfe7)] - **n-api**: throw RangeError napi\_create\_typedarray() (Jinho Bang) [#18037](https://github.com/nodejs/node/pull/18037) +* [[`62427bbed9`](https://github.com/nodejs/node/commit/62427bbed9)] - **n-api**: expose n-api version in process.versions (Michael Dawson) [#18067](https://github.com/nodejs/node/pull/18067) +* [[`bb99f31f30`](https://github.com/nodejs/node/commit/bb99f31f30)] - **n-api**: throw RangeError in napi\_create\_dataview() with invalid range (Jinho Bang) [#17869](https://github.com/nodejs/node/pull/17869) +* [[`65ea7abd55`](https://github.com/nodejs/node/commit/65ea7abd55)] - **n-api**: fix memory leak in napi\_async\_destroy() (alnyan) [#17714](https://github.com/nodejs/node/pull/17714) +* [[`d4284a464b`](https://github.com/nodejs/node/commit/d4284a464b)] - **n-api**: use nullptr instead of NULL in node\_api.cc (Daniel Bevenius) [#17276](https://github.com/nodejs/node/pull/17276) +* [[`f4391b95ee`](https://github.com/nodejs/node/commit/f4391b95ee)] - **n-api**: add helper for addons to get the event loop (Anna Henningsen) [#17109](https://github.com/nodejs/node/pull/17109) +* [[`3c84db624a`](https://github.com/nodejs/node/commit/3c84db624a)] - **n-api**: unexpose symbols and remove EXTERNAL\_NAPI (Gabriel Schulhof) [#16234](https://github.com/nodejs/node/pull/16234) +* [[`55aab6bf01`](https://github.com/nodejs/node/commit/55aab6bf01)] - **n-api**: check against invalid handle scope usage (Anna Henningsen) [#16201](https://github.com/nodejs/node/pull/16201) +* [[`169b53e788`](https://github.com/nodejs/node/commit/169b53e788)] - **n-api**: use module name macro (Michael Dawson) [#16185](https://github.com/nodejs/node/pull/16185) +* [[`32412a8ded`](https://github.com/nodejs/node/commit/32412a8ded)] - **n-api**: make changes for source compatibility (Gabriel Schulhof) [#16102](https://github.com/nodejs/node/pull/16102) +* [[`00d094f9c3`](https://github.com/nodejs/node/commit/00d094f9c3)] - **n-api**: add check for large strings (Michael Dawson) [#15611](https://github.com/nodejs/node/pull/15611) +* [[`2bc8a59915`](https://github.com/nodejs/node/commit/2bc8a59915)] - **n-api**: fix warning about size\_t compare with int (Sampson Gao) [#15508](https://github.com/nodejs/node/pull/15508) +* [[`5e29823d1d`](https://github.com/nodejs/node/commit/5e29823d1d)] - **n-api**: remove n-api module loading flag (Gabriel Schulhof) [#14902](https://github.com/nodejs/node/pull/14902) +* [[`f31b50cfc7`](https://github.com/nodejs/node/commit/f31b50cfc7)] - **n-api**: add optional string length parameters (Sampson Gao) [#15343](https://github.com/nodejs/node/pull/15343) +* [[`fe87a5944b`](https://github.com/nodejs/node/commit/fe87a5944b)] - **n-api**: napi\_is\_construct\_call-\>napi\_get\_new\_target (Sampson Gao) [#14698](https://github.com/nodejs/node/pull/14698) +* [[`5eadd6249d`](https://github.com/nodejs/node/commit/5eadd6249d)] - **n-api**: Context for 
custom async operations (Jason Ginchereau) [#15189](https://github.com/nodejs/node/pull/15189) +* [[`50cb48b55c`](https://github.com/nodejs/node/commit/50cb48b55c)] - **n-api**: refactor napi\_addon\_register\_func (Taylor Woll) [#15088](https://github.com/nodejs/node/pull/15088) +* [[`156a8b6069`](https://github.com/nodejs/node/commit/156a8b6069)] - **n-api**: change async resource name to napi\_value (Jason Ginchereau) [#14697](https://github.com/nodejs/node/pull/14697) +* [[`7588eead2a`](https://github.com/nodejs/node/commit/7588eead2a)] - **n-api**: use AsyncResource for Work tracking (Anna Henningsen) [#14697](https://github.com/nodejs/node/pull/14697) +* [[`676cff48bd`](https://github.com/nodejs/node/commit/676cff48bd)] - **n-api**: stop creating references to primitives (Gabriel Schulhof) [#15289](https://github.com/nodejs/node/pull/15289) +* [[`3b4708b025`](https://github.com/nodejs/node/commit/3b4708b025)] - **n-api**: implement napi\_run\_script (Gabriel Schulhof) [#15216](https://github.com/nodejs/node/pull/15216) +* [[`ac5b904808`](https://github.com/nodejs/node/commit/ac5b904808)] - **n-api**: adds function to adjust external memory (Chris Young) [#14310](https://github.com/nodejs/node/pull/14310) +* [[`278a2d069f`](https://github.com/nodejs/node/commit/278a2d069f)] - **n-api**: implement promise (Gabriel Schulhof) [#14365](https://github.com/nodejs/node/pull/14365) +* [[`73cc251f50`](https://github.com/nodejs/node/commit/73cc251f50)] - **n-api**: add ability to remove a wrapping (Gabriel Schulhof) [#14658](https://github.com/nodejs/node/pull/14658) +* [[`951adbef3d`](https://github.com/nodejs/node/commit/951adbef3d)] - **n-api**: add napi\_get\_node\_version (Anna Henningsen) [#14696](https://github.com/nodejs/node/pull/14696) +* [[`b29eb693a0`](https://github.com/nodejs/node/commit/b29eb693a0)] - **n-api**: optimize number API performance (Jason Ginchereau) [#14573](https://github.com/nodejs/node/pull/14573) +* [[`bd032a158a`](https://github.com/nodejs/node/commit/bd032a158a)] - **n-api**: add support for DataView (Shivanth MP) [#14382](https://github.com/nodejs/node/pull/14382) +* [[`86b101cb60`](https://github.com/nodejs/node/commit/86b101cb60)] - **n-api**: re-use napi\_env between modules (Gabriel Schulhof) [#14217](https://github.com/nodejs/node/pull/14217) +* [[`1acab66df4`](https://github.com/nodejs/node/commit/1acab66df4)] - **n-api**: directly create Local from Persistent (Kyle Farnung) [#14211](https://github.com/nodejs/node/pull/14211) +* [[`f4d1cae634`](https://github.com/nodejs/node/commit/f4d1cae634)] - **n-api**: add fast paths for integer getters (Anna Henningsen) [#14393](https://github.com/nodejs/node/pull/14393) +* [[`aad36b2cd4`](https://github.com/nodejs/node/commit/aad36b2cd4)] - **n-api**: add napi\_fatal\_error API (Kyle Farnung) [#13971](https://github.com/nodejs/node/pull/13971) +* [[`57be12ed97`](https://github.com/nodejs/node/commit/57be12ed97)] - **n-api**: add code parameter to error helpers (Michael Dawson) 
[#13988](https://github.com/nodejs/node/pull/13988) +* [[`cd3015408e`](https://github.com/nodejs/node/commit/cd3015408e)] - **n-api**: wrap test macros in do/while (Kyle Farnung) [#14095](https://github.com/nodejs/node/pull/14095) +* [[`7973bd3e63`](https://github.com/nodejs/node/commit/7973bd3e63)] - **n-api**: Implement stricter wrapping (Gabriel Schulhof) [#13872](https://github.com/nodejs/node/pull/13872) +* [[`5b0c57cfeb`](https://github.com/nodejs/node/commit/5b0c57cfeb)] - **n-api**: fix warning in test\_general (Daniel Bevenius) [#14104](https://github.com/nodejs/node/pull/14104) +* [[`a5517d80bb`](https://github.com/nodejs/node/commit/a5517d80bb)] - **n-api**: add napi\_has\_own\_property() (cjihrig) [#14063](https://github.com/nodejs/node/pull/14063) +* [[`8e2a26d3d0`](https://github.com/nodejs/node/commit/8e2a26d3d0)] - **n-api**: fix -Wmaybe-uninitialized compiler warning (Ben Noordhuis) [#14053](https://github.com/nodejs/node/pull/14053) +* [[`33821c3087`](https://github.com/nodejs/node/commit/33821c3087)] - **n-api**: use Maybe version of Object::SetPrototype() (Ben Noordhuis) [#14053](https://github.com/nodejs/node/pull/14053) +* [[`80cf25a8a5`](https://github.com/nodejs/node/commit/80cf25a8a5)] - **n-api**: add napi\_delete\_property() (cjihrig) [#13934](https://github.com/nodejs/node/pull/13934) +* [[`cadec3b37e`](https://github.com/nodejs/node/commit/cadec3b37e)] - **n-api**: add napi\_delete\_element() (cjihrig) [#13949](https://github.com/nodejs/node/pull/13949) +* [[`97b628ba8e`](https://github.com/nodejs/node/commit/97b628ba8e)] - **n-api**: fix section title typo (Kyle Farnung) [#13972](https://github.com/nodejs/node/pull/13972) +* [[`c3eb187bd9`](https://github.com/nodejs/node/commit/c3eb187bd9)] - **n-api**: avoid crash in napi\_escape\_scope() (Michael Dawson) [#13651](https://github.com/nodejs/node/pull/13651) +* [[`919556f27a`](https://github.com/nodejs/node/commit/919556f27a)] - **n-api**: enable napi\_wrap() to work with any object (Jason Ginchereau) [#13250](https://github.com/nodejs/node/pull/13250) +* [[`86c0ebf4e2`](https://github.com/nodejs/node/commit/86c0ebf4e2)] - **n-api**: add napi\_get\_version (Michael Dawson) [#13207](https://github.com/nodejs/node/pull/13207) +* [[`70281ba1be`](https://github.com/nodejs/node/commit/70281ba1be)] - **n-api**: Retain last code when getting error info (Jason Ginchereau) [#13087](https://github.com/nodejs/node/pull/13087) +* [[`8d3162d9e6`](https://github.com/nodejs/node/commit/8d3162d9e6)] - **n-api**: remove compiler warning (Anna Henningsen) [#13014](https://github.com/nodejs/node/pull/13014) +* [[`a128219a48`](https://github.com/nodejs/node/commit/a128219a48)] - **n-api**: Handle fatal exception in async callback (Jason Ginchereau) [#12838](https://github.com/nodejs/node/pull/12838) +* [[`2e36365d56`](https://github.com/nodejs/node/commit/2e36365d56)] - **n-api**: napi\_get\_cb\_info should fill array (Jason Ginchereau) [#12863](https://github.com/nodejs/node/pull/12863) +* 
[[`7507d1e0e6`](https://github.com/nodejs/node/commit/7507d1e0e6)] - **n-api**: remove unnecessary try-catch bracket from certain APIs (Gabriel Schulhof) [#12705](https://github.com/nodejs/node/pull/12705) +* [[`49d74c648d`](https://github.com/nodejs/node/commit/49d74c648d)] - **n-api**: Sync with back-compat changes (Jason Ginchereau) [#12674](https://github.com/nodejs/node/pull/12674) +* [[`bc252509ca`](https://github.com/nodejs/node/commit/bc252509ca)] - **n-api**: Reference and external tests (Jason Ginchereau) [#12551](https://github.com/nodejs/node/pull/12551) +* [[`c560db9ece`](https://github.com/nodejs/node/commit/c560db9ece)] - **n-api**: Enable scope and ref APIs during exception (Jason Ginchereau) [#12524](https://github.com/nodejs/node/pull/12524) +* [[`8287e7671a`](https://github.com/nodejs/node/commit/8287e7671a)] - **n-api**: tighten null-checking and clean up last error (Gabriel Schulhof) [#12539](https://github.com/nodejs/node/pull/12539) +* [[`f5cfa09ca4`](https://github.com/nodejs/node/commit/f5cfa09ca4)] - **n-api**: remove napi\_get\_value\_string\_length() (Jason Ginchereau) [#12496](https://github.com/nodejs/node/pull/12496) +* [[`c44f6ffc3c`](https://github.com/nodejs/node/commit/c44f6ffc3c)] - **n-api**: fix coverity scan report (Michael Dawson) [#12365](https://github.com/nodejs/node/pull/12365) +* [[`9bf8e9d48c`](https://github.com/nodejs/node/commit/9bf8e9d48c)] - **n-api**: add string api for latin1 encoding (Sampson Gao) [#12368](https://github.com/nodejs/node/pull/12368) +* [[`eb51d42d2b`](https://github.com/nodejs/node/commit/eb51d42d2b)] - **n-api**: fix -Wmismatched-tags compiler warning (Ben Noordhuis) [#12333](https://github.com/nodejs/node/pull/12333) +* [[`d82fd2a9a0`](https://github.com/nodejs/node/commit/d82fd2a9a0)] - **n-api**: implement async helper methods (taylor.woll) [#12250](https://github.com/nodejs/node/pull/12250) +* [[`c127b71526`](https://github.com/nodejs/node/commit/c127b71526)] - **n-api**: change napi\_callback to return napi\_value (Taylor Woll) [#12248](https://github.com/nodejs/node/pull/12248) +* [[`2a726223ea`](https://github.com/nodejs/node/commit/2a726223ea)] - **n-api**: cache Symbol.hasInstance (Gabriel Schulhof) [#12246](https://github.com/nodejs/node/pull/12246) +* [[`db36ca5f91`](https://github.com/nodejs/node/commit/db36ca5f91)] - **n-api**: Update property attrs enum to match JS spec (Jason Ginchereau) [#12240](https://github.com/nodejs/node/pull/12240) +* [[`1e6d3bb841`](https://github.com/nodejs/node/commit/1e6d3bb841)] - **n-api**: create napi\_env as a real structure (Gabriel Schulhof) [#12195](https://github.com/nodejs/node/pull/12195) +* [[`f1bdbd17d0`](https://github.com/nodejs/node/commit/f1bdbd17d0)] - **n-api**: break dep between v8 and napi attributes (Michael Dawson) [#12191](https://github.com/nodejs/node/pull/12191) +* [[`a9562fe30c`](https://github.com/nodejs/node/commit/a9562fe30c)] - **n-api**: add support for abi stable module API (Jason Ginchereau) [#11975](https://github.com/nodejs/node/pull/11975) 
+* [[`aa0fb7761e`](https://github.com/nodejs/node/commit/aa0fb7761e)] - **n-api,test**: add int64 bounds tests (Kyle Farnung) [#19309](https://github.com/nodejs/node/pull/19309) +* [[`3f6d80e25c`](https://github.com/nodejs/node/commit/3f6d80e25c)] - **n-api,test**: add a new.target test to addons-napi (Taylor Woll) [#19236](https://github.com/nodejs/node/pull/19236) +* [[`011b53e28f`](https://github.com/nodejs/node/commit/011b53e28f)] - **n-api,test**: use module name macro (Gabriel Schulhof) [#16146](https://github.com/nodejs/node/pull/16146) +* [[`a6af97f76c`](https://github.com/nodejs/node/commit/a6af97f76c)] - **napi**: initialize and check status properly (Gabriel Schulhof) [#12283](https://github.com/nodejs/node/pull/12283) +* [[`9b36811d8e`](https://github.com/nodejs/node/commit/9b36811d8e)] - **napi**: supress invalid coverity leak message (Michael Dawson) [#12192](https://github.com/nodejs/node/pull/12192) +* [[`269c2f3ad9`](https://github.com/nodejs/node/commit/269c2f3ad9)] - **net**: remove redundant code from \_writeGeneric() (Luigi Pinca) [#18429](https://github.com/nodejs/node/pull/18429) +* [[`988cca841e`](https://github.com/nodejs/node/commit/988cca841e)] - **process**: fix reading zero-length env vars on win32 (Anna Henningsen) [#18463](https://github.com/nodejs/node/pull/18463) +* [[`72a5710b71`](https://github.com/nodejs/node/commit/72a5710b71)] - **readline**: update references to archived repository (Tobias Nießen) [#17924](https://github.com/nodejs/node/pull/17924) +* [[`b20c278a7c`](https://github.com/nodejs/node/commit/b20c278a7c)] - **src**: add napi\_handle\_scope\_mismatch to msg list (neta) [#17161](https://github.com/nodejs/node/pull/17161) +* [[`0ef0b342e9`](https://github.com/nodejs/node/commit/0ef0b342e9)] - **src**: replace assert with CHECK\_LE in node\_api.cc (Ben Noordhuis) [#14514](https://github.com/nodejs/node/pull/14514) +* [[`a8c73748db`](https://github.com/nodejs/node/commit/a8c73748db)] - **src**: correct endif comment SRC\_NODE\_API\_H\_\_ (Daniel Bevenius) [#13190](https://github.com/nodejs/node/pull/13190) +* [[`0ca2dad3a6`](https://github.com/nodejs/node/commit/0ca2dad3a6)] - **src**: free memory before re-setting URLHost value (Ivan Filenko) [#18357](https://github.com/nodejs/node/pull/18357) +* [[`e54b8e8184`](https://github.com/nodejs/node/commit/e54b8e8184)] - **stream**: cleanup() when unpiping all streams. 
(陈刚) [#18266](https://github.com/nodejs/node/pull/18266) +* [[`8ab8d6afd6`](https://github.com/nodejs/node/commit/8ab8d6afd6)] - **stream**: fix y.pipe(x)+y.pipe(x)+y.unpipe(x) (Anna Henningsen) [#12746](https://github.com/nodejs/node/pull/12746) +* [[`8f830ca896`](https://github.com/nodejs/node/commit/8f830ca896)] - **stream**: remove unreachable code (Luigi Pinca) [#18239](https://github.com/nodejs/node/pull/18239) +* [[`64c83d7da9`](https://github.com/nodejs/node/commit/64c83d7da9)] - **stream**: simplify `src.\_readableState` to `state` (陈刚) [#18264](https://github.com/nodejs/node/pull/18264) +* [[`7c58045470`](https://github.com/nodejs/node/commit/7c58045470)] - **test**: remove unnecessary timer (cjihrig) [#18719](https://github.com/nodejs/node/pull/18719) +* [[`c90b77ed5d`](https://github.com/nodejs/node/commit/c90b77ed5d)] - **test**: convert new tests to use error types (Jack Horton) [#18581](https://github.com/nodejs/node/pull/18581) +* [[`7f37dc9c48`](https://github.com/nodejs/node/commit/7f37dc9c48)] - **test**: improve error message output (Bhavani Shankar) [#18498](https://github.com/nodejs/node/pull/18498) +* [[`59249a1768`](https://github.com/nodejs/node/commit/59249a1768)] - **test**: show pending exception error in napi tests (Ben Wilcox) [#18413](https://github.com/nodejs/node/pull/18413) +* [[`eceb70b584`](https://github.com/nodejs/node/commit/eceb70b584)] - **test**: refactor addons-napi/test\_exception/test.js (Rich Trott) [#18340](https://github.com/nodejs/node/pull/18340) +* [[`b3806ecd39`](https://github.com/nodejs/node/commit/b3806ecd39)] - **test**: fixed typos in napi test (furstenheim) [#18148](https://github.com/nodejs/node/pull/18148) +* [[`a6c277e2eb`](https://github.com/nodejs/node/commit/a6c277e2eb)] - **test**: remove ambiguous error messages from test\_error (Nicholas Drane) [#17812](https://github.com/nodejs/node/pull/17812) +* [[`412cc17748`](https://github.com/nodejs/node/commit/412cc17748)] - **test**: remove literals that obscure assert messages (Rich Trott) [#17642](https://github.com/nodejs/node/pull/17642) +* [[`86ddd03608`](https://github.com/nodejs/node/commit/86ddd03608)] - **test**: add unhandled rejection guard (babygoat) [#17275](https://github.com/nodejs/node/pull/17275) +* [[`e54b58c024`](https://github.com/nodejs/node/commit/e54b58c024)] - **test**: replace assert.throws with common.expectsError (Leko) [#17445](https://github.com/nodejs/node/pull/17445) +* [[`976f32d189`](https://github.com/nodejs/node/commit/976f32d189)] - **test**: refactor addons-napi/test\_promise/test.js (ka3e) [#16814](https://github.com/nodejs/node/pull/16814) +* [[`2476ab9619`](https://github.com/nodejs/node/commit/2476ab9619)] - **test**: improve error emssage reporting in testNapiRun.js (Paul Ashfield) [#16821](https://github.com/nodejs/node/pull/16821) +* [[`d4c04e05f7`](https://github.com/nodejs/node/commit/d4c04e05f7)] - **test**: improve assert messages in napi exception test (Paul Blanche) 
[#16820](https://github.com/nodejs/node/pull/16820) +* [[`c14207c77b`](https://github.com/nodejs/node/commit/c14207c77b)] - **test**: add detailed message for assertion failure (Attila Gonda) [#16812](https://github.com/nodejs/node/pull/16812) +* [[`d31792fcbe`](https://github.com/nodejs/node/commit/d31792fcbe)] - **test**: use default assertion messages (John Byrne) [#16808](https://github.com/nodejs/node/pull/16808) +* [[`087d213f67`](https://github.com/nodejs/node/commit/087d213f67)] - **test**: include actual value in assertion message (Matthew Cantelon) [#15935](https://github.com/nodejs/node/pull/15935) +* [[`9cc435dc85`](https://github.com/nodejs/node/commit/9cc435dc85)] - **test**: improve message for assert.strictEqual() (Jayson D. Henkel) [#16013](https://github.com/nodejs/node/pull/16013) +* [[`ebbd07dd27`](https://github.com/nodejs/node/commit/ebbd07dd27)] - **test**: remove redundant error messages (Christina Chan) [#16043](https://github.com/nodejs/node/pull/16043) +* [[`5bba809e01`](https://github.com/nodejs/node/commit/5bba809e01)] - **test**: cleaned up assert messages (mrgorbo) [#16032](https://github.com/nodejs/node/pull/16032) +* [[`53bd313739`](https://github.com/nodejs/node/commit/53bd313739)] - **test**: fix race condition in addon test (Kinnan Kwok) [#16037](https://github.com/nodejs/node/pull/16037) +* [[`37acd806be`](https://github.com/nodejs/node/commit/37acd806be)] - **test**: remove template literal (Emily Ford) [#15953](https://github.com/nodejs/node/pull/15953) +* [[`31c97178c1`](https://github.com/nodejs/node/commit/31c97178c1)] - **test**: remove unused parameters (Daniil Shakir) [#14968](https://github.com/nodejs/node/pull/14968) +* [[`b59eddd082`](https://github.com/nodejs/node/commit/b59eddd082)] - **test**: use regular expressions in throw assertions (Vincent Xue) [#14318](https://github.com/nodejs/node/pull/14318) +* [[`06b1273464`](https://github.com/nodejs/node/commit/06b1273464)] - **test**: changed error message validator (Pratik Jain) [#14443](https://github.com/nodejs/node/pull/14443) +* [[`3f3eaf9961`](https://github.com/nodejs/node/commit/3f3eaf9961)] - **test**: replace string concat with template literal (Song, Bintao Garfield) [#14269](https://github.com/nodejs/node/pull/14269) +* [[`48274213b1`](https://github.com/nodejs/node/commit/48274213b1)] - **test**: handle missing V8 tests in n-api test (cjihrig) [#14123](https://github.com/nodejs/node/pull/14123) +* [[`7f126c2069`](https://github.com/nodejs/node/commit/7f126c2069)] - **test**: add coverage for napi\_typeof (Michael Dawson) [#13990](https://github.com/nodejs/node/pull/13990) +* [[`a0cf9b7a73`](https://github.com/nodejs/node/commit/a0cf9b7a73)] - **test**: verify napi\_get\_property() walks prototype (cjihrig) [#13961](https://github.com/nodejs/node/pull/13961) +* [[`1e25062fa1`](https://github.com/nodejs/node/commit/1e25062fa1)] - **test**: add coverage for napi\_property\_descriptor (Michael Dawson) [#13510](https://github.com/nodejs/node/pull/13510) +* 
[[`eb422796cd`](https://github.com/nodejs/node/commit/eb422796cd)] - **test**: fix build warning in addons-napi/test\_object (Jason Ginchereau) [#13412](https://github.com/nodejs/node/pull/13412) +* [[`9d70b43bdc`](https://github.com/nodejs/node/commit/9d70b43bdc)] - **test**: consolidate n-api test addons - part2 (Michael Dawson) [#13380](https://github.com/nodejs/node/pull/13380) +* [[`06cf9480d3`](https://github.com/nodejs/node/commit/06cf9480d3)] - **test**: consolidate n-api test addons (Michael Dawson) [#13317](https://github.com/nodejs/node/pull/13317) +* [[`652d3218fe`](https://github.com/nodejs/node/commit/652d3218fe)] - **test**: Make N-API weak-ref GC tests asynchronous (Jason Ginchereau) [#13121](https://github.com/nodejs/node/pull/13121) +* [[`0dac33d4f2`](https://github.com/nodejs/node/commit/0dac33d4f2)] - **test**: improve n-api coverage for typed arrays (Michael Dawson) [#13244](https://github.com/nodejs/node/pull/13244) +* [[`1829d25907`](https://github.com/nodejs/node/commit/1829d25907)] - **test**: add coverage for napi\_has\_named\_property (Michael Dawson) [#13178](https://github.com/nodejs/node/pull/13178) +* [[`d89afe8685`](https://github.com/nodejs/node/commit/d89afe8685)] - **test**: increase n-api constructor coverage (Michael Dawson) [#13124](https://github.com/nodejs/node/pull/13124) +* [[`71aa251671`](https://github.com/nodejs/node/commit/71aa251671)] - **test**: Improve N-API test coverage (Michael Dawson) [#13044](https://github.com/nodejs/node/pull/13044) +* [[`314f22dcf4`](https://github.com/nodejs/node/commit/314f22dcf4)] - **test**: improve N-API test coverage (Michael Dawson) [#13006](https://github.com/nodejs/node/pull/13006) +* [[`263a633d5e`](https://github.com/nodejs/node/commit/263a633d5e)] - **test**: add common.mustCall() to NAPI exception test (Rich Trott) [#12959](https://github.com/nodejs/node/pull/12959) +* [[`5936f7c9bb`](https://github.com/nodejs/node/commit/5936f7c9bb)] - **test**: improve n-api array func coverage (Michael Dawson) [#12890](https://github.com/nodejs/node/pull/12890) +* [[`ce03977f30`](https://github.com/nodejs/node/commit/ce03977f30)] - **test**: fix napi test\_reference for recent V8 (Michaël Zasso) [#12864](https://github.com/nodejs/node/pull/12864) +* [[`dd7665a68e`](https://github.com/nodejs/node/commit/dd7665a68e)] - **test**: port test for make\_callback to n-api (Hitesh Kanwathirtha) [#12409](https://github.com/nodejs/node/pull/12409) +* [[`f09677fdba`](https://github.com/nodejs/node/commit/f09677fdba)] - **test**: add coverage for error apis (Michael Dawson) [#12729](https://github.com/nodejs/node/pull/12729) +* [[`1785f3cf44`](https://github.com/nodejs/node/commit/1785f3cf44)] - **test**: fix warning in n-api reference test (Michael Dawson) [#12730](https://github.com/nodejs/node/pull/12730) +* [[`5d2afb2174`](https://github.com/nodejs/node/commit/5d2afb2174)] - **test**: replace indexOf with includes (gwer) [#12604](https://github.com/nodejs/node/pull/12604) +* 
[[`fcb019f6ea`](https://github.com/nodejs/node/commit/fcb019f6ea)] - **test**: add coverage for napi\_cancel\_async\_work (Michael Dawson) [#12575](https://github.com/nodejs/node/pull/12575) +* [[`72c5d976f1`](https://github.com/nodejs/node/commit/72c5d976f1)] - **test**: test doc'd napi\_get\_value\_int32 behaviour (Michael Dawson) [#12633](https://github.com/nodejs/node/pull/12633) +* [[`d9f3e0dd83`](https://github.com/nodejs/node/commit/d9f3e0dd83)] - ***Revert*** "**test**: port test for make\_callback to n-api" (James M Snell) [#12475](https://github.com/nodejs/node/pull/12475) +* [[`a003777d96`](https://github.com/nodejs/node/commit/a003777d96)] - **test**: port test for make\_callback to n-api (Hitesh Kanwathirtha) [#12409](https://github.com/nodejs/node/pull/12409) +* [[`577f327d2c`](https://github.com/nodejs/node/commit/577f327d2c)] - **test**: fix compiler warning in n-api test (Anna Henningsen) [#12318](https://github.com/nodejs/node/pull/12318) +* [[`f8c2585fe0`](https://github.com/nodejs/node/commit/f8c2585fe0)] - **test**: add second argument to assert.throws (Michaël Zasso) [#12270](https://github.com/nodejs/node/pull/12270) +* [[`6bf3d04d6c`](https://github.com/nodejs/node/commit/6bf3d04d6c)] - **test**: improve test coverage for n-api (Michael Dawson) [#12327](https://github.com/nodejs/node/pull/12327) +* [[`d799b1cb61`](https://github.com/nodejs/node/commit/d799b1cb61)] - **test**: update a few tests to work on OpenBSD (Aaron Bieber) [#18543](https://github.com/nodejs/node/pull/18543) +* [[`bc883fb136`](https://github.com/nodejs/node/commit/bc883fb136)] - **test**: refactor test-http-abort-before-end (cjihrig) [#18508](https://github.com/nodejs/node/pull/18508) +* [[`44ab85018c`](https://github.com/nodejs/node/commit/44ab85018c)] - **test**: fix flaky timers-block-eventloop test (Anatoli Papirovski) [#18567](https://github.com/nodejs/node/pull/18567) +* [[`5bcf668f42`](https://github.com/nodejs/node/commit/5bcf668f42)] - **test**: use correct size in test-stream-buffer-list (Luigi Pinca) [#18239](https://github.com/nodejs/node/pull/18239) +* [[`f3c6febedf`](https://github.com/nodejs/node/commit/f3c6febedf)] - **test**: update references to archived repository (Tobias Nießen) [#17924](https://github.com/nodejs/node/pull/17924) +* [[`b2a2a55271`](https://github.com/nodejs/node/commit/b2a2a55271)] - **test**: verify the shell option works properly on execFile (jvelezpo) [#18384](https://github.com/nodejs/node/pull/18384) +* [[`fd7d1990db`](https://github.com/nodejs/node/commit/fd7d1990db)] - **test**: remove orphaned entries from status (Kyle Farnung) [#19042](https://github.com/nodejs/node/pull/19042) +* [[`5ca8dee8cb`](https://github.com/nodejs/node/commit/5ca8dee8cb)] - **test**: remove n-api intermediate files (Gabriel Schulhof) [#19375](https://github.com/nodejs/node/pull/19375) +* [[`46aed5800f`](https://github.com/nodejs/node/commit/46aed5800f)] - **test**: make common.mustNotCall show file:linenumber (Lance Ball) [#17257](https://github.com/nodejs/node/pull/17257) 
+* [[`4d2efa2415`](https://github.com/nodejs/node/commit/4d2efa2415)] - **test**: remove mark flaky for moved test (Beth Griggs) [#19069](https://github.com/nodejs/node/pull/19069)
+* [[`502781c1d7`](https://github.com/nodejs/node/commit/502781c1d7)] - **test**: fix spelling in test case comments (Tobias Nießen) [#18018](https://github.com/nodejs/node/pull/18018)
+* [[`b2bf6c873f`](https://github.com/nodejs/node/commit/b2bf6c873f)] - **test,lib,doc**: use function declarations (Rich Trott) [#12711](https://github.com/nodejs/node/pull/12711)
+* [[`a91b1b928c`](https://github.com/nodejs/node/commit/a91b1b928c)] - **win, build**: fix intl-none option (Birunthan Mohanathas) [#18292](https://github.com/nodejs/node/pull/18292)
+* [[`6ff763bd66`](https://github.com/nodejs/node/commit/6ff763bd66)] - **win, build**: fix without-intl option (Bartosz Sosnowski) [#17614](https://github.com/nodejs/node/pull/17614)
+
 ## 2018-03-29, Version 6.14.1 'Boron' (LTS), @MylesBorins
diff --git a/src/node_version.h b/src/node_version.h
index e71ed35478e3b7..1bef707c808c2a 100644
--- a/src/node_version.h
+++ b/src/node_version.h
@@ -8,7 +8,7 @@
 #define NODE_VERSION_IS_LTS 1
 #define NODE_VERSION_LTS_CODENAME "Boron"

-#define NODE_VERSION_IS_RELEASE 0
+#define NODE_VERSION_IS_RELEASE 1

 #ifndef NODE_STRINGIFY
 #define NODE_STRINGIFY(n) NODE_STRINGIFY_HELPER(n)