From 502781c1d782e3c270585bc75056e4028ffb96b6 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Tobias=20Nie=C3=9Fen?=
Date: Sat, 6 Jan 2018 19:34:27 +0100
Subject: [PATCH 001/227] test: fix spelling in test case comments
PR-URL: https://github.com/nodejs/node/pull/18018
Reviewed-By: Jon Moss
Reviewed-By: Anatoli Papirovski
Reviewed-By: Weijia Wang
Reviewed-By: Luigi Pinca
Reviewed-By: James M Snell
Reviewed-By: Colin Ihrig
Reviewed-By: Yuta Hiroto
---
test/abort/test-http-parser-consume.js | 4 ++--
test/fixtures/net-fd-passing-receiver.js | 2 +-
test/parallel/test-buffer-fill.js | 4 ++--
test/parallel/test-buffer-includes.js | 2 +-
test/parallel/test-buffer-indexof.js | 2 +-
test/parallel/test-buffer-read.js | 4 ++--
test/parallel/test-child-process-internal.js | 2 +-
test/parallel/test-crypto-deprecated.js | 2 +-
test/parallel/test-crypto-fips.js | 2 +-
...hrow-error-then-throw-from-uncaught-exception-handler.js | 2 +-
test/parallel/test-event-emitter-remove-listeners.js | 4 ++--
test/parallel/test-fs-access.js | 2 +-
test/parallel/test-http-agent-keepalive.js | 4 ++--
test/parallel/test-http-extra-response.js | 1 -
test/parallel/test-https-agent-secure-protocol.js | 2 +-
test/parallel/test-https-socket-options.js | 2 +-
test/parallel/test-net-listen-error.js | 2 +-
test/parallel/test-net-pipe-connect-errors.js | 2 +-
test/parallel/test-os.js | 2 +-
test/parallel/test-promises-unhandled-rejections.js | 2 +-
test/parallel/test-readline-interface.js | 2 +-
test/parallel/test-repl.js | 2 +-
test/parallel/test-stream2-transform.js | 6 +++---
test/parallel/test-stream3-cork-uncork.js | 2 +-
test/parallel/test-stringbytes-external.js | 2 +-
test/parallel/test-tls-cnnic-whitelist.js | 2 +-
test/parallel/test-tls-server-verify.js | 4 ++--
27 files changed, 34 insertions(+), 35 deletions(-)
diff --git a/test/abort/test-http-parser-consume.js b/test/abort/test-http-parser-consume.js
index 9115aba70dbf17..673e04cfa3a573 100644
--- a/test/abort/test-http-parser-consume.js
+++ b/test/abort/test-http-parser-consume.js
@@ -11,12 +11,12 @@ if (process.argv[2] === 'child') {
const rr = get({ port: server.address().port }, common.mustCall(() => {
// This bad input (0) should abort the parser and the process
rr.parser.consume(0);
- // This line should be unreachanble.
+ // This line should be unreachable.
assert.fail('this should be unreachable');
}));
}));
} else {
- // super-proces
+ // super-process
const child = spawn(process.execPath, [__filename, 'child']);
child.stdout.on('data', common.mustNotCall());
diff --git a/test/fixtures/net-fd-passing-receiver.js b/test/fixtures/net-fd-passing-receiver.js
index fb4faee1264464..e6d2de43862b12 100644
--- a/test/fixtures/net-fd-passing-receiver.js
+++ b/test/fixtures/net-fd-passing-receiver.js
@@ -24,7 +24,7 @@ receiver = net.createServer(function(socket) {
});
});
-/* To signal the test runne we're up and listening */
+/* To signal the test runner we're up and listening */
receiver.on('listening', function() {
console.log('ready');
});
diff --git a/test/parallel/test-buffer-fill.js b/test/parallel/test-buffer-fill.js
index 8de05bc5dcd9ac..b4c7e2f139cb83 100644
--- a/test/parallel/test-buffer-fill.js
+++ b/test/parallel/test-buffer-fill.js
@@ -334,7 +334,7 @@ Buffer.alloc(8, '');
return 0;
} else {
elseWasLast = true;
- // Once buffer.js calls the C++ implemenation of fill, return -1
+ // Once buffer.js calls the C++ implementation of fill, return -1
return -1;
}
}
@@ -367,7 +367,7 @@ assert.throws(() => {
return 1;
} else {
elseWasLast = true;
- // Once buffer.js calls the C++ implemenation of fill, return -1
+ // Once buffer.js calls the C++ implementation of fill, return -1
return -1;
}
}
diff --git a/test/parallel/test-buffer-includes.js b/test/parallel/test-buffer-includes.js
index e610bc7e59fa33..eadeb8dd4e4b9b 100644
--- a/test/parallel/test-buffer-includes.js
+++ b/test/parallel/test-buffer-includes.js
@@ -137,7 +137,7 @@ assert.strictEqual(
);
-// test usc2 encoding
+// test ucs2 encoding
let twoByteString = Buffer.from('\u039a\u0391\u03a3\u03a3\u0395', 'ucs2');
assert(twoByteString.includes('\u0395', 4, 'ucs2'));
diff --git a/test/parallel/test-buffer-indexof.js b/test/parallel/test-buffer-indexof.js
index decb90c6db31d5..3f23683fc00179 100644
--- a/test/parallel/test-buffer-indexof.js
+++ b/test/parallel/test-buffer-indexof.js
@@ -553,7 +553,7 @@ assert.strictEqual(511, longBufferString.lastIndexOf(pattern, 1534));
// "yolo swag swag yolo swag yolo yolo swag" ..., goes on for about 5MB.
// This is hard to search because it all looks similar, but never repeats.
-// countBits returns the number of bits in the binary reprsentation of n.
+// countBits returns the number of bits in the binary representation of n.
function countBits(n) {
let count;
for (count = 0; n > 0; count++) {
diff --git a/test/parallel/test-buffer-read.js b/test/parallel/test-buffer-read.js
index 5eac575ff5ab81..e8f3bad383fd69 100644
--- a/test/parallel/test-buffer-read.js
+++ b/test/parallel/test-buffer-read.js
@@ -20,11 +20,11 @@ function read(buff, funx, args, expected) {
}
-// testing basic functionality of readDoubleBE() and readDOubleLE()
+// testing basic functionality of readDoubleBE() and readDoubleLE()
read(buf, 'readDoubleBE', [1], -3.1827727774563287e+295);
read(buf, 'readDoubleLE', [1], -6.966010051009108e+144);
-// testing basic functionality of readFLoatBE() and readFloatLE()
+// testing basic functionality of readFloatBE() and readFloatLE()
read(buf, 'readFloatBE', [1], -1.6691549692541768e+37);
read(buf, 'readFloatLE', [1], -7861303808);
diff --git a/test/parallel/test-child-process-internal.js b/test/parallel/test-child-process-internal.js
index bf5554bc2d45f9..b5c5d2cb77b431 100644
--- a/test/parallel/test-child-process-internal.js
+++ b/test/parallel/test-child-process-internal.js
@@ -11,7 +11,7 @@ if (process.argv[2] === 'child') {
//send non-internal message containing PREFIX at a non prefix position
process.send(normal);
- //send inernal message
+ //send internal message
process.send(internal);
process.exit(0);
diff --git a/test/parallel/test-crypto-deprecated.js b/test/parallel/test-crypto-deprecated.js
index 84f25316d49b61..acdd71301fbed0 100644
--- a/test/parallel/test-crypto-deprecated.js
+++ b/test/parallel/test-crypto-deprecated.js
@@ -14,7 +14,7 @@ common.expectWarning('DeprecationWarning', [
// Accessing the deprecated function is enough to trigger the warning event.
// It does not need to be called. So the assert serves the purpose of both
-// triggering the warning event and confirming that the deprected function is
+// triggering the warning event and confirming that the deprecated function is
// mapped to the correct non-deprecated function.
assert.strictEqual(crypto.Credentials, tls.SecureContext);
assert.strictEqual(crypto.createCredentials, tls.createSecureContext);
diff --git a/test/parallel/test-crypto-fips.js b/test/parallel/test-crypto-fips.js
index ffab5d19ba2869..54f85188034e84 100644
--- a/test/parallel/test-crypto-fips.js
+++ b/test/parallel/test-crypto-fips.js
@@ -91,7 +91,7 @@ testHelper(
// to try to call the fips setter, to try to detect this situation, as
// that would throw an error:
// ("Error: Cannot set FIPS mode in a non-FIPS build.").
-// Due to this uncertanty the following tests are skipped when configured
+// Due to this uncertainty the following tests are skipped when configured
// with --shared-openssl.
if (!sharedOpenSSL()) {
// OpenSSL config file should be able to turn on FIPS mode
diff --git a/test/parallel/test-domain-throw-error-then-throw-from-uncaught-exception-handler.js b/test/parallel/test-domain-throw-error-then-throw-from-uncaught-exception-handler.js
index 089300bc481c10..a2afebd838f410 100644
--- a/test/parallel/test-domain-throw-error-then-throw-from-uncaught-exception-handler.js
+++ b/test/parallel/test-domain-throw-error-then-throw-from-uncaught-exception-handler.js
@@ -25,7 +25,7 @@ if (process.argv[2] === 'child') {
// is not properly flushed in V8's Isolate::Throw right before the
// process aborts due to an uncaught exception, and thus the error
// message representing the error that was thrown cannot be read by the
- // parent process. So instead of parsing the child process' stdandard
+ // parent process. So instead of parsing the child process' standard
// error, the parent process will check that in the case
// --abort-on-uncaught-exception was passed, the process did not exit
// with exit code RAN_UNCAUGHT_EXCEPTION_HANDLER_EXIT_CODE.
diff --git a/test/parallel/test-event-emitter-remove-listeners.js b/test/parallel/test-event-emitter-remove-listeners.js
index ed28bc7308ea9c..f365cbba3f0fff 100644
--- a/test/parallel/test-event-emitter-remove-listeners.js
+++ b/test/parallel/test-event-emitter-remove-listeners.js
@@ -98,10 +98,10 @@ function listener2() {}
// listener4 will still be called although it is removed by listener 3.
ee.emit('hello');
- // This is so because the interal listener array at time of emit
+ // This is so because the internal listener array at time of emit
// was [listener3,listener4]
- // Interal listener array [listener3]
+ // Internal listener array [listener3]
ee.emit('hello');
}
diff --git a/test/parallel/test-fs-access.js b/test/parallel/test-fs-access.js
index 1579b71ae6e370..67187672017ca2 100644
--- a/test/parallel/test-fs-access.js
+++ b/test/parallel/test-fs-access.js
@@ -37,7 +37,7 @@ createFileWithPerms(readWriteFile, 0o666);
* The change of user id is done after creating the fixtures files for the same
* reason: the test may be run as the superuser within a directory in which
* only the superuser can create files, and thus it may need superuser
- * priviledges to create them.
+ * privileges to create them.
*
* There's not really any point in resetting the process' user id to 0 after
* changing it to 'nobody', since in the case that the test runs without
diff --git a/test/parallel/test-http-agent-keepalive.js b/test/parallel/test-http-agent-keepalive.js
index 7f9626d4669ef9..7215ab0b438a08 100644
--- a/test/parallel/test-http-agent-keepalive.js
+++ b/test/parallel/test-http-agent-keepalive.js
@@ -68,7 +68,7 @@ function remoteClose() {
process.nextTick(common.mustCall(() => {
assert.strictEqual(agent.sockets[name], undefined);
assert.strictEqual(agent.freeSockets[name].length, 1);
- // waitting remote server close the socket
+ // waiting remote server close the socket
setTimeout(common.mustCall(() => {
assert.strictEqual(agent.sockets[name], undefined);
assert.strictEqual(agent.freeSockets[name], undefined,
@@ -81,7 +81,7 @@ function remoteClose() {
}
function remoteError() {
- // remove server will destroy ths socket
+ // remote server will destroy the socket
const req = get('/error', common.mustNotCall());
req.on('error', common.mustCall((err) => {
assert(err);
diff --git a/test/parallel/test-http-extra-response.js b/test/parallel/test-http-extra-response.js
index 3cdc96a2d6d56b..c092f17f2a6385 100644
--- a/test/parallel/test-http-extra-response.js
+++ b/test/parallel/test-http-extra-response.js
@@ -30,7 +30,6 @@ const server = net.createServer(function(socket) {
if (postBody.includes('\r\n')) {
socket.write(fullResponse);
- // omg, I wrote the response twice, what a terrible HTTP server I am.
socket.end(fullResponse);
}
});
diff --git a/test/parallel/test-https-agent-secure-protocol.js b/test/parallel/test-https-agent-secure-protocol.js
index 737bd6692f26aa..0d6b8c340dcac2 100644
--- a/test/parallel/test-https-agent-secure-protocol.js
+++ b/test/parallel/test-https-agent-secure-protocol.js
@@ -45,7 +45,7 @@ server.listen(0, common.mustCall(function() {
}, common.mustCall(function(res) {
res.resume();
globalAgent.once('free', common.mustCall(function() {
- // Verify that two keep-alived connections are created
+ // Verify that two keep-alive connections are created
// due to the different secureProtocol settings:
const keys = Object.keys(globalAgent.freeSockets);
assert.strictEqual(keys.length, 2);
diff --git a/test/parallel/test-https-socket-options.js b/test/parallel/test-https-socket-options.js
index f43ad6c3726ab3..af31677c4be92f 100644
--- a/test/parallel/test-https-socket-options.js
+++ b/test/parallel/test-https-socket-options.js
@@ -40,7 +40,7 @@ server_http.listen(0, function() {
});
// Then try https server (requires functions to be
-// mirroed in tls.js's CryptoStream)
+// mirrored in tls.js's CryptoStream)
const server_https = https.createServer(options, function(req, res) {
console.log('got HTTPS request');
diff --git a/test/parallel/test-net-listen-error.js b/test/parallel/test-net-listen-error.js
index d672226de7b37f..8d6e9d10b22e5c 100644
--- a/test/parallel/test-net-listen-error.js
+++ b/test/parallel/test-net-listen-error.js
@@ -4,5 +4,5 @@ const net = require('net');
const server = net.createServer(function(socket) {
});
-server.listen(1, '1.1.1.1', common.mustNotCall()); // EACCESS or EADDRNOTAVAIL
+server.listen(1, '1.1.1.1', common.mustNotCall()); // EACCES or EADDRNOTAVAIL
server.on('error', common.mustCall(function(error) {}));
diff --git a/test/parallel/test-net-pipe-connect-errors.js b/test/parallel/test-net-pipe-connect-errors.js
index 1bb56c62c93ee9..1dfbe092073f29 100644
--- a/test/parallel/test-net-pipe-connect-errors.js
+++ b/test/parallel/test-net-pipe-connect-errors.js
@@ -16,7 +16,7 @@ if (common.isWindows) {
emptyTxt = fixtures.path('empty.txt');
} else {
common.refreshTmpDir();
- // Keep the file name very short so tht we don't exceed the 108 char limit
+ // Keep the file name very short so that we don't exceed the 108 char limit
// on CI for a POSIX socket. Even though this isn't actually a socket file,
// the error will be different from the one we are expecting if we exceed the
// limit.
diff --git a/test/parallel/test-os.js b/test/parallel/test-os.js
index 1bd7e01eeed4bf..fb2d440eed1d6c 100644
--- a/test/parallel/test-os.js
+++ b/test/parallel/test-os.js
@@ -92,7 +92,7 @@ is.string(arch);
assert.ok(arch.length > 0);
if (!common.isSunOS) {
- // not implemeneted yet
+ // not implemented yet
assert.ok(os.loadavg().length > 0);
assert.ok(os.freemem() > 0);
assert.ok(os.totalmem() > 0);
diff --git a/test/parallel/test-promises-unhandled-rejections.js b/test/parallel/test-promises-unhandled-rejections.js
index 934073aecccdda..d5323db19aafd7 100644
--- a/test/parallel/test-promises-unhandled-rejections.js
+++ b/test/parallel/test-promises-unhandled-rejections.js
@@ -293,7 +293,7 @@ asyncTest('While inside setImmediate, catching a rejected promise derived ' +
});
});
-// State adapation tests
+// State adaptation tests
asyncTest('catching a promise which is asynchronously rejected (via ' +
'resolution to an asynchronously-rejected promise) prevents' +
' unhandledRejection', function(done) {
diff --git a/test/parallel/test-readline-interface.js b/test/parallel/test-readline-interface.js
index 8da5982cff9ada..7840794e9739a6 100644
--- a/test/parallel/test-readline-interface.js
+++ b/test/parallel/test-readline-interface.js
@@ -765,7 +765,7 @@ function isWarned(emitter) {
assert.strictEqual(isWarned(process.stdout._events), false);
}
- // can create a new readline Interface with a null output arugument
+ // can create a new readline Interface with a null output argument
{
const fi = new FakeInput();
const rli = new readline.Interface(
diff --git a/test/parallel/test-repl.js b/test/parallel/test-repl.js
index 44cd7f3383caeb..31ecbe2f2c790f 100644
--- a/test/parallel/test-repl.js
+++ b/test/parallel/test-repl.js
@@ -281,7 +281,7 @@ function error_test() {
{ client: client_unix,
send: '/(.)(.)(.)(.)(.)(.)(.)(.)(.)/.test(\'123456789\')\n',
expect: `true\n${prompt_unix}` },
- // the following test's result depends on the RegEx's match from the above
+ // the following test's result depends on the RegExp's match from the above
{ client: client_unix,
send: 'RegExp.$1\nRegExp.$2\nRegExp.$3\nRegExp.$4\nRegExp.$5\n' +
'RegExp.$6\nRegExp.$7\nRegExp.$8\nRegExp.$9\n',
diff --git a/test/parallel/test-stream2-transform.js b/test/parallel/test-stream2-transform.js
index d2ca48f0f46ad5..b2968758cfa1cc 100644
--- a/test/parallel/test-stream2-transform.js
+++ b/test/parallel/test-stream2-transform.js
@@ -154,7 +154,7 @@ const Transform = require('_stream_transform');
}
{
- // Verify assymetric transform (expand)
+ // Verify asymmetric transform (expand)
const pt = new Transform();
// emit each chunk 2 times.
@@ -186,7 +186,7 @@ const Transform = require('_stream_transform');
}
{
- // Verify assymetric trasform (compress)
+ // Verify asymmetric transform (compress)
const pt = new Transform();
// each output is the first char of 3 consecutive chunks,
@@ -241,7 +241,7 @@ const Transform = require('_stream_transform');
// this tests for a stall when data is written to a full stream
// that has empty transforms.
{
- // Verify compex transform behavior
+ // Verify complex transform behavior
let count = 0;
let saved = null;
const pt = new Transform({highWaterMark: 3});
diff --git a/test/parallel/test-stream3-cork-uncork.js b/test/parallel/test-stream3-cork-uncork.js
index 2e8e86be1ef058..f8b411c84eded6 100644
--- a/test/parallel/test-stream3-cork-uncork.js
+++ b/test/parallel/test-stream3-cork-uncork.js
@@ -65,7 +65,7 @@ writeChunks(inputChunks, () => {
// trigger writing out the buffer
w.uncork();
- // buffered bytes shoud be seen in current tick
+ // buffered bytes should be seen in current tick
assert.strictEqual(seenChunks.length, 4);
// did the chunks match
diff --git a/test/parallel/test-stringbytes-external.js b/test/parallel/test-stringbytes-external.js
index 3775ecc13ec2dd..a2dae7786bbfab 100644
--- a/test/parallel/test-stringbytes-external.js
+++ b/test/parallel/test-stringbytes-external.js
@@ -61,7 +61,7 @@ assert.strictEqual(c_bin.toString('latin1'), ucs2_control);
assert.strictEqual(c_ucs.toString('latin1'), ucs2_control);
-// now let's test BASE64 and HEX ecoding/decoding
+// now let's test BASE64 and HEX encoding/decoding
const RADIOS = 2;
const PRE_HALF_APEX = Math.ceil(EXTERN_APEX / 2) - RADIOS;
const PRE_3OF4_APEX = Math.ceil((EXTERN_APEX / 4) * 3) - RADIOS;
diff --git a/test/parallel/test-tls-cnnic-whitelist.js b/test/parallel/test-tls-cnnic-whitelist.js
index 4887526435fa2d..80f188f36670a1 100644
--- a/test/parallel/test-tls-cnnic-whitelist.js
+++ b/test/parallel/test-tls-cnnic-whitelist.js
@@ -31,7 +31,7 @@ const testCases = [
errorCode: 'UNABLE_TO_VERIFY_LEAF_SIGNATURE'
},
// Test 1: for the fix of node#2061
- // agent6-cert.pem is signed by intermidate cert of ca3.
+ // agent6-cert.pem is signed by intermediate cert of ca3.
// The server has a cert chain of agent6->ca3->ca1(root) but
// tls.connect should be failed with an error of
// UNABLE_TO_GET_ISSUER_CERT_LOCALLY since the root CA of ca1 is not
diff --git a/test/parallel/test-tls-server-verify.js b/test/parallel/test-tls-server-verify.js
index 6f045235217ee8..51dfc06c565800 100644
--- a/test/parallel/test-tls-server-verify.js
+++ b/test/parallel/test-tls-server-verify.js
@@ -265,8 +265,8 @@ function runTest(port, testIndex) {
let renegotiated = false;
const server = tls.Server(serverOptions, function handleConnection(c) {
c.on('error', function(e) {
- // child.kill() leads ECONNRESET errro in the TLS connection of
- // openssl s_client via spawn(). A Test result is already
+ // child.kill() leads ECONNRESET error in the TLS connection of
+ // openssl s_client via spawn(). A test result is already
// checked by the data of client.stdout before child.kill() so
// these tls errors can be ignored.
});
From f81a69aefec99470b9527edf00da2c29e6db3bb3 Mon Sep 17 00:00:00 2001
From: Anna Henningsen
Date: Tue, 13 Mar 2018 20:42:33 +0100
Subject: [PATCH 002/227] =?UTF-8?q?fs:=20fix=20`createReadStream(=E2=80=A6?=
=?UTF-8?q?,=20{end:=20n})`=20for=20non-seekable=20fds?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
82bdf8fba2d3f fixed an issue by silently modifying the `start`
option for the case when only `end` is passed, in order to perform
reads from a specified range in the file.
However, that approach does not work for non-seekable files, since
a numeric `start` option means that positioned reads will be used
to read data from the file.
This patch fixes that and instead stops reading after the specified
size by adjusting the read buffer size.
This way we avoid re-introducing the bug that 82bdf8fba2d3f fixed,
and align behaviour with the native file stream mechanism
introduced in https://github.com/nodejs/node/pull/18936 as well.
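As a rough illustration of that mechanism (a minimal standalone sketch, not
the actual lib/fs.js change; the helper name `readUpTo` is made up for this
example), the idea is to clamp each read request so the total number of
bytes read never exceeds `end + 1`, while passing `null` as the position so
the read stays sequential and keeps working on non-seekable fds:

    // Hypothetical helper sketching the clamping idea from this patch.
    const fs = require('fs');

    function readUpTo(fd, end, onDone, chunkSize = 16 * 1024) {
      let bytesRead = 0;
      const chunks = [];
      (function next() {
        // Mirror `toRead = Math.min(this.end - this.bytesRead + 1, toRead)`:
        // shrink the request instead of switching to positioned reads.
        const toRead = Math.min(end - bytesRead + 1, chunkSize);
        if (toRead <= 0)
          return onDone(null, Buffer.concat(chunks));
        const buf = Buffer.alloc(toRead);
        // Passing `null` as the position keeps the read sequential, which is
        // all that pipes and other non-seekable file descriptors support.
        fs.read(fd, buf, 0, toRead, null, (err, n) => {
          if (err) return onDone(err);
          if (n === 0) return onDone(null, Buffer.concat(chunks)); // EOF
          bytesRead += n;
          chunks.push(buf.slice(0, n));
          next();
        });
      })();
    }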
Backport-PR-URL: https://github.com/nodejs/node/pull/19411
PR-URL: https://github.com/nodejs/node/pull/19329
Fixes: https://github.com/nodejs/node/issues/19240
Refs: https://github.com/nodejs/node/pull/18121
Reviewed-By: James M Snell
Reviewed-By: Matteo Collina
Reviewed-By: Chen Gang
---
lib/fs.js | 11 ++++++--
test/parallel/test-fs-read-stream.js | 39 ++++++++++++++++++++++++----
2 files changed, 43 insertions(+), 7 deletions(-)
diff --git a/lib/fs.js b/lib/fs.js
index 5efccceb544d20..b0d117673d6bfc 100644
--- a/lib/fs.js
+++ b/lib/fs.js
@@ -1919,8 +1919,7 @@ function ReadStream(path, options) {
this.flags = options.flags === undefined ? 'r' : options.flags;
this.mode = options.mode === undefined ? 0o666 : options.mode;
- this.start = typeof this.fd !== 'number' && options.start === undefined ?
- 0 : options.start;
+ this.start = options.start;
this.end = options.end;
this.autoClose = options.autoClose === undefined ? true : options.autoClose;
this.pos = undefined;
@@ -1943,6 +1942,12 @@ function ReadStream(path, options) {
this.pos = this.start;
}
+ // Backwards compatibility: Make sure `end` is a number regardless of `start`.
+ // TODO(addaleax): Make the above typecheck not depend on `start` instead.
+ // (That is a semver-major change).
+ if (typeof this.end !== 'number')
+ this.end = Infinity;
+
if (typeof this.fd !== 'number')
this.open();
@@ -1996,6 +2001,8 @@ ReadStream.prototype._read = function(n) {
if (this.pos !== undefined)
toRead = Math.min(this.end - this.pos + 1, toRead);
+ else
+ toRead = Math.min(this.end - this.bytesRead + 1, toRead);
// already read everything we were supposed to read!
// treat as EOF.
diff --git a/test/parallel/test-fs-read-stream.js b/test/parallel/test-fs-read-stream.js
index 95d5fbeaef9973..5161fd068b14d6 100644
--- a/test/parallel/test-fs-read-stream.js
+++ b/test/parallel/test-fs-read-stream.js
@@ -1,5 +1,7 @@
'use strict';
const common = require('../common');
+
+const child_process = require('child_process');
const assert = require('assert');
const fixtures = require('../common/fixtures');
@@ -146,11 +148,6 @@ stream.on('end', function() {
}));
}
-// pause and then resume immediately.
-const pauseRes = fs.createReadStream(rangeFile);
-pauseRes.pause();
-pauseRes.resume();
-
let file7 = fs.createReadStream(rangeFile, {autoClose: false });
file7.on('data', () => {});
file7.on('end', function() {
@@ -173,6 +170,38 @@ function file7Next() {
});
}
+if (!common.isWindows) {
+ // Verify that end works when start is not specified, and we do not try to
+ // use positioned reads. This makes sure that this keeps working for
+ // non-seekable file descriptors.
+ common.refreshTmpDir();
+ const filename = `${common.tmpDir}/foo.pipe`;
+ const mkfifoResult = child_process.spawnSync('mkfifo', [filename]);
+ if (!mkfifoResult.error) {
+ child_process.exec(`echo "xyz foobar" > '${filename}'`);
+ const stream = new fs.createReadStream(filename, { end: 1 });
+ stream.data = '';
+
+ stream.on('data', function(chunk) {
+ stream.data += chunk;
+ });
+
+ stream.on('end', common.mustCall(function() {
+ assert.strictEqual('xy', stream.data);
+ fs.unlinkSync(filename);
+ }));
+ } else {
+ common.printSkipMessage('mkfifo not available');
+ }
+}
+
+{
+ // pause and then resume immediately.
+ const pauseRes = fs.createReadStream(rangeFile);
+ pauseRes.pause();
+ pauseRes.resume();
+}
+
// Just to make sure autoClose won't close the stream because of error.
const file8 = fs.createReadStream(null, {fd: 13337, autoClose: false });
file8.on('data', () => {});
From 4d2efa241535c7af32f1533139d73deacb3c86f2 Mon Sep 17 00:00:00 2001
From: Beth Griggs
Date: Thu, 1 Mar 2018 12:23:31 +0000
Subject: [PATCH 003/227] test: remove flaky mark for moved test
Remove 'flaky' in parallel.status for test-debug-signal-cluster as the
test was moved to sequential.
Refs: https://github.com/nodejs/node/pull/13592
PR-URL: https://github.com/nodejs/node/pull/19069
Reviewed-By: Gireesh Punathil
Reviewed-By: Benjamin Gruenbaum
Reviewed-By: Colin Ihrig
Reviewed-By: James M Snell
Reviewed-By: Rich Trott
Reviewed-By: Richard Lau
Reviewed-By: Gibson Fahnestock
---
test/parallel/parallel.status | 4 ----
1 file changed, 4 deletions(-)
diff --git a/test/parallel/parallel.status b/test/parallel/parallel.status
index 65da4e2c75ae20..b74b4a2c29c318 100644
--- a/test/parallel/parallel.status
+++ b/test/parallel/parallel.status
@@ -17,11 +17,7 @@ test-fs-read-buffer-tostring-fail : PASS,FLAKY
test-npm-install: PASS,FLAKY
[$system==solaris] # Also applies to SmartOS
-test-debug-signal-cluster : PASS,FLAKY
[$system==freebsd]
[$system==aix]
-#covered by https://github.com/nodejs/node/issues/3796
-# but more frequent on AIX ?
-test-debug-signal-cluster : PASS, FLAKY
\ No newline at end of file
From 969398d08e062ec6dfe77de5cc6bc336f1a89dcc Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Tobias=20Nie=C3=9Fen?=
Date: Mon, 18 Dec 2017 13:23:46 +0100
Subject: [PATCH 004/227] crypto: reuse variable instead of reevaluation
Backport-PR-URL: https://github.com/nodejs/node/pull/19115
PR-URL: https://github.com/nodejs/node/pull/17735
Reviewed-By: Colin Ihrig
Reviewed-By: Daniel Bevenius
Reviewed-By: Jon Moss
Reviewed-By: James M Snell
Reviewed-By: Luigi Pinca
---
src/node_crypto.cc | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/src/node_crypto.cc b/src/node_crypto.cc
index fddb125a726696..3a9ebe7e97ad9f 100644
--- a/src/node_crypto.cc
+++ b/src/node_crypto.cc
@@ -3256,7 +3256,7 @@ void CipherBase::Init(const char* cipher_type,
nullptr,
reinterpret_cast(key),
reinterpret_cast(iv),
- kind_ == kCipher);
+ encrypt);
initialised_ = true;
}
@@ -3324,7 +3324,7 @@ void CipherBase::InitIv(const char* cipher_type,
nullptr,
reinterpret_cast(key),
reinterpret_cast(iv),
- kind_ == kCipher);
+ encrypt);
initialised_ = true;
}
From 46aed5800f7989f47bc134c6cd06124b11600087 Mon Sep 17 00:00:00 2001
From: Lance Ball
Date: Wed, 22 Nov 2017 13:49:50 -0500
Subject: [PATCH 005/227] test: make common.mustNotCall show file:linenumber
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
When a test fails via `common.mustNotCall` it is sometimes hard to
determine exactly what was called. This modification stores the
caller's file and line number by using the V8 Error API to capture
a stack at the time `common.mustNotCall()` is called. In the event
of failure, this information is printed.
This change also exposes a new function in test/common, `getCallSite()`,
which accepts a `function` and returns a `String` with the file name and
line number for the function.
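A condensed sketch of that technique (it mirrors the diff below, with
`assert.fail` swapped for a plain `throw` so the snippet is self-contained):

    // Temporarily override Error.prepareStackTrace so the formatted stack
    // is just "file:line" of the caller, then restore the original formatter.
    function getCallSite(top) {
      const originalStackFormatter = Error.prepareStackTrace;
      Error.prepareStackTrace = (err, stack) =>
        `${stack[0].getFileName()}:${stack[0].getLineNumber()}`;
      const err = new Error();
      Error.captureStackTrace(err, top);  // drop `top` and frames above it
      const callSite = err.stack;         // accessing .stack runs the formatter
      Error.prepareStackTrace = originalStackFormatter;
      return callSite;
    }

    // Capture the call site when the guard is created, then report it if
    // the guarded function is ever called.
    function mustNotCall(msg) {
      const callSite = getCallSite(mustNotCall);
      return function mustNotCall() {
        throw new Error(
          `${msg || 'function should not have been called'} at ${callSite}`);
      };
    }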
Backport-PR-URL: https://github.com/nodejs/node/pull/19355
PR-URL: https://github.com/nodejs/node/pull/17257
Reviewed-By: James M Snell
Reviewed-By: Michaël Zasso
Reviewed-By: Gibson Fahnestock
Reviewed-By: Tobias Nießen
Reviewed-By: Joyee Cheung
Reviewed-By: Sakthipriyan Vairamani
Reviewed-By: Khaidi Chu
---
test/common/README.md | 6 +++
test/common/index.js | 44 +++++++++++++++++++++-
test/parallel/test-common-must-not-call.js | 26 +++++++++++++
3 files changed, 75 insertions(+), 1 deletion(-)
create mode 100644 test/parallel/test-common-must-not-call.js
diff --git a/test/common/README.md b/test/common/README.md
index 4e7e955487bd3f..8a6ce5f60456c5 100644
--- a/test/common/README.md
+++ b/test/common/README.md
@@ -94,6 +94,12 @@ Path to the 'fixtures' directory.
Returns an instance of all possible `ArrayBufferView`s of the provided Buffer.
+### getCallSite(func)
+* `func` [<Function>]
+* return [<String>]
+
+Returns the file name and line number for the provided Function.
+
### globalCheck
* [<Boolean>](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Data_structures#Boolean_type)
diff --git a/test/common/index.js b/test/common/index.js
index 7747cb57cf3ae7..d762f4e0aa8498 100644
--- a/test/common/index.js
+++ b/test/common/index.js
@@ -480,9 +480,51 @@ exports.fileExists = function(pathname) {
}
};
+exports.canCreateSymLink = function() {
+ // On Windows, creating symlinks requires admin privileges.
+ // We'll only try to run symlink test if we have enough privileges.
+ // On other platforms, creating symlinks shouldn't need admin privileges
+ if (exports.isWindows) {
+ // whoami.exe needs to be the one from System32
+ // If unix tools are in the path, they can shadow the one we want,
+ // so use the full path while executing whoami
+ const whoamiPath = path.join(process.env['SystemRoot'],
+ 'System32', 'whoami.exe');
+
+ let err = false;
+ let output = '';
+
+ try {
+ output = execSync(`${whoamiPath} /priv`, { timout: 1000 });
+ } catch (e) {
+ err = true;
+ } finally {
+ if (err || !output.includes('SeCreateSymbolicLinkPrivilege')) {
+ return false;
+ }
+ }
+ }
+
+ return true;
+};
+
+exports.getCallSite = function getCallSite(top) {
+ const originalStackFormatter = Error.prepareStackTrace;
+ Error.prepareStackTrace = (err, stack) =>
+ `${stack[0].getFileName()}:${stack[0].getLineNumber()}`;
+ const err = new Error();
+ Error.captureStackTrace(err, top);
+ // with the V8 Error API, the stack is not formatted until it is accessed
+ err.stack;
+ Error.prepareStackTrace = originalStackFormatter;
+ return err.stack;
+};
+
exports.mustNotCall = function(msg) {
+ const callSite = exports.getCallSite(exports.mustNotCall);
return function mustNotCall() {
- assert.fail(msg || 'function should not have been called');
+ assert.fail(
+ `${msg || 'function should not have been called'} at ${callSite}`);
};
};
diff --git a/test/parallel/test-common-must-not-call.js b/test/parallel/test-common-must-not-call.js
new file mode 100644
index 00000000000000..d70daabf0a4bd0
--- /dev/null
+++ b/test/parallel/test-common-must-not-call.js
@@ -0,0 +1,26 @@
+'use strict';
+
+const common = require('../common');
+const assert = require('assert');
+const path = require('path');
+
+const message = 'message';
+const testFunction = common.mustNotCall(message);
+
+const validateError = common.mustCall((e) => {
+ const prefix = `${message} at `;
+ assert.ok(e.message.startsWith(prefix));
+ if (process.platform === 'win32') {
+ e.message = e.message.substring(2); // remove 'C:'
+ }
+ const [ fileName, lineNumber ] = e.message
+ .substring(prefix.length).split(':');
+ assert.strictEqual(path.basename(fileName), 'test-common-must-not-call.js');
+ assert.strictEqual(lineNumber, '8');
+});
+
+try {
+ testFunction();
+} catch (e) {
+ validateError(e);
+}
From 5ca8dee8cbff435b62a574027ec5c2daa76eba52 Mon Sep 17 00:00:00 2001
From: Gabriel Schulhof
Date: Thu, 15 Mar 2018 12:14:54 -0400
Subject: [PATCH 006/227] test: remove n-api intermediate files
PR-URL: https://github.com/nodejs/node/pull/19375
Reviewed-By: Myles Borins
---
test/addons-napi/1_hello_world/build/Makefile | 342 ----------
.../1_hello_world/build/binding.Makefile | 6 -
.../1_hello_world/build/binding.target.mk | 178 -----
.../1_hello_world/build/config.gypi | 70 --
.../1_hello_world/build/gyp-mac-tool | 611 ------------------
.../2_function_arguments/build/Makefile | 342 ----------
.../build/binding.Makefile | 6 -
.../build/binding.target.mk | 178 -----
.../2_function_arguments/build/config.gypi | 70 --
.../2_function_arguments/build/gyp-mac-tool | 611 ------------------
test/addons-napi/3_callbacks/build/Makefile | 342 ----------
.../3_callbacks/build/binding.Makefile | 6 -
.../3_callbacks/build/binding.target.mk | 178 -----
.../addons-napi/3_callbacks/build/config.gypi | 70 --
.../3_callbacks/build/gyp-mac-tool | 611 ------------------
.../4_object_factory/build/Makefile | 342 ----------
.../4_object_factory/build/binding.Makefile | 6 -
.../4_object_factory/build/binding.target.mk | 178 -----
.../4_object_factory/build/config.gypi | 70 --
.../4_object_factory/build/gyp-mac-tool | 611 ------------------
.../5_function_factory/build/Makefile | 342 ----------
.../5_function_factory/build/binding.Makefile | 6 -
.../build/binding.target.mk | 178 -----
.../5_function_factory/build/config.gypi | 70 --
.../5_function_factory/build/gyp-mac-tool | 611 ------------------
test/addons-napi/6_object_wrap/build/Makefile | 342 ----------
.../6_object_wrap/build/binding.Makefile | 6 -
.../6_object_wrap/build/binding.target.mk | 179 -----
.../6_object_wrap/build/config.gypi | 70 --
.../6_object_wrap/build/gyp-mac-tool | 611 ------------------
.../addons-napi/7_factory_wrap/build/Makefile | 342 ----------
.../7_factory_wrap/build/binding.Makefile | 6 -
.../7_factory_wrap/build/binding.target.mk | 179 -----
.../7_factory_wrap/build/config.gypi | 70 --
.../7_factory_wrap/build/gyp-mac-tool | 611 ------------------
.../8_passing_wrapped/build/Makefile | 342 ----------
.../8_passing_wrapped/build/binding.Makefile | 6 -
.../8_passing_wrapped/build/binding.target.mk | 179 -----
.../8_passing_wrapped/build/config.gypi | 70 --
.../8_passing_wrapped/build/gyp-mac-tool | 611 ------------------
test/addons-napi/test_array/build/Makefile | 342 ----------
.../test_array/build/binding.Makefile | 6 -
test/addons-napi/test_array/build/config.gypi | 70 --
.../addons-napi/test_array/build/gyp-mac-tool | 611 ------------------
.../test_array/build/test_array.target.mk | 178 -----
test/addons-napi/test_async/build/Makefile | 342 ----------
.../test_async/build/binding.Makefile | 6 -
test/addons-napi/test_async/build/config.gypi | 70 --
.../addons-napi/test_async/build/gyp-mac-tool | 611 ------------------
.../test_async/build/test_async.target.mk | 178 -----
test/addons-napi/test_buffer/build/Makefile | 342 ----------
.../test_buffer/build/binding.Makefile | 6 -
.../addons-napi/test_buffer/build/config.gypi | 70 --
.../test_buffer/build/gyp-mac-tool | 611 ------------------
.../test_buffer/build/test_buffer.target.mk | 178 -----
.../test_constructor/build/Makefile | 342 ----------
.../test_constructor/build/binding.Makefile | 6 -
.../test_constructor/build/config.gypi | 70 --
.../test_constructor/build/gyp-mac-tool | 611 ------------------
.../build/test_constructor.target.mk | 178 -----
.../test_conversions/build/Makefile | 342 ----------
.../test_conversions/build/binding.Makefile | 6 -
.../test_conversions/build/config.gypi | 70 --
.../test_conversions/build/gyp-mac-tool | 611 ------------------
.../build/test_conversions.target.mk | 178 -----
test/addons-napi/test_dataview/build/Makefile | 342 ----------
.../test_dataview/build/binding.Makefile | 6 -
.../test_dataview/build/config.gypi | 70 --
.../test_dataview/build/gyp-mac-tool | 611 ------------------
.../build/test_dataview.target.mk | 178 -----
.../test_env_sharing/build/Makefile | 347 ----------
.../test_env_sharing/build/binding.Makefile | 6 -
.../build/compare_env.target.mk | 178 -----
.../test_env_sharing/build/config.gypi | 70 --
.../test_env_sharing/build/gyp-mac-tool | 611 ------------------
.../build/store_env.target.mk | 178 -----
test/addons-napi/test_error/build/Makefile | 342 ----------
.../test_error/build/binding.Makefile | 6 -
test/addons-napi/test_error/build/config.gypi | 70 --
.../addons-napi/test_error/build/gyp-mac-tool | 611 ------------------
.../test_error/build/test_error.target.mk | 178 -----
.../addons-napi/test_exception/build/Makefile | 342 ----------
.../test_exception/build/binding.Makefile | 6 -
.../test_exception/build/config.gypi | 70 --
.../test_exception/build/gyp-mac-tool | 611 ------------------
.../build/test_exception.target.mk | 178 -----
test/addons-napi/test_fatal/build/Makefile | 342 ----------
.../test_fatal/build/binding.Makefile | 6 -
test/addons-napi/test_fatal/build/config.gypi | 70 --
.../addons-napi/test_fatal/build/gyp-mac-tool | 611 ------------------
.../test_fatal/build/test_fatal.target.mk | 178 -----
test/addons-napi/test_function/build/Makefile | 342 ----------
.../test_function/build/binding.Makefile | 6 -
.../test_function/build/config.gypi | 70 --
.../test_function/build/gyp-mac-tool | 611 ------------------
.../build/test_function.target.mk | 178 -----
test/addons-napi/test_general/build/Makefile | 342 ----------
.../test_general/build/binding.Makefile | 6 -
.../test_general/build/config.gypi | 70 --
.../test_general/build/gyp-mac-tool | 611 ------------------
.../test_general/build/test_general.target.mk | 178 -----
.../test_handle_scope/build/Makefile | 342 ----------
.../test_handle_scope/build/binding.Makefile | 6 -
.../test_handle_scope/build/config.gypi | 70 --
.../test_handle_scope/build/gyp-mac-tool | 611 ------------------
.../build/test_handle_scope.target.mk | 178 -----
.../test_make_callback/build/Makefile | 342 ----------
.../test_make_callback/build/binding.Makefile | 6 -
.../build/binding.target.mk | 178 -----
.../test_make_callback/build/config.gypi | 70 --
.../test_make_callback/build/gyp-mac-tool | 611 ------------------
.../test_make_callback_recurse/build/Makefile | 342 ----------
.../build/binding.Makefile | 6 -
.../build/binding.target.mk | 178 -----
.../build/config.gypi | 70 --
.../build/gyp-mac-tool | 611 ------------------
test/addons-napi/test_number/build/Makefile | 342 ----------
.../test_number/build/binding.Makefile | 6 -
.../addons-napi/test_number/build/config.gypi | 70 --
.../test_number/build/gyp-mac-tool | 611 ------------------
.../test_number/build/test_number.target.mk | 178 -----
test/addons-napi/test_object/build/Makefile | 342 ----------
.../test_object/build/binding.Makefile | 6 -
.../addons-napi/test_object/build/config.gypi | 70 --
.../test_object/build/gyp-mac-tool | 611 ------------------
.../test_object/build/test_object.target.mk | 178 -----
test/addons-napi/test_promise/build/Makefile | 342 ----------
.../test_promise/build/binding.Makefile | 6 -
.../test_promise/build/config.gypi | 70 --
.../test_promise/build/gyp-mac-tool | 611 ------------------
.../test_promise/build/test_promise.target.mk | 178 -----
.../test_properties/build/Makefile | 342 ----------
.../test_properties/build/binding.Makefile | 6 -
.../test_properties/build/config.gypi | 70 --
.../test_properties/build/gyp-mac-tool | 611 ------------------
.../build/test_properties.target.mk | 178 -----
.../addons-napi/test_reference/build/Makefile | 342 ----------
.../test_reference/build/binding.Makefile | 6 -
.../test_reference/build/config.gypi | 70 --
.../test_reference/build/gyp-mac-tool | 611 ------------------
.../build/test_reference.target.mk | 178 -----
test/addons-napi/test_string/build/Makefile | 342 ----------
.../test_string/build/binding.Makefile | 6 -
.../addons-napi/test_string/build/config.gypi | 70 --
.../test_string/build/gyp-mac-tool | 611 ------------------
.../test_string/build/test_string.target.mk | 178 -----
test/addons-napi/test_symbol/build/Makefile | 342 ----------
.../test_symbol/build/binding.Makefile | 6 -
.../addons-napi/test_symbol/build/config.gypi | 70 --
.../test_symbol/build/gyp-mac-tool | 611 ------------------
.../test_symbol/build/test_symbol.target.mk | 178 -----
.../test_typedarray/build/Makefile | 342 ----------
.../test_typedarray/build/binding.Makefile | 6 -
.../test_typedarray/build/config.gypi | 70 --
.../test_typedarray/build/gyp-mac-tool | 611 ------------------
.../build/test_typedarray.target.mk | 178 -----
156 files changed, 37603 deletions(-)
delete mode 100644 test/addons-napi/1_hello_world/build/Makefile
delete mode 100644 test/addons-napi/1_hello_world/build/binding.Makefile
delete mode 100644 test/addons-napi/1_hello_world/build/binding.target.mk
delete mode 100644 test/addons-napi/1_hello_world/build/config.gypi
delete mode 100755 test/addons-napi/1_hello_world/build/gyp-mac-tool
delete mode 100644 test/addons-napi/2_function_arguments/build/Makefile
delete mode 100644 test/addons-napi/2_function_arguments/build/binding.Makefile
delete mode 100644 test/addons-napi/2_function_arguments/build/binding.target.mk
delete mode 100644 test/addons-napi/2_function_arguments/build/config.gypi
delete mode 100755 test/addons-napi/2_function_arguments/build/gyp-mac-tool
delete mode 100644 test/addons-napi/3_callbacks/build/Makefile
delete mode 100644 test/addons-napi/3_callbacks/build/binding.Makefile
delete mode 100644 test/addons-napi/3_callbacks/build/binding.target.mk
delete mode 100644 test/addons-napi/3_callbacks/build/config.gypi
delete mode 100755 test/addons-napi/3_callbacks/build/gyp-mac-tool
delete mode 100644 test/addons-napi/4_object_factory/build/Makefile
delete mode 100644 test/addons-napi/4_object_factory/build/binding.Makefile
delete mode 100644 test/addons-napi/4_object_factory/build/binding.target.mk
delete mode 100644 test/addons-napi/4_object_factory/build/config.gypi
delete mode 100755 test/addons-napi/4_object_factory/build/gyp-mac-tool
delete mode 100644 test/addons-napi/5_function_factory/build/Makefile
delete mode 100644 test/addons-napi/5_function_factory/build/binding.Makefile
delete mode 100644 test/addons-napi/5_function_factory/build/binding.target.mk
delete mode 100644 test/addons-napi/5_function_factory/build/config.gypi
delete mode 100755 test/addons-napi/5_function_factory/build/gyp-mac-tool
delete mode 100644 test/addons-napi/6_object_wrap/build/Makefile
delete mode 100644 test/addons-napi/6_object_wrap/build/binding.Makefile
delete mode 100644 test/addons-napi/6_object_wrap/build/binding.target.mk
delete mode 100644 test/addons-napi/6_object_wrap/build/config.gypi
delete mode 100755 test/addons-napi/6_object_wrap/build/gyp-mac-tool
delete mode 100644 test/addons-napi/7_factory_wrap/build/Makefile
delete mode 100644 test/addons-napi/7_factory_wrap/build/binding.Makefile
delete mode 100644 test/addons-napi/7_factory_wrap/build/binding.target.mk
delete mode 100644 test/addons-napi/7_factory_wrap/build/config.gypi
delete mode 100755 test/addons-napi/7_factory_wrap/build/gyp-mac-tool
delete mode 100644 test/addons-napi/8_passing_wrapped/build/Makefile
delete mode 100644 test/addons-napi/8_passing_wrapped/build/binding.Makefile
delete mode 100644 test/addons-napi/8_passing_wrapped/build/binding.target.mk
delete mode 100644 test/addons-napi/8_passing_wrapped/build/config.gypi
delete mode 100755 test/addons-napi/8_passing_wrapped/build/gyp-mac-tool
delete mode 100644 test/addons-napi/test_array/build/Makefile
delete mode 100644 test/addons-napi/test_array/build/binding.Makefile
delete mode 100644 test/addons-napi/test_array/build/config.gypi
delete mode 100755 test/addons-napi/test_array/build/gyp-mac-tool
delete mode 100644 test/addons-napi/test_array/build/test_array.target.mk
delete mode 100644 test/addons-napi/test_async/build/Makefile
delete mode 100644 test/addons-napi/test_async/build/binding.Makefile
delete mode 100644 test/addons-napi/test_async/build/config.gypi
delete mode 100755 test/addons-napi/test_async/build/gyp-mac-tool
delete mode 100644 test/addons-napi/test_async/build/test_async.target.mk
delete mode 100644 test/addons-napi/test_buffer/build/Makefile
delete mode 100644 test/addons-napi/test_buffer/build/binding.Makefile
delete mode 100644 test/addons-napi/test_buffer/build/config.gypi
delete mode 100755 test/addons-napi/test_buffer/build/gyp-mac-tool
delete mode 100644 test/addons-napi/test_buffer/build/test_buffer.target.mk
delete mode 100644 test/addons-napi/test_constructor/build/Makefile
delete mode 100644 test/addons-napi/test_constructor/build/binding.Makefile
delete mode 100644 test/addons-napi/test_constructor/build/config.gypi
delete mode 100755 test/addons-napi/test_constructor/build/gyp-mac-tool
delete mode 100644 test/addons-napi/test_constructor/build/test_constructor.target.mk
delete mode 100644 test/addons-napi/test_conversions/build/Makefile
delete mode 100644 test/addons-napi/test_conversions/build/binding.Makefile
delete mode 100644 test/addons-napi/test_conversions/build/config.gypi
delete mode 100755 test/addons-napi/test_conversions/build/gyp-mac-tool
delete mode 100644 test/addons-napi/test_conversions/build/test_conversions.target.mk
delete mode 100644 test/addons-napi/test_dataview/build/Makefile
delete mode 100644 test/addons-napi/test_dataview/build/binding.Makefile
delete mode 100644 test/addons-napi/test_dataview/build/config.gypi
delete mode 100755 test/addons-napi/test_dataview/build/gyp-mac-tool
delete mode 100644 test/addons-napi/test_dataview/build/test_dataview.target.mk
delete mode 100644 test/addons-napi/test_env_sharing/build/Makefile
delete mode 100644 test/addons-napi/test_env_sharing/build/binding.Makefile
delete mode 100644 test/addons-napi/test_env_sharing/build/compare_env.target.mk
delete mode 100644 test/addons-napi/test_env_sharing/build/config.gypi
delete mode 100755 test/addons-napi/test_env_sharing/build/gyp-mac-tool
delete mode 100644 test/addons-napi/test_env_sharing/build/store_env.target.mk
delete mode 100644 test/addons-napi/test_error/build/Makefile
delete mode 100644 test/addons-napi/test_error/build/binding.Makefile
delete mode 100644 test/addons-napi/test_error/build/config.gypi
delete mode 100755 test/addons-napi/test_error/build/gyp-mac-tool
delete mode 100644 test/addons-napi/test_error/build/test_error.target.mk
delete mode 100644 test/addons-napi/test_exception/build/Makefile
delete mode 100644 test/addons-napi/test_exception/build/binding.Makefile
delete mode 100644 test/addons-napi/test_exception/build/config.gypi
delete mode 100755 test/addons-napi/test_exception/build/gyp-mac-tool
delete mode 100644 test/addons-napi/test_exception/build/test_exception.target.mk
delete mode 100644 test/addons-napi/test_fatal/build/Makefile
delete mode 100644 test/addons-napi/test_fatal/build/binding.Makefile
delete mode 100644 test/addons-napi/test_fatal/build/config.gypi
delete mode 100755 test/addons-napi/test_fatal/build/gyp-mac-tool
delete mode 100644 test/addons-napi/test_fatal/build/test_fatal.target.mk
delete mode 100644 test/addons-napi/test_function/build/Makefile
delete mode 100644 test/addons-napi/test_function/build/binding.Makefile
delete mode 100644 test/addons-napi/test_function/build/config.gypi
delete mode 100755 test/addons-napi/test_function/build/gyp-mac-tool
delete mode 100644 test/addons-napi/test_function/build/test_function.target.mk
delete mode 100644 test/addons-napi/test_general/build/Makefile
delete mode 100644 test/addons-napi/test_general/build/binding.Makefile
delete mode 100644 test/addons-napi/test_general/build/config.gypi
delete mode 100755 test/addons-napi/test_general/build/gyp-mac-tool
delete mode 100644 test/addons-napi/test_general/build/test_general.target.mk
delete mode 100644 test/addons-napi/test_handle_scope/build/Makefile
delete mode 100644 test/addons-napi/test_handle_scope/build/binding.Makefile
delete mode 100644 test/addons-napi/test_handle_scope/build/config.gypi
delete mode 100755 test/addons-napi/test_handle_scope/build/gyp-mac-tool
delete mode 100644 test/addons-napi/test_handle_scope/build/test_handle_scope.target.mk
delete mode 100644 test/addons-napi/test_make_callback/build/Makefile
delete mode 100644 test/addons-napi/test_make_callback/build/binding.Makefile
delete mode 100644 test/addons-napi/test_make_callback/build/binding.target.mk
delete mode 100644 test/addons-napi/test_make_callback/build/config.gypi
delete mode 100755 test/addons-napi/test_make_callback/build/gyp-mac-tool
delete mode 100644 test/addons-napi/test_make_callback_recurse/build/Makefile
delete mode 100644 test/addons-napi/test_make_callback_recurse/build/binding.Makefile
delete mode 100644 test/addons-napi/test_make_callback_recurse/build/binding.target.mk
delete mode 100644 test/addons-napi/test_make_callback_recurse/build/config.gypi
delete mode 100755 test/addons-napi/test_make_callback_recurse/build/gyp-mac-tool
delete mode 100644 test/addons-napi/test_number/build/Makefile
delete mode 100644 test/addons-napi/test_number/build/binding.Makefile
delete mode 100644 test/addons-napi/test_number/build/config.gypi
delete mode 100755 test/addons-napi/test_number/build/gyp-mac-tool
delete mode 100644 test/addons-napi/test_number/build/test_number.target.mk
delete mode 100644 test/addons-napi/test_object/build/Makefile
delete mode 100644 test/addons-napi/test_object/build/binding.Makefile
delete mode 100644 test/addons-napi/test_object/build/config.gypi
delete mode 100755 test/addons-napi/test_object/build/gyp-mac-tool
delete mode 100644 test/addons-napi/test_object/build/test_object.target.mk
delete mode 100644 test/addons-napi/test_promise/build/Makefile
delete mode 100644 test/addons-napi/test_promise/build/binding.Makefile
delete mode 100644 test/addons-napi/test_promise/build/config.gypi
delete mode 100755 test/addons-napi/test_promise/build/gyp-mac-tool
delete mode 100644 test/addons-napi/test_promise/build/test_promise.target.mk
delete mode 100644 test/addons-napi/test_properties/build/Makefile
delete mode 100644 test/addons-napi/test_properties/build/binding.Makefile
delete mode 100644 test/addons-napi/test_properties/build/config.gypi
delete mode 100755 test/addons-napi/test_properties/build/gyp-mac-tool
delete mode 100644 test/addons-napi/test_properties/build/test_properties.target.mk
delete mode 100644 test/addons-napi/test_reference/build/Makefile
delete mode 100644 test/addons-napi/test_reference/build/binding.Makefile
delete mode 100644 test/addons-napi/test_reference/build/config.gypi
delete mode 100755 test/addons-napi/test_reference/build/gyp-mac-tool
delete mode 100644 test/addons-napi/test_reference/build/test_reference.target.mk
delete mode 100644 test/addons-napi/test_string/build/Makefile
delete mode 100644 test/addons-napi/test_string/build/binding.Makefile
delete mode 100644 test/addons-napi/test_string/build/config.gypi
delete mode 100755 test/addons-napi/test_string/build/gyp-mac-tool
delete mode 100644 test/addons-napi/test_string/build/test_string.target.mk
delete mode 100644 test/addons-napi/test_symbol/build/Makefile
delete mode 100644 test/addons-napi/test_symbol/build/binding.Makefile
delete mode 100644 test/addons-napi/test_symbol/build/config.gypi
delete mode 100755 test/addons-napi/test_symbol/build/gyp-mac-tool
delete mode 100644 test/addons-napi/test_symbol/build/test_symbol.target.mk
delete mode 100644 test/addons-napi/test_typedarray/build/Makefile
delete mode 100644 test/addons-napi/test_typedarray/build/binding.Makefile
delete mode 100644 test/addons-napi/test_typedarray/build/config.gypi
delete mode 100755 test/addons-napi/test_typedarray/build/gyp-mac-tool
delete mode 100644 test/addons-napi/test_typedarray/build/test_typedarray.target.mk
diff --git a/test/addons-napi/1_hello_world/build/Makefile b/test/addons-napi/1_hello_world/build/Makefile
deleted file mode 100644
index cb2e7e62b06c84..00000000000000
--- a/test/addons-napi/1_hello_world/build/Makefile
+++ /dev/null
@@ -1,342 +0,0 @@
-# We borrow heavily from the kernel build setup, though we are simpler since
-# we don't have Kconfig tweaking settings on us.
-
-# The implicit make rules have it looking for RCS files, among other things.
-# We instead explicitly write all the rules we care about.
-# It's even quicker (saves ~200ms) to pass -r on the command line.
-MAKEFLAGS=-r
-
-# The source directory tree.
-srcdir := ..
-abs_srcdir := $(abspath $(srcdir))
-
-# The name of the builddir.
-builddir_name ?= .
-
-# The V=1 flag on command line makes us verbosely print command lines.
-ifdef V
- quiet=
-else
- quiet=quiet_
-endif
-
-# Specify BUILDTYPE=Release on the command line for a release build.
-BUILDTYPE ?= Release
-
-# Directory all our build output goes into.
-# Note that this must be two directories beneath src/ for unit tests to pass,
-# as they reach into the src/ directory for data with relative paths.
-builddir ?= $(builddir_name)/$(BUILDTYPE)
-abs_builddir := $(abspath $(builddir))
-depsdir := $(builddir)/.deps
-
-# Object output directory.
-obj := $(builddir)/obj
-abs_obj := $(abspath $(obj))
-
-# We build up a list of every single one of the targets so we can slurp in the
-# generated dependency rule Makefiles in one pass.
-all_deps :=
-
-
-
-CC.target ?= $(CC)
-CFLAGS.target ?= $(CPPFLAGS) $(CFLAGS)
-CXX.target ?= $(CXX)
-CXXFLAGS.target ?= $(CPPFLAGS) $(CXXFLAGS)
-LINK.target ?= $(LINK)
-LDFLAGS.target ?= $(LDFLAGS)
-AR.target ?= $(AR)
-
-# C++ apps need to be linked with g++.
-LINK ?= $(CXX.target)
-
-# TODO(evan): move all cross-compilation logic to gyp-time so we don't need
-# to replicate this environment fallback in make as well.
-CC.host ?= gcc
-CFLAGS.host ?= $(CPPFLAGS_host) $(CFLAGS_host)
-CXX.host ?= g++
-CXXFLAGS.host ?= $(CPPFLAGS_host) $(CXXFLAGS_host)
-LINK.host ?= $(CXX.host)
-LDFLAGS.host ?=
-AR.host ?= ar
-
-# Define a dir function that can handle spaces.
-# http://www.gnu.org/software/make/manual/make.html#Syntax-of-Functions
-# "leading spaces cannot appear in the text of the first argument as written.
-# These characters can be put into the argument value by variable substitution."
-empty :=
-space := $(empty) $(empty)
-
-# http://stackoverflow.com/questions/1189781/using-make-dir-or-notdir-on-a-path-with-spaces
-replace_spaces = $(subst $(space),?,$1)
-unreplace_spaces = $(subst ?,$(space),$1)
-dirx = $(call unreplace_spaces,$(dir $(call replace_spaces,$1)))
-
-# Flags to make gcc output dependency info. Note that you need to be
-# careful here to use the flags that ccache and distcc can understand.
-# We write to a dep file on the side first and then rename at the end
-# so we can't end up with a broken dep file.
-depfile = $(depsdir)/$(call replace_spaces,$@).d
-DEPFLAGS = -MMD -MF $(depfile).raw
-
-# We have to fixup the deps output in a few ways.
-# (1) the file output should mention the proper .o file.
-# ccache or distcc lose the path to the target, so we convert a rule of
-# the form:
-# foobar.o: DEP1 DEP2
-# into
-# path/to/foobar.o: DEP1 DEP2
-# (2) we want missing files not to cause us to fail to build.
-# We want to rewrite
-# foobar.o: DEP1 DEP2 \
-# DEP3
-# to
-# DEP1:
-# DEP2:
-# DEP3:
-# so if the files are missing, they're just considered phony rules.
-# We have to do some pretty insane escaping to get those backslashes
-# and dollar signs past make, the shell, and sed at the same time.
-# Doesn't work with spaces, but that's fine: .d files have spaces in
-# their names replaced with other characters.
-define fixup_dep
-# The depfile may not exist if the input file didn't have any #includes.
-touch $(depfile).raw
-# Fixup path as in (1).
-sed -e "s|^$(notdir $@)|$@|" $(depfile).raw >> $(depfile)
-# Add extra rules as in (2).
-# We remove slashes and replace spaces with new lines;
-# remove blank lines;
-# delete the first line and append a colon to the remaining lines.
-sed -e 's|\\||' -e 'y| |\n|' $(depfile).raw |\
- grep -v '^$$' |\
- sed -e 1d -e 's|$$|:|' \
- >> $(depfile)
-rm $(depfile).raw
-endef
-
-# Command definitions:
-# - cmd_foo is the actual command to run;
-# - quiet_cmd_foo is the brief-output summary of the command.
-
-quiet_cmd_cc = CC($(TOOLSET)) $@
-cmd_cc = $(CC.$(TOOLSET)) $(GYP_CFLAGS) $(DEPFLAGS) $(CFLAGS.$(TOOLSET)) -c -o $@ $<
-
-quiet_cmd_cxx = CXX($(TOOLSET)) $@
-cmd_cxx = $(CXX.$(TOOLSET)) $(GYP_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-
-quiet_cmd_objc = CXX($(TOOLSET)) $@
-cmd_objc = $(CC.$(TOOLSET)) $(GYP_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-quiet_cmd_objcxx = CXX($(TOOLSET)) $@
-cmd_objcxx = $(CXX.$(TOOLSET)) $(GYP_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-# Commands for precompiled header files.
-quiet_cmd_pch_c = CXX($(TOOLSET)) $@
-cmd_pch_c = $(CC.$(TOOLSET)) $(GYP_PCH_CFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-quiet_cmd_pch_cc = CXX($(TOOLSET)) $@
-cmd_pch_cc = $(CC.$(TOOLSET)) $(GYP_PCH_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-quiet_cmd_pch_m = CXX($(TOOLSET)) $@
-cmd_pch_m = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $<
-quiet_cmd_pch_mm = CXX($(TOOLSET)) $@
-cmd_pch_mm = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-# gyp-mac-tool is written next to the root Makefile by gyp.
-# Use $(4) for the command, since $(2) and $(3) are used as flag by do_cmd
-# already.
-quiet_cmd_mac_tool = MACTOOL $(4) $<
-cmd_mac_tool = ./gyp-mac-tool $(4) $< "$@"
-
-quiet_cmd_mac_package_framework = PACKAGE FRAMEWORK $@
-cmd_mac_package_framework = ./gyp-mac-tool package-framework "$@" $(4)
-
-quiet_cmd_infoplist = INFOPLIST $@
-cmd_infoplist = $(CC.$(TOOLSET)) -E -P -Wno-trigraphs -x c $(INFOPLIST_DEFINES) "$<" -o "$@"
-
-quiet_cmd_touch = TOUCH $@
-cmd_touch = touch $@
-
-quiet_cmd_copy = COPY $@
-# send stderr to /dev/null to ignore messages when linking directories.
-cmd_copy = rm -rf "$@" && cp -af "$<" "$@"
-
-quiet_cmd_alink = LIBTOOL-STATIC $@
-cmd_alink = rm -f $@ && ./gyp-mac-tool filter-libtool libtool $(GYP_LIBTOOLFLAGS) -static -o $@ $(filter %.o,$^)
-
-quiet_cmd_link = LINK($(TOOLSET)) $@
-cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS)
-
-quiet_cmd_solink = SOLINK($(TOOLSET)) $@
-cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS)
-
-quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@
-cmd_solink_module = $(LINK.$(TOOLSET)) -bundle $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS)
-
-
-# Define an escape_quotes function to escape single quotes.
-# This allows us to handle quotes properly as long as we always
-# use single quotes and escape_quotes.
-escape_quotes = $(subst ','\'',$(1))
-# This comment is here just to include a ' to unconfuse syntax highlighting.
-# Define an escape_vars function to escape '$' variable syntax.
-# This allows us to read/write command lines with shell variables (e.g.
-# $LD_LIBRARY_PATH), without triggering make substitution.
-escape_vars = $(subst $$,$$$$,$(1))
-# Helper that expands to a shell command to echo a string exactly as it is in
-# make. This uses printf instead of echo because printf's behaviour with respect
-# to escape sequences is more portable than echo's across different shells
-# (e.g., dash, bash).
-exact_echo = printf '%s\n' '$(call escape_quotes,$(1))'
-
-# Helper to compare the command we're about to run against the command
-# we logged the last time we ran the command. Produces an empty
-# string (false) when the commands match.
-# Tricky point: Make has no string-equality test function.
-# The kernel uses the following, but it seems like it would have false
-# positives, where one string reordered its arguments.
-# arg_check = $(strip $(filter-out $(cmd_$(1)), $(cmd_$@)) \
-# $(filter-out $(cmd_$@), $(cmd_$(1))))
-# We instead substitute each for the empty string into the other, and
-# say they're equal if both substitutions produce the empty string.
-# .d files contain ? instead of spaces, take that into account.
-command_changed = $(or $(subst $(cmd_$(1)),,$(cmd_$(call replace_spaces,$@))),\
- $(subst $(cmd_$(call replace_spaces,$@)),,$(cmd_$(1))))
-
-# Helper that is non-empty when a prerequisite changes.
-# Normally make does this implicitly, but we force rules to always run
-# so we can check their command lines.
-# $? -- new prerequisites
-# $| -- order-only dependencies
-prereq_changed = $(filter-out FORCE_DO_CMD,$(filter-out $|,$?))
-
-# Helper that executes all postbuilds until one fails.
-define do_postbuilds
- @E=0;\
- for p in $(POSTBUILDS); do\
- eval $$p;\
- E=$$?;\
- if [ $$E -ne 0 ]; then\
- break;\
- fi;\
- done;\
- if [ $$E -ne 0 ]; then\
- rm -rf "$@";\
- exit $$E;\
- fi
-endef
-
-# do_cmd: run a command via the above cmd_foo names, if necessary.
-# Should always run for a given target to handle command-line changes.
-# Second argument, if non-zero, makes it do asm/C/C++ dependency munging.
-# Third argument, if non-zero, makes it do POSTBUILDS processing.
-# Note: We intentionally do NOT call dirx for depfile, since it contains ? for
-# spaces already and dirx strips the ? characters.
-define do_cmd
-$(if $(or $(command_changed),$(prereq_changed)),
- @$(call exact_echo, $($(quiet)cmd_$(1)))
- @mkdir -p "$(call dirx,$@)" "$(dir $(depfile))"
- $(if $(findstring flock,$(word 2,$(cmd_$1))),
- @$(cmd_$(1))
- @echo " $(quiet_cmd_$(1)): Finished",
- @$(cmd_$(1))
- )
- @$(call exact_echo,$(call escape_vars,cmd_$(call replace_spaces,$@) := $(cmd_$(1)))) > $(depfile)
- @$(if $(2),$(fixup_dep))
- $(if $(and $(3), $(POSTBUILDS)),
- $(call do_postbuilds)
- )
-)
-endef
-
-# Declare the "all" target first so it is the default,
-# even though we don't have the deps yet.
-.PHONY: all
-all:
-
-# make looks for ways to re-generate included makefiles, but in our case, we
-# don't have a direct way. Explicitly telling make that it has nothing to do
-# for them makes it go faster.
-%.d: ;
-
-# Use FORCE_DO_CMD to force a target to run. Should be coupled with
-# do_cmd.
-.PHONY: FORCE_DO_CMD
-FORCE_DO_CMD:
-
-TOOLSET := target
-# Suffix rules, putting all outputs into $(obj).
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cpp FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cxx FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.m FORCE_DO_CMD
- @$(call do_cmd,objc,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.mm FORCE_DO_CMD
- @$(call do_cmd,objcxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.S FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.s FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-# Try building from generated source, too.
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cpp FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cxx FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.m FORCE_DO_CMD
- @$(call do_cmd,objc,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.mm FORCE_DO_CMD
- @$(call do_cmd,objcxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.S FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.s FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-$(obj).$(TOOLSET)/%.o: $(obj)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.cpp FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.cxx FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.m FORCE_DO_CMD
- @$(call do_cmd,objc,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.mm FORCE_DO_CMD
- @$(call do_cmd,objcxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.S FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.s FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-
-ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
- $(findstring $(join ^,$(prefix)),\
- $(join ^,binding.target.mk)))),)
- include binding.target.mk
-endif
-
-quiet_cmd_regen_makefile = ACTION Regenerating $@
-cmd_regen_makefile = cd $(srcdir); /Users/trott/io.js/deps/npm/node_modules/node-gyp/gyp/gyp_main.py -fmake --ignore-environment "--toplevel-dir=." -I/Users/trott/io.js/test/addons-napi/1_hello_world/build/config.gypi -I/Users/trott/io.js/deps/npm/node_modules/node-gyp/addon.gypi -I/Users/trott/io.js/common.gypi "--depth=." "-Goutput_dir=." "--generator-output=build" "-Dlibrary=shared_library" "-Dvisibility=default" "-Dnode_root_dir=/Users/trott/io.js" "-Dnode_gyp_dir=/Users/trott/io.js/deps/npm/node_modules/node-gyp" "-Dnode_lib_file=/Users/trott/io.js/$(Configuration)/node.lib" "-Dmodule_root_dir=/Users/trott/io.js/test/addons-napi/1_hello_world" "-Dnode_engine=v8" binding.gyp
-Makefile: $(srcdir)/../../../deps/npm/node_modules/node-gyp/addon.gypi $(srcdir)/build/config.gypi $(srcdir)/binding.gyp $(srcdir)/../../../common.gypi
- $(call do_cmd,regen_makefile)
-
-# "all" is a concatenation of the "all" targets from all the included
-# sub-makefiles. This is just here to clarify.
-all:
-
-# Add in dependency-tracking rules. $(all_deps) is the list of every single
-# target in our tree. Only consider the ones with .d (dependency) info:
-d_files := $(wildcard $(foreach f,$(all_deps),$(depsdir)/$(f).d))
-ifneq ($(d_files),)
- include $(d_files)
-endif
diff --git a/test/addons-napi/1_hello_world/build/binding.Makefile b/test/addons-napi/1_hello_world/build/binding.Makefile
deleted file mode 100644
index 0556c70f61905f..00000000000000
--- a/test/addons-napi/1_hello_world/build/binding.Makefile
+++ /dev/null
@@ -1,6 +0,0 @@
-# This file is generated by gyp; do not edit.
-
-export builddir_name ?= ./build/.
-.PHONY: all
-all:
- $(MAKE) binding
diff --git a/test/addons-napi/1_hello_world/build/binding.target.mk b/test/addons-napi/1_hello_world/build/binding.target.mk
deleted file mode 100644
index a707bab99ab136..00000000000000
--- a/test/addons-napi/1_hello_world/build/binding.target.mk
+++ /dev/null
@@ -1,178 +0,0 @@
-# This file is generated by gyp; do not edit.
-
-TOOLSET := target
-TARGET := binding
-DEFS_Debug := \
- '-DNODE_GYP_MODULE_NAME=binding' \
- '-DUSING_UV_SHARED=1' \
- '-DUSING_V8_SHARED=1' \
- '-DV8_DEPRECATION_WARNINGS=1' \
- '-D_DARWIN_USE_64_BIT_INODE=1' \
- '-D_LARGEFILE_SOURCE' \
- '-D_FILE_OFFSET_BITS=64' \
- '-DBUILDING_NODE_EXTENSION' \
- '-DDEBUG' \
- '-D_DEBUG' \
- '-DV8_ENABLE_CHECKS'
-
-# Flags passed to all source files.
-CFLAGS_Debug := \
- -O0 \
- -gdwarf-2 \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -Wall \
- -Wendif-labels \
- -W \
- -Wno-unused-parameter
-
-# Flags passed to only C files.
-CFLAGS_C_Debug := \
- -fno-strict-aliasing
-
-# Flags passed to only C++ files.
-CFLAGS_CC_Debug := \
- -std=gnu++0x \
- -stdlib=libc++ \
- -fno-rtti \
- -fno-exceptions \
- -fno-threadsafe-statics \
- -fno-strict-aliasing
-
-# Flags passed to only ObjC files.
-CFLAGS_OBJC_Debug :=
-
-# Flags passed to only ObjC++ files.
-CFLAGS_OBJCC_Debug :=
-
-INCS_Debug := \
- -I/Users/trott/io.js/include/node \
- -I/Users/trott/io.js/src \
- -I/Users/trott/io.js/deps/uv/include \
- -I/Users/trott/io.js/deps/v8/include
-
-DEFS_Release := \
- '-DNODE_GYP_MODULE_NAME=binding' \
- '-DUSING_UV_SHARED=1' \
- '-DUSING_V8_SHARED=1' \
- '-DV8_DEPRECATION_WARNINGS=1' \
- '-D_DARWIN_USE_64_BIT_INODE=1' \
- '-D_LARGEFILE_SOURCE' \
- '-D_FILE_OFFSET_BITS=64' \
- '-DBUILDING_NODE_EXTENSION'
-
-# Flags passed to all source files.
-CFLAGS_Release := \
- -Os \
- -gdwarf-2 \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -Wall \
- -Wendif-labels \
- -W \
- -Wno-unused-parameter
-
-# Flags passed to only C files.
-CFLAGS_C_Release := \
- -fno-strict-aliasing
-
-# Flags passed to only C++ files.
-CFLAGS_CC_Release := \
- -std=gnu++0x \
- -stdlib=libc++ \
- -fno-rtti \
- -fno-exceptions \
- -fno-threadsafe-statics \
- -fno-strict-aliasing
-
-# Flags passed to only ObjC files.
-CFLAGS_OBJC_Release :=
-
-# Flags passed to only ObjC++ files.
-CFLAGS_OBJCC_Release :=
-
-INCS_Release := \
- -I/Users/trott/io.js/include/node \
- -I/Users/trott/io.js/src \
- -I/Users/trott/io.js/deps/uv/include \
- -I/Users/trott/io.js/deps/v8/include
-
-OBJS := \
- $(obj).target/$(TARGET)/binding.o
-
-# Add to the list of files we specially track dependencies for.
-all_deps += $(OBJS)
-
-# CFLAGS et al overrides must be target-local.
-# See "Target-specific Variable Values" in the GNU Make manual.
-$(OBJS): TOOLSET := $(TOOLSET)
-$(OBJS): GYP_CFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE))
-$(OBJS): GYP_CXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE))
-$(OBJS): GYP_OBJCFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) $(CFLAGS_OBJC_$(BUILDTYPE))
-$(OBJS): GYP_OBJCXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) $(CFLAGS_OBJCC_$(BUILDTYPE))
-
-# Suffix rules, putting all outputs into $(obj).
-
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-# Try building from generated source, too.
-
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-# End of this set of suffix rules
-### Rules for final target.
-LDFLAGS_Debug := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -L$(builddir) \
- -stdlib=libc++
-
-LIBTOOLFLAGS_Debug := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first
-
-LDFLAGS_Release := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -L$(builddir) \
- -stdlib=libc++
-
-LIBTOOLFLAGS_Release := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first
-
-LIBS :=
-
-$(builddir)/binding.node: GYP_LDFLAGS := $(LDFLAGS_$(BUILDTYPE))
-$(builddir)/binding.node: LIBS := $(LIBS)
-$(builddir)/binding.node: GYP_LIBTOOLFLAGS := $(LIBTOOLFLAGS_$(BUILDTYPE))
-$(builddir)/binding.node: TOOLSET := $(TOOLSET)
-$(builddir)/binding.node: $(OBJS) FORCE_DO_CMD
- $(call do_cmd,solink_module)
-
-all_deps += $(builddir)/binding.node
-# Add target alias
-.PHONY: binding
-binding: $(builddir)/binding.node
-
-# Short alias for building this executable.
-.PHONY: binding.node
-binding.node: $(builddir)/binding.node
-
-# Add executable to "all" target.
-.PHONY: all
-all: $(builddir)/binding.node
-
diff --git a/test/addons-napi/1_hello_world/build/config.gypi b/test/addons-napi/1_hello_world/build/config.gypi
deleted file mode 100644
index 6a35cfccbb772b..00000000000000
--- a/test/addons-napi/1_hello_world/build/config.gypi
+++ /dev/null
@@ -1,70 +0,0 @@
-# Do not edit. File was generated by node-gyp's "configure" step
-{
- "target_defaults": {
- "cflags": [],
- "default_configuration": "Release",
- "defines": [],
- "include_dirs": [],
- "libraries": []
- },
- "variables": {
- "asan": 0,
- "coverage": "false",
- "debug_devtools": "node",
- "debug_http2": "false",
- "debug_nghttp2": "false",
- "force_dynamic_crt": 0,
- "host_arch": "x64",
- "icu_data_file": "icudt59l.dat",
- "icu_data_in": "../../deps/icu-small/source/data/in/icudt59l.dat",
- "icu_endianness": "l",
- "icu_gyp_path": "tools/icu/icu-generic.gyp",
- "icu_locales": "en,root",
- "icu_path": "deps/icu-small",
- "icu_small": "true",
- "icu_ver_major": "59",
- "llvm_version": 0,
- "node_byteorder": "little",
- "node_enable_d8": "false",
- "node_enable_v8_vtunejit": "false",
- "node_install_npm": "true",
- "node_module_version": 58,
- "node_no_browser_globals": "false",
- "node_prefix": "/usr/local",
- "node_release_urlbase": "",
- "node_shared": "false",
- "node_shared_cares": "false",
- "node_shared_http_parser": "false",
- "node_shared_libuv": "false",
- "node_shared_openssl": "false",
- "node_shared_zlib": "false",
- "node_tag": "",
- "node_use_bundled_v8": "true",
- "node_use_dtrace": "true",
- "node_use_etw": "false",
- "node_use_lttng": "false",
- "node_use_openssl": "true",
- "node_use_perfctr": "false",
- "node_use_v8_platform": "true",
- "node_without_node_options": "false",
- "openssl_fips": "",
- "openssl_no_asm": 0,
- "shlib_suffix": "58.dylib",
- "target_arch": "x64",
- "uv_parent_path": "/deps/uv/",
- "uv_use_dtrace": "true",
- "v8_enable_gdbjit": 0,
- "v8_enable_i18n_support": 1,
- "v8_enable_inspector": 1,
- "v8_no_strict_aliasing": 1,
- "v8_optimized_debug": 0,
- "v8_promise_internal_field_count": 1,
- "v8_random_seed": 0,
- "v8_trace_maps": 0,
- "v8_use_snapshot": "true",
- "want_separate_host_toolset": 0,
- "xcode_version": "8.0",
- "nodedir": "/Users/trott/io.js",
- "standalone_static_library": 1
- }
-}
diff --git a/test/addons-napi/1_hello_world/build/gyp-mac-tool b/test/addons-napi/1_hello_world/build/gyp-mac-tool
deleted file mode 100755
index 8ef02b0493a003..00000000000000
--- a/test/addons-napi/1_hello_world/build/gyp-mac-tool
+++ /dev/null
@@ -1,611 +0,0 @@
-#!/usr/bin/env python
-# Generated by gyp. Do not edit.
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Utility functions to perform Xcode-style build steps.
-
-These functions are executed via gyp-mac-tool when using the Makefile generator.
-"""
-
-import fcntl
-import fnmatch
-import glob
-import json
-import os
-import plistlib
-import re
-import shutil
-import string
-import subprocess
-import sys
-import tempfile
-
-
-def main(args):
- executor = MacTool()
- exit_code = executor.Dispatch(args)
- if exit_code is not None:
- sys.exit(exit_code)
-
-
-class MacTool(object):
- """This class performs all the Mac tooling steps. The methods can either be
- executed directly, or dispatched from an argument list."""
-
- def Dispatch(self, args):
- """Dispatches a string command to a method."""
- if len(args) < 1:
- raise Exception("Not enough arguments")
-
- method = "Exec%s" % self._CommandifyName(args[0])
- return getattr(self, method)(*args[1:])
-
- def _CommandifyName(self, name_string):
- """Transforms a tool name like copy-info-plist to CopyInfoPlist"""
- return name_string.title().replace('-', '')
-
- def ExecCopyBundleResource(self, source, dest, convert_to_binary):
- """Copies a resource file to the bundle/Resources directory, performing any
- necessary compilation on each resource."""
- extension = os.path.splitext(source)[1].lower()
- if os.path.isdir(source):
- # Copy tree.
- # TODO(thakis): This copies file attributes like mtime, while the
- # single-file branch below doesn't. This should probably be changed to
- # be consistent with the single-file branch.
- if os.path.exists(dest):
- shutil.rmtree(dest)
- shutil.copytree(source, dest)
- elif extension == '.xib':
- return self._CopyXIBFile(source, dest)
- elif extension == '.storyboard':
- return self._CopyXIBFile(source, dest)
- elif extension == '.strings':
- self._CopyStringsFile(source, dest, convert_to_binary)
- else:
- shutil.copy(source, dest)
-
- def _CopyXIBFile(self, source, dest):
- """Compiles a XIB file with ibtool into a binary plist in the bundle."""
-
- # ibtool sometimes crashes with relative paths. See crbug.com/314728.
- base = os.path.dirname(os.path.realpath(__file__))
- if os.path.relpath(source):
- source = os.path.join(base, source)
- if os.path.relpath(dest):
- dest = os.path.join(base, dest)
-
- args = ['xcrun', 'ibtool', '--errors', '--warnings', '--notices',
- '--output-format', 'human-readable-text', '--compile', dest, source]
- ibtool_section_re = re.compile(r'/\*.*\*/')
- ibtool_re = re.compile(r'.*note:.*is clipping its content')
- ibtoolout = subprocess.Popen(args, stdout=subprocess.PIPE)
- current_section_header = None
- for line in ibtoolout.stdout:
- if ibtool_section_re.match(line):
- current_section_header = line
- elif not ibtool_re.match(line):
- if current_section_header:
- sys.stdout.write(current_section_header)
- current_section_header = None
- sys.stdout.write(line)
- return ibtoolout.returncode
-
- def _ConvertToBinary(self, dest):
- subprocess.check_call([
- 'xcrun', 'plutil', '-convert', 'binary1', '-o', dest, dest])
-
- def _CopyStringsFile(self, source, dest, convert_to_binary):
- """Copies a .strings file using iconv to reconvert the input into UTF-16."""
- input_code = self._DetectInputEncoding(source) or "UTF-8"
-
- # Xcode's CpyCopyStringsFile / builtin-copyStrings seems to call
- # CFPropertyListCreateFromXMLData() behind the scenes; at least it prints
- # CFPropertyListCreateFromXMLData(): Old-style plist parser: missing
- # semicolon in dictionary.
- # on invalid files. Do the same kind of validation.
- import CoreFoundation
- s = open(source, 'rb').read()
- d = CoreFoundation.CFDataCreate(None, s, len(s))
- _, error = CoreFoundation.CFPropertyListCreateFromXMLData(None, d, 0, None)
- if error:
- return
-
- fp = open(dest, 'wb')
- fp.write(s.decode(input_code).encode('UTF-16'))
- fp.close()
-
- if convert_to_binary == 'True':
- self._ConvertToBinary(dest)
-
- def _DetectInputEncoding(self, file_name):
- """Reads the first few bytes from file_name and tries to guess the text
- encoding. Returns None as a guess if it can't detect it."""
- fp = open(file_name, 'rb')
- try:
- header = fp.read(3)
-    except Exception:
- fp.close()
- return None
- fp.close()
- if header.startswith("\xFE\xFF"):
- return "UTF-16"
- elif header.startswith("\xFF\xFE"):
- return "UTF-16"
- elif header.startswith("\xEF\xBB\xBF"):
- return "UTF-8"
- else:
- return None
-
- def ExecCopyInfoPlist(self, source, dest, convert_to_binary, *keys):
- """Copies the |source| Info.plist to the destination directory |dest|."""
- # Read the source Info.plist into memory.
- fd = open(source, 'r')
- lines = fd.read()
- fd.close()
-
- # Insert synthesized key/value pairs (e.g. BuildMachineOSBuild).
- plist = plistlib.readPlistFromString(lines)
- if keys:
- plist = dict(plist.items() + json.loads(keys[0]).items())
- lines = plistlib.writePlistToString(plist)
-
- # Go through all the environment variables and replace them as variables in
- # the file.
- IDENT_RE = re.compile(r'[/\s]')
- for key in os.environ:
- if key.startswith('_'):
- continue
- evar = '${%s}' % key
- evalue = os.environ[key]
- lines = string.replace(lines, evar, evalue)
-
-    # Xcode supports various suffixes on environment variables, which are
- # all undocumented. :rfc1034identifier is used in the standard project
- # template these days, and :identifier was used earlier. They are used to
- # convert non-url characters into things that look like valid urls --
- # except that the replacement character for :identifier, '_' isn't valid
- # in a URL either -- oops, hence :rfc1034identifier was born.
- evar = '${%s:identifier}' % key
- evalue = IDENT_RE.sub('_', os.environ[key])
- lines = string.replace(lines, evar, evalue)
-
- evar = '${%s:rfc1034identifier}' % key
- evalue = IDENT_RE.sub('-', os.environ[key])
- lines = string.replace(lines, evar, evalue)
-
- # Remove any keys with values that haven't been replaced.
- lines = lines.split('\n')
- for i in range(len(lines)):
- if lines[i].strip().startswith("${"):
- lines[i] = None
- lines[i - 1] = None
- lines = '\n'.join(filter(lambda x: x is not None, lines))
-
- # Write out the file with variables replaced.
- fd = open(dest, 'w')
- fd.write(lines)
- fd.close()
-
-    # Write out the PkgInfo file now that the Info.plist file has been
-    # "compiled".
- self._WritePkgInfo(dest)
-
- if convert_to_binary == 'True':
- self._ConvertToBinary(dest)
-
- def _WritePkgInfo(self, info_plist):
- """This writes the PkgInfo file from the data stored in Info.plist."""
- plist = plistlib.readPlist(info_plist)
- if not plist:
- return
-
- # Only create PkgInfo for executable types.
- package_type = plist['CFBundlePackageType']
- if package_type != 'APPL':
- return
-
- # The format of PkgInfo is eight characters, representing the bundle type
- # and bundle signature, each four characters. If that is missing, four
- # '?' characters are used instead.
- signature_code = plist.get('CFBundleSignature', '????')
- if len(signature_code) != 4: # Wrong length resets everything, too.
- signature_code = '?' * 4
-
- dest = os.path.join(os.path.dirname(info_plist), 'PkgInfo')
- fp = open(dest, 'w')
- fp.write('%s%s' % (package_type, signature_code))
- fp.close()
-
- def ExecFlock(self, lockfile, *cmd_list):
- """Emulates the most basic behavior of Linux's flock(1)."""
- # Rely on exception handling to report errors.
- fd = os.open(lockfile, os.O_RDONLY|os.O_NOCTTY|os.O_CREAT, 0o666)
- fcntl.flock(fd, fcntl.LOCK_EX)
- return subprocess.call(cmd_list)
-
- def ExecFilterLibtool(self, *cmd_list):
- """Calls libtool and filters out '/path/to/libtool: file: foo.o has no
- symbols'."""
- libtool_re = re.compile(r'^.*libtool: file: .* has no symbols$')
- libtool_re5 = re.compile(
- r'^.*libtool: warning for library: ' +
- r'.* the table of contents is empty ' +
- r'\(no object file members in the library define global symbols\)$')
- env = os.environ.copy()
- # Ref:
- # http://www.opensource.apple.com/source/cctools/cctools-809/misc/libtool.c
-    # The problem with this flag is that it resets the mtime on the file to
- # epoch=0, e.g. 1970-1-1 or 1969-12-31 depending on timezone.
- env['ZERO_AR_DATE'] = '1'
- libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env)
- _, err = libtoolout.communicate()
- for line in err.splitlines():
- if not libtool_re.match(line) and not libtool_re5.match(line):
- print >>sys.stderr, line
- # Unconditionally touch the output .a file on the command line if present
- # and the command succeeded. A bit hacky.
- if not libtoolout.returncode:
- for i in range(len(cmd_list) - 1):
- if cmd_list[i] == "-o" and cmd_list[i+1].endswith('.a'):
- os.utime(cmd_list[i+1], None)
- break
- return libtoolout.returncode
-
- def ExecPackageFramework(self, framework, version):
- """Takes a path to Something.framework and the Current version of that and
- sets up all the symlinks."""
- # Find the name of the binary based on the part before the ".framework".
- binary = os.path.basename(framework).split('.')[0]
-
- CURRENT = 'Current'
- RESOURCES = 'Resources'
- VERSIONS = 'Versions'
-
- if not os.path.exists(os.path.join(framework, VERSIONS, version, binary)):
- # Binary-less frameworks don't seem to contain symlinks (see e.g.
- # chromium's out/Debug/org.chromium.Chromium.manifest/ bundle).
- return
-
- # Move into the framework directory to set the symlinks correctly.
- pwd = os.getcwd()
- os.chdir(framework)
-
- # Set up the Current version.
- self._Relink(version, os.path.join(VERSIONS, CURRENT))
-
- # Set up the root symlinks.
- self._Relink(os.path.join(VERSIONS, CURRENT, binary), binary)
- self._Relink(os.path.join(VERSIONS, CURRENT, RESOURCES), RESOURCES)
-
- # Back to where we were before!
- os.chdir(pwd)
-
- def _Relink(self, dest, link):
- """Creates a symlink to |dest| named |link|. If |link| already exists,
- it is overwritten."""
- if os.path.lexists(link):
- os.remove(link)
- os.symlink(dest, link)
-
- def ExecCompileXcassets(self, keys, *inputs):
- """Compiles multiple .xcassets files into a single .car file.
-
-    This invokes 'actool' to compile all the input .xcassets files. The
-    |keys| argument is a json-encoded dictionary of extra arguments to
-    pass to 'actool' when the asset catalogs contain an application icon
-    or a launch image.
-
-    Note that 'actool' does not create the Assets.car file if the asset
-    catalogs do not contain an imageset.
- """
- command_line = [
- 'xcrun', 'actool', '--output-format', 'human-readable-text',
- '--compress-pngs', '--notices', '--warnings', '--errors',
- ]
- is_iphone_target = 'IPHONEOS_DEPLOYMENT_TARGET' in os.environ
- if is_iphone_target:
- platform = os.environ['CONFIGURATION'].split('-')[-1]
- if platform not in ('iphoneos', 'iphonesimulator'):
- platform = 'iphonesimulator'
- command_line.extend([
- '--platform', platform, '--target-device', 'iphone',
- '--target-device', 'ipad', '--minimum-deployment-target',
- os.environ['IPHONEOS_DEPLOYMENT_TARGET'], '--compile',
- os.path.abspath(os.environ['CONTENTS_FOLDER_PATH']),
- ])
- else:
- command_line.extend([
- '--platform', 'macosx', '--target-device', 'mac',
- '--minimum-deployment-target', os.environ['MACOSX_DEPLOYMENT_TARGET'],
- '--compile',
- os.path.abspath(os.environ['UNLOCALIZED_RESOURCES_FOLDER_PATH']),
- ])
- if keys:
- keys = json.loads(keys)
- for key, value in keys.iteritems():
- arg_name = '--' + key
- if isinstance(value, bool):
- if value:
- command_line.append(arg_name)
- elif isinstance(value, list):
- for v in value:
- command_line.append(arg_name)
- command_line.append(str(v))
- else:
- command_line.append(arg_name)
- command_line.append(str(value))
-    # Note: actool crashes if input paths are relative, so use os.path.abspath
-    # to get absolute path names for inputs.
- command_line.extend(map(os.path.abspath, inputs))
- subprocess.check_call(command_line)
-
- def ExecMergeInfoPlist(self, output, *inputs):
- """Merge multiple .plist files into a single .plist file."""
- merged_plist = {}
- for path in inputs:
- plist = self._LoadPlistMaybeBinary(path)
- self._MergePlist(merged_plist, plist)
- plistlib.writePlist(merged_plist, output)
-
- def ExecCodeSignBundle(self, key, resource_rules, entitlements, provisioning):
- """Code sign a bundle.
-
- This function tries to code sign an iOS bundle, following the same
- algorithm as Xcode:
- 1. copy ResourceRules.plist from the user or the SDK into the bundle,
-      2. pick the provisioning profile that best matches the bundle identifier,
- and copy it into the bundle as embedded.mobileprovision,
- 3. copy Entitlements.plist from user or SDK next to the bundle,
- 4. code sign the bundle.
- """
- resource_rules_path = self._InstallResourceRules(resource_rules)
- substitutions, overrides = self._InstallProvisioningProfile(
- provisioning, self._GetCFBundleIdentifier())
- entitlements_path = self._InstallEntitlements(
- entitlements, substitutions, overrides)
- subprocess.check_call([
- 'codesign', '--force', '--sign', key, '--resource-rules',
- resource_rules_path, '--entitlements', entitlements_path,
- os.path.join(
- os.environ['TARGET_BUILD_DIR'],
- os.environ['FULL_PRODUCT_NAME'])])
-
- def _InstallResourceRules(self, resource_rules):
- """Installs ResourceRules.plist from user or SDK into the bundle.
-
- Args:
- resource_rules: string, optional, path to the ResourceRules.plist file
- to use, default to "${SDKROOT}/ResourceRules.plist"
-
- Returns:
- Path to the copy of ResourceRules.plist into the bundle.
- """
- source_path = resource_rules
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['CONTENTS_FOLDER_PATH'],
- 'ResourceRules.plist')
- if not source_path:
- source_path = os.path.join(
- os.environ['SDKROOT'], 'ResourceRules.plist')
- shutil.copy2(source_path, target_path)
- return target_path
-
- def _InstallProvisioningProfile(self, profile, bundle_identifier):
- """Installs embedded.mobileprovision into the bundle.
-
- Args:
- profile: string, optional, short name of the .mobileprovision file
- to use, if empty or the file is missing, the best file installed
- will be used
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
-
- Returns:
-      A tuple containing two dictionaries: variable substitutions and values
-      to override when generating the entitlements file.
- """
- source_path, provisioning_data, team_id = self._FindProvisioningProfile(
- profile, bundle_identifier)
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['CONTENTS_FOLDER_PATH'],
- 'embedded.mobileprovision')
- shutil.copy2(source_path, target_path)
- substitutions = self._GetSubstitutions(bundle_identifier, team_id + '.')
- return substitutions, provisioning_data['Entitlements']
-
- def _FindProvisioningProfile(self, profile, bundle_identifier):
- """Finds the .mobileprovision file to use for signing the bundle.
-
-    Checks all the installed provisioning profiles (or, if the user specified
-    the PROVISIONING_PROFILE variable, only consults it) and selects the most
-    specific one that corresponds to the bundle identifier.
-
- Args:
- profile: string, optional, short name of the .mobileprovision file
- to use, if empty or the file is missing, the best file installed
- will be used
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
-
- Returns:
- A tuple of the path to the selected provisioning profile, the data of
- the embedded plist in the provisioning profile and the team identifier
- to use for code signing.
-
- Raises:
- SystemExit: if no .mobileprovision can be used to sign the bundle.
- """
- profiles_dir = os.path.join(
- os.environ['HOME'], 'Library', 'MobileDevice', 'Provisioning Profiles')
- if not os.path.isdir(profiles_dir):
- print >>sys.stderr, (
- 'cannot find mobile provisioning for %s' % bundle_identifier)
- sys.exit(1)
- provisioning_profiles = None
- if profile:
- profile_path = os.path.join(profiles_dir, profile + '.mobileprovision')
- if os.path.exists(profile_path):
- provisioning_profiles = [profile_path]
- if not provisioning_profiles:
- provisioning_profiles = glob.glob(
- os.path.join(profiles_dir, '*.mobileprovision'))
- valid_provisioning_profiles = {}
- for profile_path in provisioning_profiles:
- profile_data = self._LoadProvisioningProfile(profile_path)
- app_id_pattern = profile_data.get(
- 'Entitlements', {}).get('application-identifier', '')
- for team_identifier in profile_data.get('TeamIdentifier', []):
- app_id = '%s.%s' % (team_identifier, bundle_identifier)
- if fnmatch.fnmatch(app_id, app_id_pattern):
- valid_provisioning_profiles[app_id_pattern] = (
- profile_path, profile_data, team_identifier)
- if not valid_provisioning_profiles:
- print >>sys.stderr, (
- 'cannot find mobile provisioning for %s' % bundle_identifier)
- sys.exit(1)
- # If the user has multiple provisioning profiles installed that can be
-    # used for ${bundle_identifier}, pick the most specific one (i.e. the
- # provisioning profile whose pattern is the longest).
- selected_key = max(valid_provisioning_profiles, key=lambda v: len(v))
- return valid_provisioning_profiles[selected_key]
-
- def _LoadProvisioningProfile(self, profile_path):
- """Extracts the plist embedded in a provisioning profile.
-
- Args:
- profile_path: string, path to the .mobileprovision file
-
- Returns:
- Content of the plist embedded in the provisioning profile as a dictionary.
- """
- with tempfile.NamedTemporaryFile() as temp:
- subprocess.check_call([
- 'security', 'cms', '-D', '-i', profile_path, '-o', temp.name])
- return self._LoadPlistMaybeBinary(temp.name)
-
- def _MergePlist(self, merged_plist, plist):
- """Merge |plist| into |merged_plist|."""
- for key, value in plist.iteritems():
- if isinstance(value, dict):
- merged_value = merged_plist.get(key, {})
- if isinstance(merged_value, dict):
- self._MergePlist(merged_value, value)
- merged_plist[key] = merged_value
- else:
- merged_plist[key] = value
- else:
- merged_plist[key] = value
-
-  def _LoadPlistMaybeBinary(self, plist_path):
-    """Loads into memory a plist possibly encoded in binary format.
-
- This is a wrapper around plistlib.readPlist that tries to convert the
- plist to the XML format if it can't be parsed (assuming that it is in
- the binary format).
-
- Args:
- plist_path: string, path to a plist file, in XML or binary format
-
- Returns:
- Content of the plist as a dictionary.
- """
- try:
-      # First, try to read the file using plistlib, which only supports XML,
- # and if an exception is raised, convert a temporary copy to XML and
- # load that copy.
- return plistlib.readPlist(plist_path)
- except:
- pass
- with tempfile.NamedTemporaryFile() as temp:
- shutil.copy2(plist_path, temp.name)
- subprocess.check_call(['plutil', '-convert', 'xml1', temp.name])
- return plistlib.readPlist(temp.name)
-
- def _GetSubstitutions(self, bundle_identifier, app_identifier_prefix):
- """Constructs a dictionary of variable substitutions for Entitlements.plist.
-
- Args:
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
- app_identifier_prefix: string, value for AppIdentifierPrefix
-
- Returns:
- Dictionary of substitutions to apply when generating Entitlements.plist.
- """
- return {
- 'CFBundleIdentifier': bundle_identifier,
- 'AppIdentifierPrefix': app_identifier_prefix,
- }
-
- def _GetCFBundleIdentifier(self):
- """Extracts CFBundleIdentifier value from Info.plist in the bundle.
-
- Returns:
- Value of CFBundleIdentifier in the Info.plist located in the bundle.
- """
- info_plist_path = os.path.join(
- os.environ['TARGET_BUILD_DIR'],
- os.environ['INFOPLIST_PATH'])
- info_plist_data = self._LoadPlistMaybeBinary(info_plist_path)
- return info_plist_data['CFBundleIdentifier']
-
-  def _InstallEntitlements(self, entitlements, substitutions, overrides):
-    """Generates and installs the ${BundleName}.xcent entitlements file.
-
-    Expands the "$(variable)" patterns in the source entitlements file, adds
-    extra entitlements defined in the .mobileprovision file and copies the
-    generated plist to "${BundlePath}.xcent".
-
- Args:
- entitlements: string, optional, path to the Entitlements.plist template
- to use, defaults to "${SDKROOT}/Entitlements.plist"
- substitutions: dictionary, variable substitutions
- overrides: dictionary, values to add to the entitlements
-
- Returns:
- Path to the generated entitlements file.
- """
- source_path = entitlements
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['PRODUCT_NAME'] + '.xcent')
- if not source_path:
- source_path = os.path.join(
- os.environ['SDKROOT'],
- 'Entitlements.plist')
- shutil.copy2(source_path, target_path)
- data = self._LoadPlistMaybeBinary(target_path)
- data = self._ExpandVariables(data, substitutions)
- if overrides:
- for key in overrides:
- if key not in data:
- data[key] = overrides[key]
- plistlib.writePlist(data, target_path)
- return target_path
-
- def _ExpandVariables(self, data, substitutions):
- """Expands variables "$(variable)" in data.
-
- Args:
- data: object, can be either string, list or dictionary
- substitutions: dictionary, variable substitutions to perform
-
- Returns:
-      Copy of data where each reference to "$(variable)" has been replaced
- by the corresponding value found in substitutions, or left intact if
- the key was not found.
- """
- if isinstance(data, str):
- for key, value in substitutions.iteritems():
- data = data.replace('$(%s)' % key, value)
- return data
- if isinstance(data, list):
- return [self._ExpandVariables(v, substitutions) for v in data]
- if isinstance(data, dict):
- return {k: self._ExpandVariables(data[k], substitutions) for k in data}
- return data
-
-if __name__ == '__main__':
- sys.exit(main(sys.argv[1:]))
diff --git a/test/addons-napi/2_function_arguments/build/Makefile b/test/addons-napi/2_function_arguments/build/Makefile
deleted file mode 100644
index 0e2a4d23079f0d..00000000000000
--- a/test/addons-napi/2_function_arguments/build/Makefile
+++ /dev/null
@@ -1,342 +0,0 @@
-# We borrow heavily from the kernel build setup, though we are simpler since
-# we don't have Kconfig tweaking settings on us.
-
-# The implicit make rules have it looking for RCS files, among other things.
-# We instead explicitly write all the rules we care about.
-# It's even quicker (saves ~200ms) to pass -r on the command line.
-MAKEFLAGS=-r
-
-# The source directory tree.
-srcdir := ..
-abs_srcdir := $(abspath $(srcdir))
-
-# The name of the builddir.
-builddir_name ?= .
-
-# The V=1 flag on command line makes us verbosely print command lines.
-ifdef V
- quiet=
-else
- quiet=quiet_
-endif
-
-# Specify BUILDTYPE=Release on the command line for a release build.
-BUILDTYPE ?= Release
-
-# Directory all our build output goes into.
-# Note that this must be two directories beneath src/ for unit tests to pass,
-# as they reach into the src/ directory for data with relative paths.
-builddir ?= $(builddir_name)/$(BUILDTYPE)
-abs_builddir := $(abspath $(builddir))
-depsdir := $(builddir)/.deps
-
-# Object output directory.
-obj := $(builddir)/obj
-abs_obj := $(abspath $(obj))
-
-# We build up a list of every single one of the targets so we can slurp in the
-# generated dependency rule Makefiles in one pass.
-all_deps :=
-
-
-
-CC.target ?= $(CC)
-CFLAGS.target ?= $(CPPFLAGS) $(CFLAGS)
-CXX.target ?= $(CXX)
-CXXFLAGS.target ?= $(CPPFLAGS) $(CXXFLAGS)
-LINK.target ?= $(LINK)
-LDFLAGS.target ?= $(LDFLAGS)
-AR.target ?= $(AR)
-
-# C++ apps need to be linked with g++.
-LINK ?= $(CXX.target)
-
-# TODO(evan): move all cross-compilation logic to gyp-time so we don't need
-# to replicate this environment fallback in make as well.
-CC.host ?= gcc
-CFLAGS.host ?= $(CPPFLAGS_host) $(CFLAGS_host)
-CXX.host ?= g++
-CXXFLAGS.host ?= $(CPPFLAGS_host) $(CXXFLAGS_host)
-LINK.host ?= $(CXX.host)
-LDFLAGS.host ?=
-AR.host ?= ar
-
-# Define a dir function that can handle spaces.
-# http://www.gnu.org/software/make/manual/make.html#Syntax-of-Functions
-# "leading spaces cannot appear in the text of the first argument as written.
-# These characters can be put into the argument value by variable substitution."
-empty :=
-space := $(empty) $(empty)
-
-# http://stackoverflow.com/questions/1189781/using-make-dir-or-notdir-on-a-path-with-spaces
-replace_spaces = $(subst $(space),?,$1)
-unreplace_spaces = $(subst ?,$(space),$1)
-dirx = $(call unreplace_spaces,$(dir $(call replace_spaces,$1)))
-
-# Flags to make gcc output dependency info. Note that you need to be
-# careful here to use the flags that ccache and distcc can understand.
-# We write to a dep file on the side first and then rename at the end
-# so we can't end up with a broken dep file.
-depfile = $(depsdir)/$(call replace_spaces,$@).d
-DEPFLAGS = -MMD -MF $(depfile).raw
-
-# We have to fixup the deps output in a few ways.
-# (1) the file output should mention the proper .o file.
-# ccache or distcc lose the path to the target, so we convert a rule of
-# the form:
-# foobar.o: DEP1 DEP2
-# into
-# path/to/foobar.o: DEP1 DEP2
-# (2) we want missing files not to cause us to fail to build.
-# We want to rewrite
-# foobar.o: DEP1 DEP2 \
-# DEP3
-# to
-# DEP1:
-# DEP2:
-# DEP3:
-# so if the files are missing, they're just considered phony rules.
-# We have to do some pretty insane escaping to get those backslashes
-# and dollar signs past make, the shell, and sed at the same time.
-# Doesn't work with spaces, but that's fine: .d files have spaces in
-# their names replaced with other characters.
-define fixup_dep
-# The depfile may not exist if the input file didn't have any #includes.
-touch $(depfile).raw
-# Fixup path as in (1).
-sed -e "s|^$(notdir $@)|$@|" $(depfile).raw >> $(depfile)
-# Add extra rules as in (2).
-# We remove slashes and replace spaces with new lines;
-# remove blank lines;
-# delete the first line and append a colon to the remaining lines.
-sed -e 's|\\||' -e 'y| |\n|' $(depfile).raw |\
- grep -v '^$$' |\
- sed -e 1d -e 's|$$|:|' \
- >> $(depfile)
-rm $(depfile).raw
-endef
-
-# Command definitions:
-# - cmd_foo is the actual command to run;
-# - quiet_cmd_foo is the brief-output summary of the command.
-
-quiet_cmd_cc = CC($(TOOLSET)) $@
-cmd_cc = $(CC.$(TOOLSET)) $(GYP_CFLAGS) $(DEPFLAGS) $(CFLAGS.$(TOOLSET)) -c -o $@ $<
-
-quiet_cmd_cxx = CXX($(TOOLSET)) $@
-cmd_cxx = $(CXX.$(TOOLSET)) $(GYP_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-
-quiet_cmd_objc = CXX($(TOOLSET)) $@
-cmd_objc = $(CC.$(TOOLSET)) $(GYP_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-quiet_cmd_objcxx = CXX($(TOOLSET)) $@
-cmd_objcxx = $(CXX.$(TOOLSET)) $(GYP_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-# Commands for precompiled header files.
-quiet_cmd_pch_c = CXX($(TOOLSET)) $@
-cmd_pch_c = $(CC.$(TOOLSET)) $(GYP_PCH_CFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-quiet_cmd_pch_cc = CXX($(TOOLSET)) $@
-cmd_pch_cc = $(CC.$(TOOLSET)) $(GYP_PCH_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-quiet_cmd_pch_m = CXX($(TOOLSET)) $@
-cmd_pch_m = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $<
-quiet_cmd_pch_mm = CXX($(TOOLSET)) $@
-cmd_pch_mm = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-# gyp-mac-tool is written next to the root Makefile by gyp.
-# Use $(4) for the command, since $(2) and $(3) are used as flags by do_cmd
-# already.
-quiet_cmd_mac_tool = MACTOOL $(4) $<
-cmd_mac_tool = ./gyp-mac-tool $(4) $< "$@"
-
-quiet_cmd_mac_package_framework = PACKAGE FRAMEWORK $@
-cmd_mac_package_framework = ./gyp-mac-tool package-framework "$@" $(4)
-
-quiet_cmd_infoplist = INFOPLIST $@
-cmd_infoplist = $(CC.$(TOOLSET)) -E -P -Wno-trigraphs -x c $(INFOPLIST_DEFINES) "$<" -o "$@"
-
-quiet_cmd_touch = TOUCH $@
-cmd_touch = touch $@
-
-quiet_cmd_copy = COPY $@
-# send stderr to /dev/null to ignore messages when linking directories.
-cmd_copy = rm -rf "$@" && cp -af "$<" "$@"
-
-quiet_cmd_alink = LIBTOOL-STATIC $@
-cmd_alink = rm -f $@ && ./gyp-mac-tool filter-libtool libtool $(GYP_LIBTOOLFLAGS) -static -o $@ $(filter %.o,$^)
-
-quiet_cmd_link = LINK($(TOOLSET)) $@
-cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS)
-
-quiet_cmd_solink = SOLINK($(TOOLSET)) $@
-cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS)
-
-quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@
-cmd_solink_module = $(LINK.$(TOOLSET)) -bundle $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS)
-
-
-# Define an escape_quotes function to escape single quotes.
-# This allows us to handle quotes properly as long as we always
-# use single quotes and escape_quotes.
-escape_quotes = $(subst ','\'',$(1))
-# This comment is here just to include a ' to unconfuse syntax highlighting.
-# Define an escape_vars function to escape '$' variable syntax.
-# This allows us to read/write command lines with shell variables (e.g.
-# $LD_LIBRARY_PATH), without triggering make substitution.
-escape_vars = $(subst $$,$$$$,$(1))
-# Helper that expands to a shell command to echo a string exactly as it is in
-# make. This uses printf instead of echo because printf's behaviour with respect
-# to escape sequences is more portable than echo's across different shells
-# (e.g., dash, bash).
-exact_echo = printf '%s\n' '$(call escape_quotes,$(1))'
-
-# Helper to compare the command we're about to run against the command
-# we logged the last time we ran the command. Produces an empty
-# string (false) when the commands match.
-# Tricky point: Make has no string-equality test function.
-# The kernel uses the following, but it seems like it would have false
-# positives, where one string reordered its arguments.
-# arg_check = $(strip $(filter-out $(cmd_$(1)), $(cmd_$@)) \
-# $(filter-out $(cmd_$@), $(cmd_$(1))))
-# We instead substitute each for the empty string into the other, and
-# say they're equal if both substitutions produce the empty string.
-# .d files contain ? instead of spaces, take that into account.
-command_changed = $(or $(subst $(cmd_$(1)),,$(cmd_$(call replace_spaces,$@))),\
- $(subst $(cmd_$(call replace_spaces,$@)),,$(cmd_$(1))))
-
-# Helper that is non-empty when a prerequisite changes.
-# Normally make does this implicitly, but we force rules to always run
-# so we can check their command lines.
-# $? -- new prerequisites
-# $| -- order-only dependencies
-prereq_changed = $(filter-out FORCE_DO_CMD,$(filter-out $|,$?))
-
-# Helper that executes all postbuilds until one fails.
-define do_postbuilds
- @E=0;\
- for p in $(POSTBUILDS); do\
- eval $$p;\
- E=$$?;\
- if [ $$E -ne 0 ]; then\
- break;\
- fi;\
- done;\
- if [ $$E -ne 0 ]; then\
- rm -rf "$@";\
- exit $$E;\
- fi
-endef
-
-# do_cmd: run a command via the above cmd_foo names, if necessary.
-# Should always run for a given target to handle command-line changes.
-# Second argument, if non-zero, makes it do asm/C/C++ dependency munging.
-# Third argument, if non-zero, makes it do POSTBUILDS processing.
-# Note: We intentionally do NOT call dirx for depfile, since it contains ? for
-# spaces already and dirx strips the ? characters.
-define do_cmd
-$(if $(or $(command_changed),$(prereq_changed)),
- @$(call exact_echo, $($(quiet)cmd_$(1)))
- @mkdir -p "$(call dirx,$@)" "$(dir $(depfile))"
- $(if $(findstring flock,$(word 2,$(cmd_$1))),
- @$(cmd_$(1))
- @echo " $(quiet_cmd_$(1)): Finished",
- @$(cmd_$(1))
- )
- @$(call exact_echo,$(call escape_vars,cmd_$(call replace_spaces,$@) := $(cmd_$(1)))) > $(depfile)
- @$(if $(2),$(fixup_dep))
- $(if $(and $(3), $(POSTBUILDS)),
- $(call do_postbuilds)
- )
-)
-endef
-
-# Declare the "all" target first so it is the default,
-# even though we don't have the deps yet.
-.PHONY: all
-all:
-
-# make looks for ways to re-generate included makefiles, but in our case, we
-# don't have a direct way. Explicitly telling make that it has nothing to do
-# for them makes it go faster.
-%.d: ;
-
-# Use FORCE_DO_CMD to force a target to run. Should be coupled with
-# do_cmd.
-.PHONY: FORCE_DO_CMD
-FORCE_DO_CMD:
-
-TOOLSET := target
-# Suffix rules, putting all outputs into $(obj).
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cpp FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cxx FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.m FORCE_DO_CMD
- @$(call do_cmd,objc,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.mm FORCE_DO_CMD
- @$(call do_cmd,objcxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.S FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.s FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-# Try building from generated source, too.
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cpp FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cxx FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.m FORCE_DO_CMD
- @$(call do_cmd,objc,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.mm FORCE_DO_CMD
- @$(call do_cmd,objcxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.S FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.s FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-$(obj).$(TOOLSET)/%.o: $(obj)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.cpp FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.cxx FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.m FORCE_DO_CMD
- @$(call do_cmd,objc,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.mm FORCE_DO_CMD
- @$(call do_cmd,objcxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.S FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.s FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-
-ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
- $(findstring $(join ^,$(prefix)),\
- $(join ^,binding.target.mk)))),)
- include binding.target.mk
-endif
-
-quiet_cmd_regen_makefile = ACTION Regenerating $@
-cmd_regen_makefile = cd $(srcdir); /Users/trott/io.js/deps/npm/node_modules/node-gyp/gyp/gyp_main.py -fmake --ignore-environment "--toplevel-dir=." -I/Users/trott/io.js/test/addons-napi/2_function_arguments/build/config.gypi -I/Users/trott/io.js/deps/npm/node_modules/node-gyp/addon.gypi -I/Users/trott/io.js/common.gypi "--depth=." "-Goutput_dir=." "--generator-output=build" "-Dlibrary=shared_library" "-Dvisibility=default" "-Dnode_root_dir=/Users/trott/io.js" "-Dnode_gyp_dir=/Users/trott/io.js/deps/npm/node_modules/node-gyp" "-Dnode_lib_file=/Users/trott/io.js/$(Configuration)/node.lib" "-Dmodule_root_dir=/Users/trott/io.js/test/addons-napi/2_function_arguments" "-Dnode_engine=v8" binding.gyp
-Makefile: $(srcdir)/../../../deps/npm/node_modules/node-gyp/addon.gypi $(srcdir)/build/config.gypi $(srcdir)/binding.gyp $(srcdir)/../../../common.gypi
- $(call do_cmd,regen_makefile)
-
-# "all" is a concatenation of the "all" targets from all the included
-# sub-makefiles. This is just here to clarify.
-all:
-
-# Add in dependency-tracking rules. $(all_deps) is the list of every single
-# target in our tree. Only consider the ones with .d (dependency) info:
-d_files := $(wildcard $(foreach f,$(all_deps),$(depsdir)/$(f).d))
-ifneq ($(d_files),)
- include $(d_files)
-endif
diff --git a/test/addons-napi/2_function_arguments/build/binding.Makefile b/test/addons-napi/2_function_arguments/build/binding.Makefile
deleted file mode 100644
index 0556c70f61905f..00000000000000
--- a/test/addons-napi/2_function_arguments/build/binding.Makefile
+++ /dev/null
@@ -1,6 +0,0 @@
-# This file is generated by gyp; do not edit.
-
-export builddir_name ?= ./build/.
-.PHONY: all
-all:
- $(MAKE) binding
diff --git a/test/addons-napi/2_function_arguments/build/binding.target.mk b/test/addons-napi/2_function_arguments/build/binding.target.mk
deleted file mode 100644
index a707bab99ab136..00000000000000
--- a/test/addons-napi/2_function_arguments/build/binding.target.mk
+++ /dev/null
@@ -1,178 +0,0 @@
-# This file is generated by gyp; do not edit.
-
-TOOLSET := target
-TARGET := binding
-DEFS_Debug := \
- '-DNODE_GYP_MODULE_NAME=binding' \
- '-DUSING_UV_SHARED=1' \
- '-DUSING_V8_SHARED=1' \
- '-DV8_DEPRECATION_WARNINGS=1' \
- '-D_DARWIN_USE_64_BIT_INODE=1' \
- '-D_LARGEFILE_SOURCE' \
- '-D_FILE_OFFSET_BITS=64' \
- '-DBUILDING_NODE_EXTENSION' \
- '-DDEBUG' \
- '-D_DEBUG' \
- '-DV8_ENABLE_CHECKS'
-
-# Flags passed to all source files.
-CFLAGS_Debug := \
- -O0 \
- -gdwarf-2 \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -Wall \
- -Wendif-labels \
- -W \
- -Wno-unused-parameter
-
-# Flags passed to only C files.
-CFLAGS_C_Debug := \
- -fno-strict-aliasing
-
-# Flags passed to only C++ files.
-CFLAGS_CC_Debug := \
- -std=gnu++0x \
- -stdlib=libc++ \
- -fno-rtti \
- -fno-exceptions \
- -fno-threadsafe-statics \
- -fno-strict-aliasing
-
-# Flags passed to only ObjC files.
-CFLAGS_OBJC_Debug :=
-
-# Flags passed to only ObjC++ files.
-CFLAGS_OBJCC_Debug :=
-
-INCS_Debug := \
- -I/Users/trott/io.js/include/node \
- -I/Users/trott/io.js/src \
- -I/Users/trott/io.js/deps/uv/include \
- -I/Users/trott/io.js/deps/v8/include
-
-DEFS_Release := \
- '-DNODE_GYP_MODULE_NAME=binding' \
- '-DUSING_UV_SHARED=1' \
- '-DUSING_V8_SHARED=1' \
- '-DV8_DEPRECATION_WARNINGS=1' \
- '-D_DARWIN_USE_64_BIT_INODE=1' \
- '-D_LARGEFILE_SOURCE' \
- '-D_FILE_OFFSET_BITS=64' \
- '-DBUILDING_NODE_EXTENSION'
-
-# Flags passed to all source files.
-CFLAGS_Release := \
- -Os \
- -gdwarf-2 \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -Wall \
- -Wendif-labels \
- -W \
- -Wno-unused-parameter
-
-# Flags passed to only C files.
-CFLAGS_C_Release := \
- -fno-strict-aliasing
-
-# Flags passed to only C++ files.
-CFLAGS_CC_Release := \
- -std=gnu++0x \
- -stdlib=libc++ \
- -fno-rtti \
- -fno-exceptions \
- -fno-threadsafe-statics \
- -fno-strict-aliasing
-
-# Flags passed to only ObjC files.
-CFLAGS_OBJC_Release :=
-
-# Flags passed to only ObjC++ files.
-CFLAGS_OBJCC_Release :=
-
-INCS_Release := \
- -I/Users/trott/io.js/include/node \
- -I/Users/trott/io.js/src \
- -I/Users/trott/io.js/deps/uv/include \
- -I/Users/trott/io.js/deps/v8/include
-
-OBJS := \
- $(obj).target/$(TARGET)/binding.o
-
-# Add to the list of files we specially track dependencies for.
-all_deps += $(OBJS)
-
-# CFLAGS et al overrides must be target-local.
-# See "Target-specific Variable Values" in the GNU Make manual.
-$(OBJS): TOOLSET := $(TOOLSET)
-$(OBJS): GYP_CFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE))
-$(OBJS): GYP_CXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE))
-$(OBJS): GYP_OBJCFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) $(CFLAGS_OBJC_$(BUILDTYPE))
-$(OBJS): GYP_OBJCXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) $(CFLAGS_OBJCC_$(BUILDTYPE))
-
-# Suffix rules, putting all outputs into $(obj).
-
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-# Try building from generated source, too.
-
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-# End of this set of suffix rules
-### Rules for final target.
-LDFLAGS_Debug := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -L$(builddir) \
- -stdlib=libc++
-
-LIBTOOLFLAGS_Debug := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first
-
-LDFLAGS_Release := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -L$(builddir) \
- -stdlib=libc++
-
-LIBTOOLFLAGS_Release := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first
-
-LIBS :=
-
-$(builddir)/binding.node: GYP_LDFLAGS := $(LDFLAGS_$(BUILDTYPE))
-$(builddir)/binding.node: LIBS := $(LIBS)
-$(builddir)/binding.node: GYP_LIBTOOLFLAGS := $(LIBTOOLFLAGS_$(BUILDTYPE))
-$(builddir)/binding.node: TOOLSET := $(TOOLSET)
-$(builddir)/binding.node: $(OBJS) FORCE_DO_CMD
- $(call do_cmd,solink_module)
-
-all_deps += $(builddir)/binding.node
-# Add target alias
-.PHONY: binding
-binding: $(builddir)/binding.node
-
-# Short alias for building this executable.
-.PHONY: binding.node
-binding.node: $(builddir)/binding.node
-
-# Add executable to "all" target.
-.PHONY: all
-all: $(builddir)/binding.node
-
diff --git a/test/addons-napi/2_function_arguments/build/config.gypi b/test/addons-napi/2_function_arguments/build/config.gypi
deleted file mode 100644
index 6a35cfccbb772b..00000000000000
--- a/test/addons-napi/2_function_arguments/build/config.gypi
+++ /dev/null
@@ -1,70 +0,0 @@
-# Do not edit. File was generated by node-gyp's "configure" step
-{
- "target_defaults": {
- "cflags": [],
- "default_configuration": "Release",
- "defines": [],
- "include_dirs": [],
- "libraries": []
- },
- "variables": {
- "asan": 0,
- "coverage": "false",
- "debug_devtools": "node",
- "debug_http2": "false",
- "debug_nghttp2": "false",
- "force_dynamic_crt": 0,
- "host_arch": "x64",
- "icu_data_file": "icudt59l.dat",
- "icu_data_in": "../../deps/icu-small/source/data/in/icudt59l.dat",
- "icu_endianness": "l",
- "icu_gyp_path": "tools/icu/icu-generic.gyp",
- "icu_locales": "en,root",
- "icu_path": "deps/icu-small",
- "icu_small": "true",
- "icu_ver_major": "59",
- "llvm_version": 0,
- "node_byteorder": "little",
- "node_enable_d8": "false",
- "node_enable_v8_vtunejit": "false",
- "node_install_npm": "true",
- "node_module_version": 58,
- "node_no_browser_globals": "false",
- "node_prefix": "/usr/local",
- "node_release_urlbase": "",
- "node_shared": "false",
- "node_shared_cares": "false",
- "node_shared_http_parser": "false",
- "node_shared_libuv": "false",
- "node_shared_openssl": "false",
- "node_shared_zlib": "false",
- "node_tag": "",
- "node_use_bundled_v8": "true",
- "node_use_dtrace": "true",
- "node_use_etw": "false",
- "node_use_lttng": "false",
- "node_use_openssl": "true",
- "node_use_perfctr": "false",
- "node_use_v8_platform": "true",
- "node_without_node_options": "false",
- "openssl_fips": "",
- "openssl_no_asm": 0,
- "shlib_suffix": "58.dylib",
- "target_arch": "x64",
- "uv_parent_path": "/deps/uv/",
- "uv_use_dtrace": "true",
- "v8_enable_gdbjit": 0,
- "v8_enable_i18n_support": 1,
- "v8_enable_inspector": 1,
- "v8_no_strict_aliasing": 1,
- "v8_optimized_debug": 0,
- "v8_promise_internal_field_count": 1,
- "v8_random_seed": 0,
- "v8_trace_maps": 0,
- "v8_use_snapshot": "true",
- "want_separate_host_toolset": 0,
- "xcode_version": "8.0",
- "nodedir": "/Users/trott/io.js",
- "standalone_static_library": 1
- }
-}
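config.gypi, also deleted here, is written by node-gyp's "configure" step and handed back to gyp via -I when the Makefile is regenerated. The file is a Python-style dict literal preceded by a comment line, so for ad-hoc inspection it can be parsed without gyp itself; a minimal sketch (load_gypi is a hypothetical helper, not part of node-gyp):

import ast

def load_gypi(path):
    # Drop "#" comment lines, then evaluate the remaining dict literal.
    with open(path) as f:
        text = "".join(line for line in f if not line.lstrip().startswith("#"))
    return ast.literal_eval(text)

# Example: cfg = load_gypi("build/config.gypi")
#          print(cfg["variables"]["target_arch"])   # e.g. "x64"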
diff --git a/test/addons-napi/2_function_arguments/build/gyp-mac-tool b/test/addons-napi/2_function_arguments/build/gyp-mac-tool
deleted file mode 100755
index 8ef02b0493a003..00000000000000
--- a/test/addons-napi/2_function_arguments/build/gyp-mac-tool
+++ /dev/null
@@ -1,611 +0,0 @@
-#!/usr/bin/env python
-# Generated by gyp. Do not edit.
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Utility functions to perform Xcode-style build steps.
-
-These functions are executed via gyp-mac-tool when using the Makefile generator.
-"""
-
-import fcntl
-import fnmatch
-import glob
-import json
-import os
-import plistlib
-import re
-import shutil
-import string
-import subprocess
-import sys
-import tempfile
-
-
-def main(args):
- executor = MacTool()
- exit_code = executor.Dispatch(args)
- if exit_code is not None:
- sys.exit(exit_code)
-
-
-class MacTool(object):
- """This class performs all the Mac tooling steps. The methods can either be
- executed directly, or dispatched from an argument list."""
-
- def Dispatch(self, args):
- """Dispatches a string command to a method."""
- if len(args) < 1:
- raise Exception("Not enough arguments")
-
- method = "Exec%s" % self._CommandifyName(args[0])
- return getattr(self, method)(*args[1:])
-
- def _CommandifyName(self, name_string):
- """Transforms a tool name like copy-info-plist to CopyInfoPlist"""
- return name_string.title().replace('-', '')
-
- def ExecCopyBundleResource(self, source, dest, convert_to_binary):
- """Copies a resource file to the bundle/Resources directory, performing any
- necessary compilation on each resource."""
- extension = os.path.splitext(source)[1].lower()
- if os.path.isdir(source):
- # Copy tree.
- # TODO(thakis): This copies file attributes like mtime, while the
- # single-file branch below doesn't. This should probably be changed to
- # be consistent with the single-file branch.
- if os.path.exists(dest):
- shutil.rmtree(dest)
- shutil.copytree(source, dest)
- elif extension == '.xib':
- return self._CopyXIBFile(source, dest)
- elif extension == '.storyboard':
- return self._CopyXIBFile(source, dest)
- elif extension == '.strings':
- self._CopyStringsFile(source, dest, convert_to_binary)
- else:
- shutil.copy(source, dest)
-
- def _CopyXIBFile(self, source, dest):
- """Compiles a XIB file with ibtool into a binary plist in the bundle."""
-
- # ibtool sometimes crashes with relative paths. See crbug.com/314728.
- base = os.path.dirname(os.path.realpath(__file__))
- if os.path.relpath(source):
- source = os.path.join(base, source)
- if os.path.relpath(dest):
- dest = os.path.join(base, dest)
-
- args = ['xcrun', 'ibtool', '--errors', '--warnings', '--notices',
- '--output-format', 'human-readable-text', '--compile', dest, source]
- ibtool_section_re = re.compile(r'/\*.*\*/')
- ibtool_re = re.compile(r'.*note:.*is clipping its content')
- ibtoolout = subprocess.Popen(args, stdout=subprocess.PIPE)
- current_section_header = None
- for line in ibtoolout.stdout:
- if ibtool_section_re.match(line):
- current_section_header = line
- elif not ibtool_re.match(line):
- if current_section_header:
- sys.stdout.write(current_section_header)
- current_section_header = None
- sys.stdout.write(line)
- return ibtoolout.returncode
-
- def _ConvertToBinary(self, dest):
- subprocess.check_call([
- 'xcrun', 'plutil', '-convert', 'binary1', '-o', dest, dest])
-
- def _CopyStringsFile(self, source, dest, convert_to_binary):
- """Copies a .strings file using iconv to reconvert the input into UTF-16."""
- input_code = self._DetectInputEncoding(source) or "UTF-8"
-
- # Xcode's CpyCopyStringsFile / builtin-copyStrings seems to call
- # CFPropertyListCreateFromXMLData() behind the scenes; at least it prints
- # CFPropertyListCreateFromXMLData(): Old-style plist parser: missing
- # semicolon in dictionary.
- # on invalid files. Do the same kind of validation.
- import CoreFoundation
- s = open(source, 'rb').read()
- d = CoreFoundation.CFDataCreate(None, s, len(s))
- _, error = CoreFoundation.CFPropertyListCreateFromXMLData(None, d, 0, None)
- if error:
- return
-
- fp = open(dest, 'wb')
- fp.write(s.decode(input_code).encode('UTF-16'))
- fp.close()
-
- if convert_to_binary == 'True':
- self._ConvertToBinary(dest)
-
- def _DetectInputEncoding(self, file_name):
- """Reads the first few bytes from file_name and tries to guess the text
- encoding. Returns None as a guess if it can't detect it."""
- fp = open(file_name, 'rb')
- try:
- header = fp.read(3)
-    except Exception:
- fp.close()
- return None
- fp.close()
- if header.startswith("\xFE\xFF"):
- return "UTF-16"
- elif header.startswith("\xFF\xFE"):
- return "UTF-16"
- elif header.startswith("\xEF\xBB\xBF"):
- return "UTF-8"
- else:
- return None
-
- def ExecCopyInfoPlist(self, source, dest, convert_to_binary, *keys):
- """Copies the |source| Info.plist to the destination directory |dest|."""
- # Read the source Info.plist into memory.
- fd = open(source, 'r')
- lines = fd.read()
- fd.close()
-
- # Insert synthesized key/value pairs (e.g. BuildMachineOSBuild).
- plist = plistlib.readPlistFromString(lines)
- if keys:
- plist = dict(plist.items() + json.loads(keys[0]).items())
- lines = plistlib.writePlistToString(plist)
-
- # Go through all the environment variables and replace them as variables in
- # the file.
- IDENT_RE = re.compile(r'[/\s]')
- for key in os.environ:
- if key.startswith('_'):
- continue
- evar = '${%s}' % key
- evalue = os.environ[key]
- lines = string.replace(lines, evar, evalue)
-
-      # Xcode supports various suffixes on environment variables, which are
- # all undocumented. :rfc1034identifier is used in the standard project
- # template these days, and :identifier was used earlier. They are used to
- # convert non-url characters into things that look like valid urls --
- # except that the replacement character for :identifier, '_' isn't valid
- # in a URL either -- oops, hence :rfc1034identifier was born.
- evar = '${%s:identifier}' % key
- evalue = IDENT_RE.sub('_', os.environ[key])
- lines = string.replace(lines, evar, evalue)
-
- evar = '${%s:rfc1034identifier}' % key
- evalue = IDENT_RE.sub('-', os.environ[key])
- lines = string.replace(lines, evar, evalue)
-
- # Remove any keys with values that haven't been replaced.
- lines = lines.split('\n')
- for i in range(len(lines)):
- if lines[i].strip().startswith("${"):
- lines[i] = None
- lines[i - 1] = None
- lines = '\n'.join(filter(lambda x: x is not None, lines))
-
- # Write out the file with variables replaced.
- fd = open(dest, 'w')
- fd.write(lines)
- fd.close()
-
-    # Now that the Info.plist file has been "compiled", write out the
-    # PkgInfo file.
- self._WritePkgInfo(dest)
-
- if convert_to_binary == 'True':
- self._ConvertToBinary(dest)
-
- def _WritePkgInfo(self, info_plist):
- """This writes the PkgInfo file from the data stored in Info.plist."""
- plist = plistlib.readPlist(info_plist)
- if not plist:
- return
-
- # Only create PkgInfo for executable types.
- package_type = plist['CFBundlePackageType']
- if package_type != 'APPL':
- return
-
- # The format of PkgInfo is eight characters, representing the bundle type
- # and bundle signature, each four characters. If that is missing, four
- # '?' characters are used instead.
- signature_code = plist.get('CFBundleSignature', '????')
- if len(signature_code) != 4: # Wrong length resets everything, too.
- signature_code = '?' * 4
-
- dest = os.path.join(os.path.dirname(info_plist), 'PkgInfo')
- fp = open(dest, 'w')
- fp.write('%s%s' % (package_type, signature_code))
- fp.close()
-
- def ExecFlock(self, lockfile, *cmd_list):
- """Emulates the most basic behavior of Linux's flock(1)."""
- # Rely on exception handling to report errors.
- fd = os.open(lockfile, os.O_RDONLY|os.O_NOCTTY|os.O_CREAT, 0o666)
- fcntl.flock(fd, fcntl.LOCK_EX)
- return subprocess.call(cmd_list)
-
- def ExecFilterLibtool(self, *cmd_list):
- """Calls libtool and filters out '/path/to/libtool: file: foo.o has no
- symbols'."""
- libtool_re = re.compile(r'^.*libtool: file: .* has no symbols$')
- libtool_re5 = re.compile(
- r'^.*libtool: warning for library: ' +
- r'.* the table of contents is empty ' +
- r'\(no object file members in the library define global symbols\)$')
- env = os.environ.copy()
- # Ref:
- # http://www.opensource.apple.com/source/cctools/cctools-809/misc/libtool.c
- # The problem with this flag is that it resets the file mtime on the file to
- # epoch=0, e.g. 1970-1-1 or 1969-12-31 depending on timezone.
- env['ZERO_AR_DATE'] = '1'
- libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env)
- _, err = libtoolout.communicate()
- for line in err.splitlines():
- if not libtool_re.match(line) and not libtool_re5.match(line):
- print >>sys.stderr, line
- # Unconditionally touch the output .a file on the command line if present
- # and the command succeeded. A bit hacky.
- if not libtoolout.returncode:
- for i in range(len(cmd_list) - 1):
- if cmd_list[i] == "-o" and cmd_list[i+1].endswith('.a'):
- os.utime(cmd_list[i+1], None)
- break
- return libtoolout.returncode
-
- def ExecPackageFramework(self, framework, version):
- """Takes a path to Something.framework and the Current version of that and
- sets up all the symlinks."""
- # Find the name of the binary based on the part before the ".framework".
- binary = os.path.basename(framework).split('.')[0]
-
- CURRENT = 'Current'
- RESOURCES = 'Resources'
- VERSIONS = 'Versions'
-
- if not os.path.exists(os.path.join(framework, VERSIONS, version, binary)):
- # Binary-less frameworks don't seem to contain symlinks (see e.g.
- # chromium's out/Debug/org.chromium.Chromium.manifest/ bundle).
- return
-
- # Move into the framework directory to set the symlinks correctly.
- pwd = os.getcwd()
- os.chdir(framework)
-
- # Set up the Current version.
- self._Relink(version, os.path.join(VERSIONS, CURRENT))
-
- # Set up the root symlinks.
- self._Relink(os.path.join(VERSIONS, CURRENT, binary), binary)
- self._Relink(os.path.join(VERSIONS, CURRENT, RESOURCES), RESOURCES)
-
- # Back to where we were before!
- os.chdir(pwd)
-
- def _Relink(self, dest, link):
- """Creates a symlink to |dest| named |link|. If |link| already exists,
- it is overwritten."""
- if os.path.lexists(link):
- os.remove(link)
- os.symlink(dest, link)
-
- def ExecCompileXcassets(self, keys, *inputs):
- """Compiles multiple .xcassets files into a single .car file.
-
-    This invokes 'actool' to compile all the input .xcassets files. The
-    |keys| argument is a json-encoded dictionary of extra arguments to
-    pass to 'actool' when the asset catalogs contain an application icon
-    or a launch image.
-
-    Note that 'actool' does not create the Assets.car file if the asset
-    catalogs do not contain any imageset.
- """
- command_line = [
- 'xcrun', 'actool', '--output-format', 'human-readable-text',
- '--compress-pngs', '--notices', '--warnings', '--errors',
- ]
- is_iphone_target = 'IPHONEOS_DEPLOYMENT_TARGET' in os.environ
- if is_iphone_target:
- platform = os.environ['CONFIGURATION'].split('-')[-1]
- if platform not in ('iphoneos', 'iphonesimulator'):
- platform = 'iphonesimulator'
- command_line.extend([
- '--platform', platform, '--target-device', 'iphone',
- '--target-device', 'ipad', '--minimum-deployment-target',
- os.environ['IPHONEOS_DEPLOYMENT_TARGET'], '--compile',
- os.path.abspath(os.environ['CONTENTS_FOLDER_PATH']),
- ])
- else:
- command_line.extend([
- '--platform', 'macosx', '--target-device', 'mac',
- '--minimum-deployment-target', os.environ['MACOSX_DEPLOYMENT_TARGET'],
- '--compile',
- os.path.abspath(os.environ['UNLOCALIZED_RESOURCES_FOLDER_PATH']),
- ])
- if keys:
- keys = json.loads(keys)
- for key, value in keys.iteritems():
- arg_name = '--' + key
- if isinstance(value, bool):
- if value:
- command_line.append(arg_name)
- elif isinstance(value, list):
- for v in value:
- command_line.append(arg_name)
- command_line.append(str(v))
- else:
- command_line.append(arg_name)
- command_line.append(str(value))
-    # Note: actool crashes if input paths are relative, so use os.path.abspath
- # to get absolute path name for inputs.
- command_line.extend(map(os.path.abspath, inputs))
- subprocess.check_call(command_line)
-
- def ExecMergeInfoPlist(self, output, *inputs):
- """Merge multiple .plist files into a single .plist file."""
- merged_plist = {}
- for path in inputs:
- plist = self._LoadPlistMaybeBinary(path)
- self._MergePlist(merged_plist, plist)
- plistlib.writePlist(merged_plist, output)
-
- def ExecCodeSignBundle(self, key, resource_rules, entitlements, provisioning):
- """Code sign a bundle.
-
- This function tries to code sign an iOS bundle, following the same
- algorithm as Xcode:
- 1. copy ResourceRules.plist from the user or the SDK into the bundle,
- 2. pick the provisioning profile that best match the bundle identifier,
- and copy it into the bundle as embedded.mobileprovision,
- 3. copy Entitlements.plist from user or SDK next to the bundle,
- 4. code sign the bundle.
- """
- resource_rules_path = self._InstallResourceRules(resource_rules)
- substitutions, overrides = self._InstallProvisioningProfile(
- provisioning, self._GetCFBundleIdentifier())
- entitlements_path = self._InstallEntitlements(
- entitlements, substitutions, overrides)
- subprocess.check_call([
- 'codesign', '--force', '--sign', key, '--resource-rules',
- resource_rules_path, '--entitlements', entitlements_path,
- os.path.join(
- os.environ['TARGET_BUILD_DIR'],
- os.environ['FULL_PRODUCT_NAME'])])
-
- def _InstallResourceRules(self, resource_rules):
- """Installs ResourceRules.plist from user or SDK into the bundle.
-
- Args:
- resource_rules: string, optional, path to the ResourceRules.plist file
- to use, default to "${SDKROOT}/ResourceRules.plist"
-
- Returns:
- Path to the copy of ResourceRules.plist into the bundle.
- """
- source_path = resource_rules
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['CONTENTS_FOLDER_PATH'],
- 'ResourceRules.plist')
- if not source_path:
- source_path = os.path.join(
- os.environ['SDKROOT'], 'ResourceRules.plist')
- shutil.copy2(source_path, target_path)
- return target_path
-
- def _InstallProvisioningProfile(self, profile, bundle_identifier):
- """Installs embedded.mobileprovision into the bundle.
-
- Args:
- profile: string, optional, short name of the .mobileprovision file
- to use, if empty or the file is missing, the best file installed
- will be used
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
-
- Returns:
-      A tuple containing two dictionaries: variable substitutions and values
-      to override when generating the entitlements file.
- """
- source_path, provisioning_data, team_id = self._FindProvisioningProfile(
- profile, bundle_identifier)
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['CONTENTS_FOLDER_PATH'],
- 'embedded.mobileprovision')
- shutil.copy2(source_path, target_path)
- substitutions = self._GetSubstitutions(bundle_identifier, team_id + '.')
- return substitutions, provisioning_data['Entitlements']
-
- def _FindProvisioningProfile(self, profile, bundle_identifier):
- """Finds the .mobileprovision file to use for signing the bundle.
-
-    Checks all the installed provisioning profiles (or, if the user specified
-    the PROVISIONING_PROFILE variable, only that one) and selects the most
-    specific one that corresponds to the bundle identifier.
-
- Args:
- profile: string, optional, short name of the .mobileprovision file
- to use, if empty or the file is missing, the best file installed
- will be used
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
-
- Returns:
- A tuple of the path to the selected provisioning profile, the data of
- the embedded plist in the provisioning profile and the team identifier
- to use for code signing.
-
- Raises:
- SystemExit: if no .mobileprovision can be used to sign the bundle.
- """
- profiles_dir = os.path.join(
- os.environ['HOME'], 'Library', 'MobileDevice', 'Provisioning Profiles')
- if not os.path.isdir(profiles_dir):
- print >>sys.stderr, (
- 'cannot find mobile provisioning for %s' % bundle_identifier)
- sys.exit(1)
- provisioning_profiles = None
- if profile:
- profile_path = os.path.join(profiles_dir, profile + '.mobileprovision')
- if os.path.exists(profile_path):
- provisioning_profiles = [profile_path]
- if not provisioning_profiles:
- provisioning_profiles = glob.glob(
- os.path.join(profiles_dir, '*.mobileprovision'))
- valid_provisioning_profiles = {}
- for profile_path in provisioning_profiles:
- profile_data = self._LoadProvisioningProfile(profile_path)
- app_id_pattern = profile_data.get(
- 'Entitlements', {}).get('application-identifier', '')
- for team_identifier in profile_data.get('TeamIdentifier', []):
- app_id = '%s.%s' % (team_identifier, bundle_identifier)
- if fnmatch.fnmatch(app_id, app_id_pattern):
- valid_provisioning_profiles[app_id_pattern] = (
- profile_path, profile_data, team_identifier)
- if not valid_provisioning_profiles:
- print >>sys.stderr, (
- 'cannot find mobile provisioning for %s' % bundle_identifier)
- sys.exit(1)
- # If the user has multiple provisioning profiles installed that can be
- # used for ${bundle_identifier}, pick the most specific one (ie. the
- # provisioning profile whose pattern is the longest).
- selected_key = max(valid_provisioning_profiles, key=lambda v: len(v))
- return valid_provisioning_profiles[selected_key]
-
- def _LoadProvisioningProfile(self, profile_path):
- """Extracts the plist embedded in a provisioning profile.
-
- Args:
- profile_path: string, path to the .mobileprovision file
-
- Returns:
- Content of the plist embedded in the provisioning profile as a dictionary.
- """
- with tempfile.NamedTemporaryFile() as temp:
- subprocess.check_call([
- 'security', 'cms', '-D', '-i', profile_path, '-o', temp.name])
- return self._LoadPlistMaybeBinary(temp.name)
-
- def _MergePlist(self, merged_plist, plist):
- """Merge |plist| into |merged_plist|."""
- for key, value in plist.iteritems():
- if isinstance(value, dict):
- merged_value = merged_plist.get(key, {})
- if isinstance(merged_value, dict):
- self._MergePlist(merged_value, value)
- merged_plist[key] = merged_value
- else:
- merged_plist[key] = value
- else:
- merged_plist[key] = value
-
- def _LoadPlistMaybeBinary(self, plist_path):
- """Loads into a memory a plist possibly encoded in binary format.
-
- This is a wrapper around plistlib.readPlist that tries to convert the
- plist to the XML format if it can't be parsed (assuming that it is in
- the binary format).
-
- Args:
- plist_path: string, path to a plist file, in XML or binary format
-
- Returns:
- Content of the plist as a dictionary.
- """
- try:
- # First, try to read the file using plistlib that only supports XML,
- # and if an exception is raised, convert a temporary copy to XML and
- # load that copy.
- return plistlib.readPlist(plist_path)
- except:
- pass
- with tempfile.NamedTemporaryFile() as temp:
- shutil.copy2(plist_path, temp.name)
- subprocess.check_call(['plutil', '-convert', 'xml1', temp.name])
- return plistlib.readPlist(temp.name)
-
- def _GetSubstitutions(self, bundle_identifier, app_identifier_prefix):
- """Constructs a dictionary of variable substitutions for Entitlements.plist.
-
- Args:
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
- app_identifier_prefix: string, value for AppIdentifierPrefix
-
- Returns:
- Dictionary of substitutions to apply when generating Entitlements.plist.
- """
- return {
- 'CFBundleIdentifier': bundle_identifier,
- 'AppIdentifierPrefix': app_identifier_prefix,
- }
-
- def _GetCFBundleIdentifier(self):
- """Extracts CFBundleIdentifier value from Info.plist in the bundle.
-
- Returns:
- Value of CFBundleIdentifier in the Info.plist located in the bundle.
- """
- info_plist_path = os.path.join(
- os.environ['TARGET_BUILD_DIR'],
- os.environ['INFOPLIST_PATH'])
- info_plist_data = self._LoadPlistMaybeBinary(info_plist_path)
- return info_plist_data['CFBundleIdentifier']
-
- def _InstallEntitlements(self, entitlements, substitutions, overrides):
- """Generates and install the ${BundleName}.xcent entitlements file.
-
- Expands variables "$(variable)" pattern in the source entitlements file,
- add extra entitlements defined in the .mobileprovision file and the copy
- the generated plist to "${BundlePath}.xcent".
-
- Args:
- entitlements: string, optional, path to the Entitlements.plist template
- to use, defaults to "${SDKROOT}/Entitlements.plist"
- substitutions: dictionary, variable substitutions
- overrides: dictionary, values to add to the entitlements
-
- Returns:
- Path to the generated entitlements file.
- """
- source_path = entitlements
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['PRODUCT_NAME'] + '.xcent')
- if not source_path:
- source_path = os.path.join(
- os.environ['SDKROOT'],
- 'Entitlements.plist')
- shutil.copy2(source_path, target_path)
- data = self._LoadPlistMaybeBinary(target_path)
- data = self._ExpandVariables(data, substitutions)
- if overrides:
- for key in overrides:
- if key not in data:
- data[key] = overrides[key]
- plistlib.writePlist(data, target_path)
- return target_path
-
- def _ExpandVariables(self, data, substitutions):
- """Expands variables "$(variable)" in data.
-
- Args:
- data: object, can be either string, list or dictionary
- substitutions: dictionary, variable substitutions to perform
-
- Returns:
-      Copy of data where each reference to "$(variable)" has been replaced
- by the corresponding value found in substitutions, or left intact if
- the key was not found.
- """
- if isinstance(data, str):
- for key, value in substitutions.iteritems():
- data = data.replace('$(%s)' % key, value)
- return data
- if isinstance(data, list):
- return [self._ExpandVariables(v, substitutions) for v in data]
- if isinstance(data, dict):
- return {k: self._ExpandVariables(data[k], substitutions) for k in data}
- return data
-
-if __name__ == '__main__':
- sys.exit(main(sys.argv[1:]))
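gyp-mac-tool, deleted above, is copied next to the generated Makefile and invoked by rules such as cmd_mac_tool and cmd_alink. Its entry point is a simple name-based dispatcher: the first argument, e.g. "copy-info-plist" or "filter-libtool", is camel-cased by _CommandifyName and routed to the matching Exec* method. A self-contained sketch of that dispatch pattern:

class Tool(object):
    def Dispatch(self, args):
        # "copy-info-plist" -> "ExecCopyInfoPlist"; remaining args are passed through.
        if len(args) < 1:
            raise Exception("Not enough arguments")
        method = "Exec%s" % args[0].title().replace("-", "")
        return getattr(self, method)(*args[1:])

    def ExecCopyInfoPlist(self, source, dest):
        print("would copy %s -> %s" % (source, dest))

Tool().Dispatch(["copy-info-plist", "Info.plist", "out/Info.plist"])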
diff --git a/test/addons-napi/3_callbacks/build/Makefile b/test/addons-napi/3_callbacks/build/Makefile
deleted file mode 100644
index 1d8dde16395e8f..00000000000000
--- a/test/addons-napi/3_callbacks/build/Makefile
+++ /dev/null
@@ -1,342 +0,0 @@
-# We borrow heavily from the kernel build setup, though we are simpler since
-# we don't have Kconfig tweaking settings on us.
-
-# The implicit make rules have it looking for RCS files, among other things.
-# We instead explicitly write all the rules we care about.
-# It's even quicker (saves ~200ms) to pass -r on the command line.
-MAKEFLAGS=-r
-
-# The source directory tree.
-srcdir := ..
-abs_srcdir := $(abspath $(srcdir))
-
-# The name of the builddir.
-builddir_name ?= .
-
-# The V=1 flag on command line makes us verbosely print command lines.
-ifdef V
- quiet=
-else
- quiet=quiet_
-endif
-
-# Specify BUILDTYPE=Release on the command line for a release build.
-BUILDTYPE ?= Release
-
-# Directory all our build output goes into.
-# Note that this must be two directories beneath src/ for unit tests to pass,
-# as they reach into the src/ directory for data with relative paths.
-builddir ?= $(builddir_name)/$(BUILDTYPE)
-abs_builddir := $(abspath $(builddir))
-depsdir := $(builddir)/.deps
-
-# Object output directory.
-obj := $(builddir)/obj
-abs_obj := $(abspath $(obj))
-
-# We build up a list of every single one of the targets so we can slurp in the
-# generated dependency rule Makefiles in one pass.
-all_deps :=
-
-
-
-CC.target ?= $(CC)
-CFLAGS.target ?= $(CPPFLAGS) $(CFLAGS)
-CXX.target ?= $(CXX)
-CXXFLAGS.target ?= $(CPPFLAGS) $(CXXFLAGS)
-LINK.target ?= $(LINK)
-LDFLAGS.target ?= $(LDFLAGS)
-AR.target ?= $(AR)
-
-# C++ apps need to be linked with g++.
-LINK ?= $(CXX.target)
-
-# TODO(evan): move all cross-compilation logic to gyp-time so we don't need
-# to replicate this environment fallback in make as well.
-CC.host ?= gcc
-CFLAGS.host ?= $(CPPFLAGS_host) $(CFLAGS_host)
-CXX.host ?= g++
-CXXFLAGS.host ?= $(CPPFLAGS_host) $(CXXFLAGS_host)
-LINK.host ?= $(CXX.host)
-LDFLAGS.host ?=
-AR.host ?= ar
-
-# Define a dir function that can handle spaces.
-# http://www.gnu.org/software/make/manual/make.html#Syntax-of-Functions
-# "leading spaces cannot appear in the text of the first argument as written.
-# These characters can be put into the argument value by variable substitution."
-empty :=
-space := $(empty) $(empty)
-
-# http://stackoverflow.com/questions/1189781/using-make-dir-or-notdir-on-a-path-with-spaces
-replace_spaces = $(subst $(space),?,$1)
-unreplace_spaces = $(subst ?,$(space),$1)
-dirx = $(call unreplace_spaces,$(dir $(call replace_spaces,$1)))
-
-# Flags to make gcc output dependency info. Note that you need to be
-# careful here to use the flags that ccache and distcc can understand.
-# We write to a dep file on the side first and then rename at the end
-# so we can't end up with a broken dep file.
-depfile = $(depsdir)/$(call replace_spaces,$@).d
-DEPFLAGS = -MMD -MF $(depfile).raw
-
-# We have to fixup the deps output in a few ways.
-# (1) the file output should mention the proper .o file.
-# ccache or distcc lose the path to the target, so we convert a rule of
-# the form:
-# foobar.o: DEP1 DEP2
-# into
-# path/to/foobar.o: DEP1 DEP2
-# (2) we want missing files not to cause us to fail to build.
-# We want to rewrite
-# foobar.o: DEP1 DEP2 \
-# DEP3
-# to
-# DEP1:
-# DEP2:
-# DEP3:
-# so if the files are missing, they're just considered phony rules.
-# We have to do some pretty insane escaping to get those backslashes
-# and dollar signs past make, the shell, and sed at the same time.
-# Doesn't work with spaces, but that's fine: .d files have spaces in
-# their names replaced with other characters.
-define fixup_dep
-# The depfile may not exist if the input file didn't have any #includes.
-touch $(depfile).raw
-# Fixup path as in (1).
-sed -e "s|^$(notdir $@)|$@|" $(depfile).raw >> $(depfile)
-# Add extra rules as in (2).
-# We remove slashes and replace spaces with new lines;
-# remove blank lines;
-# delete the first line and append a colon to the remaining lines.
-sed -e 's|\\||' -e 'y| |\n|' $(depfile).raw |\
- grep -v '^$$' |\
- sed -e 1d -e 's|$$|:|' \
- >> $(depfile)
-rm $(depfile).raw
-endef
-
-# Command definitions:
-# - cmd_foo is the actual command to run;
-# - quiet_cmd_foo is the brief-output summary of the command.
-
-quiet_cmd_cc = CC($(TOOLSET)) $@
-cmd_cc = $(CC.$(TOOLSET)) $(GYP_CFLAGS) $(DEPFLAGS) $(CFLAGS.$(TOOLSET)) -c -o $@ $<
-
-quiet_cmd_cxx = CXX($(TOOLSET)) $@
-cmd_cxx = $(CXX.$(TOOLSET)) $(GYP_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-
-quiet_cmd_objc = CXX($(TOOLSET)) $@
-cmd_objc = $(CC.$(TOOLSET)) $(GYP_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-quiet_cmd_objcxx = CXX($(TOOLSET)) $@
-cmd_objcxx = $(CXX.$(TOOLSET)) $(GYP_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-# Commands for precompiled header files.
-quiet_cmd_pch_c = CXX($(TOOLSET)) $@
-cmd_pch_c = $(CC.$(TOOLSET)) $(GYP_PCH_CFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-quiet_cmd_pch_cc = CXX($(TOOLSET)) $@
-cmd_pch_cc = $(CC.$(TOOLSET)) $(GYP_PCH_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-quiet_cmd_pch_m = CXX($(TOOLSET)) $@
-cmd_pch_m = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $<
-quiet_cmd_pch_mm = CXX($(TOOLSET)) $@
-cmd_pch_mm = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-# gyp-mac-tool is written next to the root Makefile by gyp.
-# Use $(4) for the command, since $(2) and $(3) are used as flag by do_cmd
-# already.
-quiet_cmd_mac_tool = MACTOOL $(4) $<
-cmd_mac_tool = ./gyp-mac-tool $(4) $< "$@"
-
-quiet_cmd_mac_package_framework = PACKAGE FRAMEWORK $@
-cmd_mac_package_framework = ./gyp-mac-tool package-framework "$@" $(4)
-
-quiet_cmd_infoplist = INFOPLIST $@
-cmd_infoplist = $(CC.$(TOOLSET)) -E -P -Wno-trigraphs -x c $(INFOPLIST_DEFINES) "$<" -o "$@"
-
-quiet_cmd_touch = TOUCH $@
-cmd_touch = touch $@
-
-quiet_cmd_copy = COPY $@
-# send stderr to /dev/null to ignore messages when linking directories.
-cmd_copy = rm -rf "$@" && cp -af "$<" "$@"
-
-quiet_cmd_alink = LIBTOOL-STATIC $@
-cmd_alink = rm -f $@ && ./gyp-mac-tool filter-libtool libtool $(GYP_LIBTOOLFLAGS) -static -o $@ $(filter %.o,$^)
-
-quiet_cmd_link = LINK($(TOOLSET)) $@
-cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS)
-
-quiet_cmd_solink = SOLINK($(TOOLSET)) $@
-cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS)
-
-quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@
-cmd_solink_module = $(LINK.$(TOOLSET)) -bundle $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS)
-
-
-# Define an escape_quotes function to escape single quotes.
-# This allows us to handle quotes properly as long as we always
-# use single quotes and escape_quotes.
-escape_quotes = $(subst ','\'',$(1))
-# This comment is here just to include a ' to unconfuse syntax highlighting.
-# Define an escape_vars function to escape '$' variable syntax.
-# This allows us to read/write command lines with shell variables (e.g.
-# $LD_LIBRARY_PATH), without triggering make substitution.
-escape_vars = $(subst $$,$$$$,$(1))
-# Helper that expands to a shell command to echo a string exactly as it is in
-# make. This uses printf instead of echo because printf's behaviour with respect
-# to escape sequences is more portable than echo's across different shells
-# (e.g., dash, bash).
-exact_echo = printf '%s\n' '$(call escape_quotes,$(1))'
-
-# Helper to compare the command we're about to run against the command
-# we logged the last time we ran the command. Produces an empty
-# string (false) when the commands match.
-# Tricky point: Make has no string-equality test function.
-# The kernel uses the following, but it seems like it would have false
-# positives, where one string reordered its arguments.
-# arg_check = $(strip $(filter-out $(cmd_$(1)), $(cmd_$@)) \
-# $(filter-out $(cmd_$@), $(cmd_$(1))))
-# We instead substitute each for the empty string into the other, and
-# say they're equal if both substitutions produce the empty string.
-# .d files contain ? instead of spaces, take that into account.
-command_changed = $(or $(subst $(cmd_$(1)),,$(cmd_$(call replace_spaces,$@))),\
- $(subst $(cmd_$(call replace_spaces,$@)),,$(cmd_$(1))))
-
-# Helper that is non-empty when a prerequisite changes.
-# Normally make does this implicitly, but we force rules to always run
-# so we can check their command lines.
-# $? -- new prerequisites
-# $| -- order-only dependencies
-prereq_changed = $(filter-out FORCE_DO_CMD,$(filter-out $|,$?))
-
-# Helper that executes all postbuilds until one fails.
-define do_postbuilds
- @E=0;\
- for p in $(POSTBUILDS); do\
- eval $$p;\
- E=$$?;\
- if [ $$E -ne 0 ]; then\
- break;\
- fi;\
- done;\
- if [ $$E -ne 0 ]; then\
- rm -rf "$@";\
- exit $$E;\
- fi
-endef
-
-# do_cmd: run a command via the above cmd_foo names, if necessary.
-# Should always run for a given target to handle command-line changes.
-# Second argument, if non-zero, makes it do asm/C/C++ dependency munging.
-# Third argument, if non-zero, makes it do POSTBUILDS processing.
-# Note: We intentionally do NOT call dirx for depfile, since it contains ? for
-# spaces already and dirx strips the ? characters.
-define do_cmd
-$(if $(or $(command_changed),$(prereq_changed)),
- @$(call exact_echo, $($(quiet)cmd_$(1)))
- @mkdir -p "$(call dirx,$@)" "$(dir $(depfile))"
- $(if $(findstring flock,$(word 2,$(cmd_$1))),
- @$(cmd_$(1))
- @echo " $(quiet_cmd_$(1)): Finished",
- @$(cmd_$(1))
- )
- @$(call exact_echo,$(call escape_vars,cmd_$(call replace_spaces,$@) := $(cmd_$(1)))) > $(depfile)
- @$(if $(2),$(fixup_dep))
- $(if $(and $(3), $(POSTBUILDS)),
- $(call do_postbuilds)
- )
-)
-endef
-
-# Declare the "all" target first so it is the default,
-# even though we don't have the deps yet.
-.PHONY: all
-all:
-
-# make looks for ways to re-generate included makefiles, but in our case, we
-# don't have a direct way. Explicitly telling make that it has nothing to do
-# for them makes it go faster.
-%.d: ;
-
-# Use FORCE_DO_CMD to force a target to run. Should be coupled with
-# do_cmd.
-.PHONY: FORCE_DO_CMD
-FORCE_DO_CMD:
-
-TOOLSET := target
-# Suffix rules, putting all outputs into $(obj).
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cpp FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cxx FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.m FORCE_DO_CMD
- @$(call do_cmd,objc,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.mm FORCE_DO_CMD
- @$(call do_cmd,objcxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.S FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.s FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-# Try building from generated source, too.
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cpp FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cxx FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.m FORCE_DO_CMD
- @$(call do_cmd,objc,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.mm FORCE_DO_CMD
- @$(call do_cmd,objcxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.S FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.s FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-$(obj).$(TOOLSET)/%.o: $(obj)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.cpp FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.cxx FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.m FORCE_DO_CMD
- @$(call do_cmd,objc,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.mm FORCE_DO_CMD
- @$(call do_cmd,objcxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.S FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.s FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-
-ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
- $(findstring $(join ^,$(prefix)),\
- $(join ^,binding.target.mk)))),)
- include binding.target.mk
-endif
-
-quiet_cmd_regen_makefile = ACTION Regenerating $@
-cmd_regen_makefile = cd $(srcdir); /Users/trott/io.js/deps/npm/node_modules/node-gyp/gyp/gyp_main.py -fmake --ignore-environment "--toplevel-dir=." -I/Users/trott/io.js/test/addons-napi/3_callbacks/build/config.gypi -I/Users/trott/io.js/deps/npm/node_modules/node-gyp/addon.gypi -I/Users/trott/io.js/common.gypi "--depth=." "-Goutput_dir=." "--generator-output=build" "-Dlibrary=shared_library" "-Dvisibility=default" "-Dnode_root_dir=/Users/trott/io.js" "-Dnode_gyp_dir=/Users/trott/io.js/deps/npm/node_modules/node-gyp" "-Dnode_lib_file=/Users/trott/io.js/$(Configuration)/node.lib" "-Dmodule_root_dir=/Users/trott/io.js/test/addons-napi/3_callbacks" "-Dnode_engine=v8" binding.gyp
-Makefile: $(srcdir)/../../../deps/npm/node_modules/node-gyp/addon.gypi $(srcdir)/build/config.gypi $(srcdir)/binding.gyp $(srcdir)/../../../common.gypi
- $(call do_cmd,regen_makefile)
-
-# "all" is a concatenation of the "all" targets from all the included
-# sub-makefiles. This is just here to clarify.
-all:
-
-# Add in dependency-tracking rules. $(all_deps) is the list of every single
-# target in our tree. Only consider the ones with .d (dependency) info:
-d_files := $(wildcard $(foreach f,$(all_deps),$(depsdir)/$(f).d))
-ifneq ($(d_files),)
- include $(d_files)
-endif
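The generated Makefile removed above leans on the fixup_dep recipe to keep incremental builds working: the raw .d file emitted by the compiler is rewritten so the rule names the full object path (ccache/distcc drop it), and every dependency is appended as an empty rule so a deleted header becomes a phony target instead of a fatal error. An illustrative Python equivalent of those two sed transformations (not the code gyp actually runs):

import os

def fixup_dep(raw, target):
    # (1) Point the rule at the real object path.
    fixed = raw.replace(os.path.basename(target), target, 1)
    # (2) Append one empty rule per dependency so missing files stay phony.
    deps = raw.replace("\\", " ").split()[1:]
    return fixed + "".join("%s:\n" % d for d in deps)

raw = "binding.o: ../binding.c /usr/include/node/node_api.h\n"
print(fixup_dep(raw, "Release/obj.target/binding/binding.o"))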
diff --git a/test/addons-napi/3_callbacks/build/binding.Makefile b/test/addons-napi/3_callbacks/build/binding.Makefile
deleted file mode 100644
index 0556c70f61905f..00000000000000
--- a/test/addons-napi/3_callbacks/build/binding.Makefile
+++ /dev/null
@@ -1,6 +0,0 @@
-# This file is generated by gyp; do not edit.
-
-export builddir_name ?= ./build/.
-.PHONY: all
-all:
- $(MAKE) binding
diff --git a/test/addons-napi/3_callbacks/build/binding.target.mk b/test/addons-napi/3_callbacks/build/binding.target.mk
deleted file mode 100644
index a707bab99ab136..00000000000000
--- a/test/addons-napi/3_callbacks/build/binding.target.mk
+++ /dev/null
@@ -1,178 +0,0 @@
-# This file is generated by gyp; do not edit.
-
-TOOLSET := target
-TARGET := binding
-DEFS_Debug := \
- '-DNODE_GYP_MODULE_NAME=binding' \
- '-DUSING_UV_SHARED=1' \
- '-DUSING_V8_SHARED=1' \
- '-DV8_DEPRECATION_WARNINGS=1' \
- '-D_DARWIN_USE_64_BIT_INODE=1' \
- '-D_LARGEFILE_SOURCE' \
- '-D_FILE_OFFSET_BITS=64' \
- '-DBUILDING_NODE_EXTENSION' \
- '-DDEBUG' \
- '-D_DEBUG' \
- '-DV8_ENABLE_CHECKS'
-
-# Flags passed to all source files.
-CFLAGS_Debug := \
- -O0 \
- -gdwarf-2 \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -Wall \
- -Wendif-labels \
- -W \
- -Wno-unused-parameter
-
-# Flags passed to only C files.
-CFLAGS_C_Debug := \
- -fno-strict-aliasing
-
-# Flags passed to only C++ files.
-CFLAGS_CC_Debug := \
- -std=gnu++0x \
- -stdlib=libc++ \
- -fno-rtti \
- -fno-exceptions \
- -fno-threadsafe-statics \
- -fno-strict-aliasing
-
-# Flags passed to only ObjC files.
-CFLAGS_OBJC_Debug :=
-
-# Flags passed to only ObjC++ files.
-CFLAGS_OBJCC_Debug :=
-
-INCS_Debug := \
- -I/Users/trott/io.js/include/node \
- -I/Users/trott/io.js/src \
- -I/Users/trott/io.js/deps/uv/include \
- -I/Users/trott/io.js/deps/v8/include
-
-DEFS_Release := \
- '-DNODE_GYP_MODULE_NAME=binding' \
- '-DUSING_UV_SHARED=1' \
- '-DUSING_V8_SHARED=1' \
- '-DV8_DEPRECATION_WARNINGS=1' \
- '-D_DARWIN_USE_64_BIT_INODE=1' \
- '-D_LARGEFILE_SOURCE' \
- '-D_FILE_OFFSET_BITS=64' \
- '-DBUILDING_NODE_EXTENSION'
-
-# Flags passed to all source files.
-CFLAGS_Release := \
- -Os \
- -gdwarf-2 \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -Wall \
- -Wendif-labels \
- -W \
- -Wno-unused-parameter
-
-# Flags passed to only C files.
-CFLAGS_C_Release := \
- -fno-strict-aliasing
-
-# Flags passed to only C++ files.
-CFLAGS_CC_Release := \
- -std=gnu++0x \
- -stdlib=libc++ \
- -fno-rtti \
- -fno-exceptions \
- -fno-threadsafe-statics \
- -fno-strict-aliasing
-
-# Flags passed to only ObjC files.
-CFLAGS_OBJC_Release :=
-
-# Flags passed to only ObjC++ files.
-CFLAGS_OBJCC_Release :=
-
-INCS_Release := \
- -I/Users/trott/io.js/include/node \
- -I/Users/trott/io.js/src \
- -I/Users/trott/io.js/deps/uv/include \
- -I/Users/trott/io.js/deps/v8/include
-
-OBJS := \
- $(obj).target/$(TARGET)/binding.o
-
-# Add to the list of files we specially track dependencies for.
-all_deps += $(OBJS)
-
-# CFLAGS et al overrides must be target-local.
-# See "Target-specific Variable Values" in the GNU Make manual.
-$(OBJS): TOOLSET := $(TOOLSET)
-$(OBJS): GYP_CFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE))
-$(OBJS): GYP_CXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE))
-$(OBJS): GYP_OBJCFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) $(CFLAGS_OBJC_$(BUILDTYPE))
-$(OBJS): GYP_OBJCXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) $(CFLAGS_OBJCC_$(BUILDTYPE))
-
-# Suffix rules, putting all outputs into $(obj).
-
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-# Try building from generated source, too.
-
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-# End of this set of suffix rules
-### Rules for final target.
-LDFLAGS_Debug := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -L$(builddir) \
- -stdlib=libc++
-
-LIBTOOLFLAGS_Debug := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first
-
-LDFLAGS_Release := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -L$(builddir) \
- -stdlib=libc++
-
-LIBTOOLFLAGS_Release := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first
-
-LIBS :=
-
-$(builddir)/binding.node: GYP_LDFLAGS := $(LDFLAGS_$(BUILDTYPE))
-$(builddir)/binding.node: LIBS := $(LIBS)
-$(builddir)/binding.node: GYP_LIBTOOLFLAGS := $(LIBTOOLFLAGS_$(BUILDTYPE))
-$(builddir)/binding.node: TOOLSET := $(TOOLSET)
-$(builddir)/binding.node: $(OBJS) FORCE_DO_CMD
- $(call do_cmd,solink_module)
-
-all_deps += $(builddir)/binding.node
-# Add target alias
-.PHONY: binding
-binding: $(builddir)/binding.node
-
-# Short alias for building this executable.
-.PHONY: binding.node
-binding.node: $(builddir)/binding.node
-
-# Add executable to "all" target.
-.PHONY: all
-all: $(builddir)/binding.node
-
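The link rules in this second copy of binding.target.mk are identical to the one deleted earlier: binding.node is produced by cmd_solink_module as a macOS loadable bundle, and -undefined dynamic_lookup leaves Node's own symbols unresolved until the addon is dlopen'd by the running node binary. A rough reconstruction of the Release link command it expands to (the "c++" driver name is an assumption; LINK defaults to $(CXX.target) in the generated Makefile):

# Approximation of cmd_solink_module for BUILDTYPE=Release:
#   $(LINK) -bundle $(GYP_LDFLAGS) $(LDFLAGS.target) -o $@ $(OBJS) $(LIBS)
ldflags_release = [
    "-undefined", "dynamic_lookup", "-Wl,-no_pie", "-Wl,-search_paths_first",
    "-mmacosx-version-min=10.7", "-arch", "x86_64", "-L./Release", "-stdlib=libc++",
]
objs = ["Release/obj.target/binding/binding.o"]
cmd = ["c++", "-bundle"] + ldflags_release + ["-o", "Release/binding.node"] + objs
print(" ".join(cmd))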
diff --git a/test/addons-napi/3_callbacks/build/config.gypi b/test/addons-napi/3_callbacks/build/config.gypi
deleted file mode 100644
index 6a35cfccbb772b..00000000000000
--- a/test/addons-napi/3_callbacks/build/config.gypi
+++ /dev/null
@@ -1,70 +0,0 @@
-# Do not edit. File was generated by node-gyp's "configure" step
-{
- "target_defaults": {
- "cflags": [],
- "default_configuration": "Release",
- "defines": [],
- "include_dirs": [],
- "libraries": []
- },
- "variables": {
- "asan": 0,
- "coverage": "false",
- "debug_devtools": "node",
- "debug_http2": "false",
- "debug_nghttp2": "false",
- "force_dynamic_crt": 0,
- "host_arch": "x64",
- "icu_data_file": "icudt59l.dat",
- "icu_data_in": "../../deps/icu-small/source/data/in/icudt59l.dat",
- "icu_endianness": "l",
- "icu_gyp_path": "tools/icu/icu-generic.gyp",
- "icu_locales": "en,root",
- "icu_path": "deps/icu-small",
- "icu_small": "true",
- "icu_ver_major": "59",
- "llvm_version": 0,
- "node_byteorder": "little",
- "node_enable_d8": "false",
- "node_enable_v8_vtunejit": "false",
- "node_install_npm": "true",
- "node_module_version": 58,
- "node_no_browser_globals": "false",
- "node_prefix": "/usr/local",
- "node_release_urlbase": "",
- "node_shared": "false",
- "node_shared_cares": "false",
- "node_shared_http_parser": "false",
- "node_shared_libuv": "false",
- "node_shared_openssl": "false",
- "node_shared_zlib": "false",
- "node_tag": "",
- "node_use_bundled_v8": "true",
- "node_use_dtrace": "true",
- "node_use_etw": "false",
- "node_use_lttng": "false",
- "node_use_openssl": "true",
- "node_use_perfctr": "false",
- "node_use_v8_platform": "true",
- "node_without_node_options": "false",
- "openssl_fips": "",
- "openssl_no_asm": 0,
- "shlib_suffix": "58.dylib",
- "target_arch": "x64",
- "uv_parent_path": "/deps/uv/",
- "uv_use_dtrace": "true",
- "v8_enable_gdbjit": 0,
- "v8_enable_i18n_support": 1,
- "v8_enable_inspector": 1,
- "v8_no_strict_aliasing": 1,
- "v8_optimized_debug": 0,
- "v8_promise_internal_field_count": 1,
- "v8_random_seed": 0,
- "v8_trace_maps": 0,
- "v8_use_snapshot": "true",
- "want_separate_host_toolset": 0,
- "xcode_version": "8.0",
- "nodedir": "/Users/trott/io.js",
- "standalone_static_library": 1
- }
-}
diff --git a/test/addons-napi/3_callbacks/build/gyp-mac-tool b/test/addons-napi/3_callbacks/build/gyp-mac-tool
deleted file mode 100755
index 8ef02b0493a003..00000000000000
--- a/test/addons-napi/3_callbacks/build/gyp-mac-tool
+++ /dev/null
@@ -1,611 +0,0 @@
-#!/usr/bin/env python
-# Generated by gyp. Do not edit.
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Utility functions to perform Xcode-style build steps.
-
-These functions are executed via gyp-mac-tool when using the Makefile generator.
-"""
-
-import fcntl
-import fnmatch
-import glob
-import json
-import os
-import plistlib
-import re
-import shutil
-import string
-import subprocess
-import sys
-import tempfile
-
-
-def main(args):
- executor = MacTool()
- exit_code = executor.Dispatch(args)
- if exit_code is not None:
- sys.exit(exit_code)
-
-
-class MacTool(object):
- """This class performs all the Mac tooling steps. The methods can either be
- executed directly, or dispatched from an argument list."""
-
- def Dispatch(self, args):
- """Dispatches a string command to a method."""
- if len(args) < 1:
- raise Exception("Not enough arguments")
-
- method = "Exec%s" % self._CommandifyName(args[0])
- return getattr(self, method)(*args[1:])
-
- def _CommandifyName(self, name_string):
- """Transforms a tool name like copy-info-plist to CopyInfoPlist"""
- return name_string.title().replace('-', '')
-
- def ExecCopyBundleResource(self, source, dest, convert_to_binary):
- """Copies a resource file to the bundle/Resources directory, performing any
- necessary compilation on each resource."""
- extension = os.path.splitext(source)[1].lower()
- if os.path.isdir(source):
- # Copy tree.
- # TODO(thakis): This copies file attributes like mtime, while the
- # single-file branch below doesn't. This should probably be changed to
- # be consistent with the single-file branch.
- if os.path.exists(dest):
- shutil.rmtree(dest)
- shutil.copytree(source, dest)
- elif extension == '.xib':
- return self._CopyXIBFile(source, dest)
- elif extension == '.storyboard':
- return self._CopyXIBFile(source, dest)
- elif extension == '.strings':
- self._CopyStringsFile(source, dest, convert_to_binary)
- else:
- shutil.copy(source, dest)
-
- def _CopyXIBFile(self, source, dest):
- """Compiles a XIB file with ibtool into a binary plist in the bundle."""
-
- # ibtool sometimes crashes with relative paths. See crbug.com/314728.
- base = os.path.dirname(os.path.realpath(__file__))
- if os.path.relpath(source):
- source = os.path.join(base, source)
- if os.path.relpath(dest):
- dest = os.path.join(base, dest)
-
- args = ['xcrun', 'ibtool', '--errors', '--warnings', '--notices',
- '--output-format', 'human-readable-text', '--compile', dest, source]
- ibtool_section_re = re.compile(r'/\*.*\*/')
- ibtool_re = re.compile(r'.*note:.*is clipping its content')
- ibtoolout = subprocess.Popen(args, stdout=subprocess.PIPE)
- current_section_header = None
- for line in ibtoolout.stdout:
- if ibtool_section_re.match(line):
- current_section_header = line
- elif not ibtool_re.match(line):
- if current_section_header:
- sys.stdout.write(current_section_header)
- current_section_header = None
- sys.stdout.write(line)
- return ibtoolout.returncode
-
- def _ConvertToBinary(self, dest):
- subprocess.check_call([
- 'xcrun', 'plutil', '-convert', 'binary1', '-o', dest, dest])
-
- def _CopyStringsFile(self, source, dest, convert_to_binary):
- """Copies a .strings file using iconv to reconvert the input into UTF-16."""
- input_code = self._DetectInputEncoding(source) or "UTF-8"
-
- # Xcode's CpyCopyStringsFile / builtin-copyStrings seems to call
- # CFPropertyListCreateFromXMLData() behind the scenes; at least it prints
- # CFPropertyListCreateFromXMLData(): Old-style plist parser: missing
- # semicolon in dictionary.
- # on invalid files. Do the same kind of validation.
- import CoreFoundation
- s = open(source, 'rb').read()
- d = CoreFoundation.CFDataCreate(None, s, len(s))
- _, error = CoreFoundation.CFPropertyListCreateFromXMLData(None, d, 0, None)
- if error:
- return
-
- fp = open(dest, 'wb')
- fp.write(s.decode(input_code).encode('UTF-16'))
- fp.close()
-
- if convert_to_binary == 'True':
- self._ConvertToBinary(dest)
-
- def _DetectInputEncoding(self, file_name):
- """Reads the first few bytes from file_name and tries to guess the text
- encoding. Returns None as a guess if it can't detect it."""
- fp = open(file_name, 'rb')
- try:
- header = fp.read(3)
-    except Exception:
- fp.close()
- return None
- fp.close()
- if header.startswith("\xFE\xFF"):
- return "UTF-16"
- elif header.startswith("\xFF\xFE"):
- return "UTF-16"
- elif header.startswith("\xEF\xBB\xBF"):
- return "UTF-8"
- else:
- return None
-
- def ExecCopyInfoPlist(self, source, dest, convert_to_binary, *keys):
- """Copies the |source| Info.plist to the destination directory |dest|."""
- # Read the source Info.plist into memory.
- fd = open(source, 'r')
- lines = fd.read()
- fd.close()
-
- # Insert synthesized key/value pairs (e.g. BuildMachineOSBuild).
- plist = plistlib.readPlistFromString(lines)
- if keys:
- plist = dict(plist.items() + json.loads(keys[0]).items())
- lines = plistlib.writePlistToString(plist)
-
- # Go through all the environment variables and replace them as variables in
- # the file.
- IDENT_RE = re.compile(r'[/\s]')
- for key in os.environ:
- if key.startswith('_'):
- continue
- evar = '${%s}' % key
- evalue = os.environ[key]
- lines = string.replace(lines, evar, evalue)
-
-      # Xcode supports various suffixes on environment variables, which are
- # all undocumented. :rfc1034identifier is used in the standard project
- # template these days, and :identifier was used earlier. They are used to
- # convert non-url characters into things that look like valid urls --
- # except that the replacement character for :identifier, '_' isn't valid
- # in a URL either -- oops, hence :rfc1034identifier was born.
- evar = '${%s:identifier}' % key
- evalue = IDENT_RE.sub('_', os.environ[key])
- lines = string.replace(lines, evar, evalue)
-
- evar = '${%s:rfc1034identifier}' % key
- evalue = IDENT_RE.sub('-', os.environ[key])
- lines = string.replace(lines, evar, evalue)
-
- # Remove any keys with values that haven't been replaced.
- lines = lines.split('\n')
- for i in range(len(lines)):
- if lines[i].strip().startswith("${"):
- lines[i] = None
- lines[i - 1] = None
- lines = '\n'.join(filter(lambda x: x is not None, lines))
-
- # Write out the file with variables replaced.
- fd = open(dest, 'w')
- fd.write(lines)
- fd.close()
-
-    # Now that the Info.plist file has been "compiled", write out the
-    # PkgInfo file.
- self._WritePkgInfo(dest)
-
- if convert_to_binary == 'True':
- self._ConvertToBinary(dest)
-
- def _WritePkgInfo(self, info_plist):
- """This writes the PkgInfo file from the data stored in Info.plist."""
- plist = plistlib.readPlist(info_plist)
- if not plist:
- return
-
- # Only create PkgInfo for executable types.
- package_type = plist['CFBundlePackageType']
- if package_type != 'APPL':
- return
-
- # The format of PkgInfo is eight characters, representing the bundle type
- # and bundle signature, each four characters. If that is missing, four
- # '?' characters are used instead.
- signature_code = plist.get('CFBundleSignature', '????')
- if len(signature_code) != 4: # Wrong length resets everything, too.
- signature_code = '?' * 4
-
- dest = os.path.join(os.path.dirname(info_plist), 'PkgInfo')
- fp = open(dest, 'w')
- fp.write('%s%s' % (package_type, signature_code))
- fp.close()
-
- def ExecFlock(self, lockfile, *cmd_list):
- """Emulates the most basic behavior of Linux's flock(1)."""
- # Rely on exception handling to report errors.
- fd = os.open(lockfile, os.O_RDONLY|os.O_NOCTTY|os.O_CREAT, 0o666)
- fcntl.flock(fd, fcntl.LOCK_EX)
- return subprocess.call(cmd_list)
-
- def ExecFilterLibtool(self, *cmd_list):
- """Calls libtool and filters out '/path/to/libtool: file: foo.o has no
- symbols'."""
- libtool_re = re.compile(r'^.*libtool: file: .* has no symbols$')
- libtool_re5 = re.compile(
- r'^.*libtool: warning for library: ' +
- r'.* the table of contents is empty ' +
- r'\(no object file members in the library define global symbols\)$')
- env = os.environ.copy()
- # Ref:
- # http://www.opensource.apple.com/source/cctools/cctools-809/misc/libtool.c
- # The problem with this flag is that it resets the file mtime on the file to
- # epoch=0, e.g. 1970-1-1 or 1969-12-31 depending on timezone.
- env['ZERO_AR_DATE'] = '1'
- libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env)
- _, err = libtoolout.communicate()
- for line in err.splitlines():
- if not libtool_re.match(line) and not libtool_re5.match(line):
- print >>sys.stderr, line
- # Unconditionally touch the output .a file on the command line if present
- # and the command succeeded. A bit hacky.
- if not libtoolout.returncode:
- for i in range(len(cmd_list) - 1):
- if cmd_list[i] == "-o" and cmd_list[i+1].endswith('.a'):
- os.utime(cmd_list[i+1], None)
- break
- return libtoolout.returncode
-
- def ExecPackageFramework(self, framework, version):
- """Takes a path to Something.framework and the Current version of that and
- sets up all the symlinks."""
- # Find the name of the binary based on the part before the ".framework".
- binary = os.path.basename(framework).split('.')[0]
-
- CURRENT = 'Current'
- RESOURCES = 'Resources'
- VERSIONS = 'Versions'
-
- if not os.path.exists(os.path.join(framework, VERSIONS, version, binary)):
- # Binary-less frameworks don't seem to contain symlinks (see e.g.
- # chromium's out/Debug/org.chromium.Chromium.manifest/ bundle).
- return
-
- # Move into the framework directory to set the symlinks correctly.
- pwd = os.getcwd()
- os.chdir(framework)
-
- # Set up the Current version.
- self._Relink(version, os.path.join(VERSIONS, CURRENT))
-
- # Set up the root symlinks.
- self._Relink(os.path.join(VERSIONS, CURRENT, binary), binary)
- self._Relink(os.path.join(VERSIONS, CURRENT, RESOURCES), RESOURCES)
-
- # Back to where we were before!
- os.chdir(pwd)
-
- def _Relink(self, dest, link):
- """Creates a symlink to |dest| named |link|. If |link| already exists,
- it is overwritten."""
- if os.path.lexists(link):
- os.remove(link)
- os.symlink(dest, link)
-
- def ExecCompileXcassets(self, keys, *inputs):
- """Compiles multiple .xcassets files into a single .car file.
-
-    This invokes 'actool' to compile all the input .xcassets files. The
-    |keys| argument is a json-encoded dictionary of extra arguments to
-    pass to 'actool' when the asset catalogs contain an application icon
-    or a launch image.
-
-    Note that 'actool' does not create the Assets.car file if the asset
-    catalogs do not contain any imageset.
- """
- command_line = [
- 'xcrun', 'actool', '--output-format', 'human-readable-text',
- '--compress-pngs', '--notices', '--warnings', '--errors',
- ]
- is_iphone_target = 'IPHONEOS_DEPLOYMENT_TARGET' in os.environ
- if is_iphone_target:
- platform = os.environ['CONFIGURATION'].split('-')[-1]
- if platform not in ('iphoneos', 'iphonesimulator'):
- platform = 'iphonesimulator'
- command_line.extend([
- '--platform', platform, '--target-device', 'iphone',
- '--target-device', 'ipad', '--minimum-deployment-target',
- os.environ['IPHONEOS_DEPLOYMENT_TARGET'], '--compile',
- os.path.abspath(os.environ['CONTENTS_FOLDER_PATH']),
- ])
- else:
- command_line.extend([
- '--platform', 'macosx', '--target-device', 'mac',
- '--minimum-deployment-target', os.environ['MACOSX_DEPLOYMENT_TARGET'],
- '--compile',
- os.path.abspath(os.environ['UNLOCALIZED_RESOURCES_FOLDER_PATH']),
- ])
- if keys:
- keys = json.loads(keys)
- for key, value in keys.iteritems():
- arg_name = '--' + key
- if isinstance(value, bool):
- if value:
- command_line.append(arg_name)
- elif isinstance(value, list):
- for v in value:
- command_line.append(arg_name)
- command_line.append(str(v))
- else:
- command_line.append(arg_name)
- command_line.append(str(value))
- # Note: actool crashes if input paths are relative, so use os.path.abspath
- # to get absolute path names for the inputs.
- command_line.extend(map(os.path.abspath, inputs))
- subprocess.check_call(command_line)
-
- def ExecMergeInfoPlist(self, output, *inputs):
- """Merge multiple .plist files into a single .plist file."""
- merged_plist = {}
- for path in inputs:
- plist = self._LoadPlistMaybeBinary(path)
- self._MergePlist(merged_plist, plist)
- plistlib.writePlist(merged_plist, output)
-
- def ExecCodeSignBundle(self, key, resource_rules, entitlements, provisioning):
- """Code sign a bundle.
-
- This function tries to code sign an iOS bundle, following the same
- algorithm as Xcode:
- 1. copy ResourceRules.plist from the user or the SDK into the bundle,
- 2. pick the provisioning profile that best matches the bundle identifier,
- and copy it into the bundle as embedded.mobileprovision,
- 3. copy Entitlements.plist from the user or the SDK next to the bundle,
- 4. code sign the bundle.
- """
- resource_rules_path = self._InstallResourceRules(resource_rules)
- substitutions, overrides = self._InstallProvisioningProfile(
- provisioning, self._GetCFBundleIdentifier())
- entitlements_path = self._InstallEntitlements(
- entitlements, substitutions, overrides)
- subprocess.check_call([
- 'codesign', '--force', '--sign', key, '--resource-rules',
- resource_rules_path, '--entitlements', entitlements_path,
- os.path.join(
- os.environ['TARGET_BUILD_DIR'],
- os.environ['FULL_PRODUCT_NAME'])])
-
- def _InstallResourceRules(self, resource_rules):
- """Installs ResourceRules.plist from user or SDK into the bundle.
-
- Args:
- resource_rules: string, optional, path to the ResourceRules.plist file
- to use, default to "${SDKROOT}/ResourceRules.plist"
-
- Returns:
- Path to the copy of ResourceRules.plist in the bundle.
- """
- source_path = resource_rules
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['CONTENTS_FOLDER_PATH'],
- 'ResourceRules.plist')
- if not source_path:
- source_path = os.path.join(
- os.environ['SDKROOT'], 'ResourceRules.plist')
- shutil.copy2(source_path, target_path)
- return target_path
-
- def _InstallProvisioningProfile(self, profile, bundle_identifier):
- """Installs embedded.mobileprovision into the bundle.
-
- Args:
- profile: string, optional, short name of the .mobileprovision file
- to use, if empty or the file is missing, the best file installed
- will be used
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
-
- Returns:
- A tuple containing two dictionaries: variable substitutions and values
- to override when generating the entitlements file.
- """
- source_path, provisioning_data, team_id = self._FindProvisioningProfile(
- profile, bundle_identifier)
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['CONTENTS_FOLDER_PATH'],
- 'embedded.mobileprovision')
- shutil.copy2(source_path, target_path)
- substitutions = self._GetSubstitutions(bundle_identifier, team_id + '.')
- return substitutions, provisioning_data['Entitlements']
-
- def _FindProvisioningProfile(self, profile, bundle_identifier):
- """Finds the .mobileprovision file to use for signing the bundle.
-
- Checks all the installed provisioning profiles (or, if the user specified
- the PROVISIONING_PROFILE variable, only that one) and selects the most
- specific one that corresponds to the bundle identifier.
-
- Args:
- profile: string, optional, short name of the .mobileprovision file
- to use, if empty or the file is missing, the best file installed
- will be used
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
-
- Returns:
- A tuple of the path to the selected provisioning profile, the data of
- the embedded plist in the provisioning profile and the team identifier
- to use for code signing.
-
- Raises:
- SystemExit: if no .mobileprovision can be used to sign the bundle.
- """
- profiles_dir = os.path.join(
- os.environ['HOME'], 'Library', 'MobileDevice', 'Provisioning Profiles')
- if not os.path.isdir(profiles_dir):
- print >>sys.stderr, (
- 'cannot find mobile provisioning for %s' % bundle_identifier)
- sys.exit(1)
- provisioning_profiles = None
- if profile:
- profile_path = os.path.join(profiles_dir, profile + '.mobileprovision')
- if os.path.exists(profile_path):
- provisioning_profiles = [profile_path]
- if not provisioning_profiles:
- provisioning_profiles = glob.glob(
- os.path.join(profiles_dir, '*.mobileprovision'))
- valid_provisioning_profiles = {}
- for profile_path in provisioning_profiles:
- profile_data = self._LoadProvisioningProfile(profile_path)
- app_id_pattern = profile_data.get(
- 'Entitlements', {}).get('application-identifier', '')
- for team_identifier in profile_data.get('TeamIdentifier', []):
- app_id = '%s.%s' % (team_identifier, bundle_identifier)
- if fnmatch.fnmatch(app_id, app_id_pattern):
- valid_provisioning_profiles[app_id_pattern] = (
- profile_path, profile_data, team_identifier)
- if not valid_provisioning_profiles:
- print >>sys.stderr, (
- 'cannot find mobile provisioning for %s' % bundle_identifier)
- sys.exit(1)
- # If the user has multiple provisioning profiles installed that can be
- # used for ${bundle_identifier}, pick the most specific one (ie. the
- # provisioning profile whose pattern is the longest).
- selected_key = max(valid_provisioning_profiles, key=lambda v: len(v))
- return valid_provisioning_profiles[selected_key]
-
- def _LoadProvisioningProfile(self, profile_path):
- """Extracts the plist embedded in a provisioning profile.
-
- Args:
- profile_path: string, path to the .mobileprovision file
-
- Returns:
- Content of the plist embedded in the provisioning profile as a dictionary.
- """
- with tempfile.NamedTemporaryFile() as temp:
- subprocess.check_call([
- 'security', 'cms', '-D', '-i', profile_path, '-o', temp.name])
- return self._LoadPlistMaybeBinary(temp.name)
-
- def _MergePlist(self, merged_plist, plist):
- """Merge |plist| into |merged_plist|."""
- for key, value in plist.iteritems():
- if isinstance(value, dict):
- merged_value = merged_plist.get(key, {})
- if isinstance(merged_value, dict):
- self._MergePlist(merged_value, value)
- merged_plist[key] = merged_value
- else:
- merged_plist[key] = value
- else:
- merged_plist[key] = value
-
- def _LoadPlistMaybeBinary(self, plist_path):
- """Loads into a memory a plist possibly encoded in binary format.
-
- This is a wrapper around plistlib.readPlist that tries to convert the
- plist to the XML format if it can't be parsed (assuming that it is in
- the binary format).
-
- Args:
- plist_path: string, path to a plist file, in XML or binary format
-
- Returns:
- Content of the plist as a dictionary.
- """
- try:
- # First, try to read the file using plistlib, which only supports XML,
- # and if an exception is raised, convert a temporary copy to XML and
- # load that copy.
- return plistlib.readPlist(plist_path)
- except:
- pass
- with tempfile.NamedTemporaryFile() as temp:
- shutil.copy2(plist_path, temp.name)
- subprocess.check_call(['plutil', '-convert', 'xml1', temp.name])
- return plistlib.readPlist(temp.name)
-
- def _GetSubstitutions(self, bundle_identifier, app_identifier_prefix):
- """Constructs a dictionary of variable substitutions for Entitlements.plist.
-
- Args:
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
- app_identifier_prefix: string, value for AppIdentifierPrefix
-
- Returns:
- Dictionary of substitutions to apply when generating Entitlements.plist.
- """
- return {
- 'CFBundleIdentifier': bundle_identifier,
- 'AppIdentifierPrefix': app_identifier_prefix,
- }
-
- def _GetCFBundleIdentifier(self):
- """Extracts CFBundleIdentifier value from Info.plist in the bundle.
-
- Returns:
- Value of CFBundleIdentifier in the Info.plist located in the bundle.
- """
- info_plist_path = os.path.join(
- os.environ['TARGET_BUILD_DIR'],
- os.environ['INFOPLIST_PATH'])
- info_plist_data = self._LoadPlistMaybeBinary(info_plist_path)
- return info_plist_data['CFBundleIdentifier']
-
- def _InstallEntitlements(self, entitlements, substitutions, overrides):
- """Generates and install the ${BundleName}.xcent entitlements file.
-
- Expands variables "$(variable)" pattern in the source entitlements file,
- add extra entitlements defined in the .mobileprovision file and the copy
- the generated plist to "${BundlePath}.xcent".
-
- Args:
- entitlements: string, optional, path to the Entitlements.plist template
- to use, defaults to "${SDKROOT}/Entitlements.plist"
- substitutions: dictionary, variable substitutions
- overrides: dictionary, values to add to the entitlements
-
- Returns:
- Path to the generated entitlements file.
- """
- source_path = entitlements
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['PRODUCT_NAME'] + '.xcent')
- if not source_path:
- source_path = os.path.join(
- os.environ['SDKROOT'],
- 'Entitlements.plist')
- shutil.copy2(source_path, target_path)
- data = self._LoadPlistMaybeBinary(target_path)
- data = self._ExpandVariables(data, substitutions)
- if overrides:
- for key in overrides:
- if key not in data:
- data[key] = overrides[key]
- plistlib.writePlist(data, target_path)
- return target_path
-
- def _ExpandVariables(self, data, substitutions):
- """Expands variables "$(variable)" in data.
-
- Args:
- data: object, can be either string, list or dictionary
- substitutions: dictionary, variable substitutions to perform
-
- Returns:
- Copy of data where each reference to "$(variable)" has been replaced
- by the corresponding value found in substitutions, or left intact if
- the key was not found.
- """
- if isinstance(data, str):
- for key, value in substitutions.iteritems():
- data = data.replace('$(%s)' % key, value)
- return data
- if isinstance(data, list):
- return [self._ExpandVariables(v, substitutions) for v in data]
- if isinstance(data, dict):
- return {k: self._ExpandVariables(data[k], substitutions) for k in data}
- return data
-
-if __name__ == '__main__':
- sys.exit(main(sys.argv[1:]))
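
The provisioning-profile lookup in the gyp-mac-tool removed above keys each candidate profile by its application-identifier pattern and keeps the longest (most specific) pattern that matches "<team id>.<bundle id>". A minimal sketch of that selection step, using hypothetical in-memory profile data instead of real .mobileprovision files:

import fnmatch

# Hypothetical profiles: application-identifier pattern -> profile path.
# Real data comes from the plists embedded in the installed profiles.
PROFILES = {
    'ABCDE12345.*': 'wildcard.mobileprovision',
    'ABCDE12345.org.example.*': 'org-wildcard.mobileprovision',
    'ABCDE12345.org.example.app': 'exact.mobileprovision',
}

def pick_profile(team_identifier, bundle_identifier):
    app_id = '%s.%s' % (team_identifier, bundle_identifier)
    matches = dict((pattern, path) for pattern, path in PROFILES.items()
                   if fnmatch.fnmatch(app_id, pattern))
    if not matches:
        raise SystemExit('cannot find mobile provisioning for %s' % bundle_identifier)
    # The longest matching pattern wins, mirroring the max(..., key=len) selection.
    return matches[max(matches, key=len)]

print(pick_profile('ABCDE12345', 'org.example.app'))  # exact.mobileprovision
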
diff --git a/test/addons-napi/4_object_factory/build/Makefile b/test/addons-napi/4_object_factory/build/Makefile
deleted file mode 100644
index 90bca121611c40..00000000000000
--- a/test/addons-napi/4_object_factory/build/Makefile
+++ /dev/null
@@ -1,342 +0,0 @@
-# We borrow heavily from the kernel build setup, though we are simpler since
-# we don't have Kconfig tweaking settings on us.
-
-# The implicit make rules have it looking for RCS files, among other things.
-# We instead explicitly write all the rules we care about.
-# It's even quicker (saves ~200ms) to pass -r on the command line.
-MAKEFLAGS=-r
-
-# The source directory tree.
-srcdir := ..
-abs_srcdir := $(abspath $(srcdir))
-
-# The name of the builddir.
-builddir_name ?= .
-
-# The V=1 flag on command line makes us verbosely print command lines.
-ifdef V
- quiet=
-else
- quiet=quiet_
-endif
-
-# Specify BUILDTYPE=Release on the command line for a release build.
-BUILDTYPE ?= Release
-
-# Directory all our build output goes into.
-# Note that this must be two directories beneath src/ for unit tests to pass,
-# as they reach into the src/ directory for data with relative paths.
-builddir ?= $(builddir_name)/$(BUILDTYPE)
-abs_builddir := $(abspath $(builddir))
-depsdir := $(builddir)/.deps
-
-# Object output directory.
-obj := $(builddir)/obj
-abs_obj := $(abspath $(obj))
-
-# We build up a list of every single one of the targets so we can slurp in the
-# generated dependency rule Makefiles in one pass.
-all_deps :=
-
-
-
-CC.target ?= $(CC)
-CFLAGS.target ?= $(CPPFLAGS) $(CFLAGS)
-CXX.target ?= $(CXX)
-CXXFLAGS.target ?= $(CPPFLAGS) $(CXXFLAGS)
-LINK.target ?= $(LINK)
-LDFLAGS.target ?= $(LDFLAGS)
-AR.target ?= $(AR)
-
-# C++ apps need to be linked with g++.
-LINK ?= $(CXX.target)
-
-# TODO(evan): move all cross-compilation logic to gyp-time so we don't need
-# to replicate this environment fallback in make as well.
-CC.host ?= gcc
-CFLAGS.host ?= $(CPPFLAGS_host) $(CFLAGS_host)
-CXX.host ?= g++
-CXXFLAGS.host ?= $(CPPFLAGS_host) $(CXXFLAGS_host)
-LINK.host ?= $(CXX.host)
-LDFLAGS.host ?=
-AR.host ?= ar
-
-# Define a dir function that can handle spaces.
-# http://www.gnu.org/software/make/manual/make.html#Syntax-of-Functions
-# "leading spaces cannot appear in the text of the first argument as written.
-# These characters can be put into the argument value by variable substitution."
-empty :=
-space := $(empty) $(empty)
-
-# http://stackoverflow.com/questions/1189781/using-make-dir-or-notdir-on-a-path-with-spaces
-replace_spaces = $(subst $(space),?,$1)
-unreplace_spaces = $(subst ?,$(space),$1)
-dirx = $(call unreplace_spaces,$(dir $(call replace_spaces,$1)))
-
-# Flags to make gcc output dependency info. Note that you need to be
-# careful here to use the flags that ccache and distcc can understand.
-# We write to a dep file on the side first and then rename at the end
-# so we can't end up with a broken dep file.
-depfile = $(depsdir)/$(call replace_spaces,$@).d
-DEPFLAGS = -MMD -MF $(depfile).raw
-
-# We have to fixup the deps output in a few ways.
-# (1) the file output should mention the proper .o file.
-# ccache or distcc lose the path to the target, so we convert a rule of
-# the form:
-# foobar.o: DEP1 DEP2
-# into
-# path/to/foobar.o: DEP1 DEP2
-# (2) we want missing files not to cause us to fail to build.
-# We want to rewrite
-# foobar.o: DEP1 DEP2 \
-# DEP3
-# to
-# DEP1:
-# DEP2:
-# DEP3:
-# so if the files are missing, they're just considered phony rules.
-# We have to do some pretty insane escaping to get those backslashes
-# and dollar signs past make, the shell, and sed at the same time.
-# Doesn't work with spaces, but that's fine: .d files have spaces in
-# their names replaced with other characters.
-define fixup_dep
-# The depfile may not exist if the input file didn't have any #includes.
-touch $(depfile).raw
-# Fixup path as in (1).
-sed -e "s|^$(notdir $@)|$@|" $(depfile).raw >> $(depfile)
-# Add extra rules as in (2).
-# We remove slashes and replace spaces with new lines;
-# remove blank lines;
-# delete the first line and append a colon to the remaining lines.
-sed -e 's|\\||' -e 'y| |\n|' $(depfile).raw |\
- grep -v '^$$' |\
- sed -e 1d -e 's|$$|:|' \
- >> $(depfile)
-rm $(depfile).raw
-endef
-
-# Command definitions:
-# - cmd_foo is the actual command to run;
-# - quiet_cmd_foo is the brief-output summary of the command.
-
-quiet_cmd_cc = CC($(TOOLSET)) $@
-cmd_cc = $(CC.$(TOOLSET)) $(GYP_CFLAGS) $(DEPFLAGS) $(CFLAGS.$(TOOLSET)) -c -o $@ $<
-
-quiet_cmd_cxx = CXX($(TOOLSET)) $@
-cmd_cxx = $(CXX.$(TOOLSET)) $(GYP_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-
-quiet_cmd_objc = CXX($(TOOLSET)) $@
-cmd_objc = $(CC.$(TOOLSET)) $(GYP_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-quiet_cmd_objcxx = CXX($(TOOLSET)) $@
-cmd_objcxx = $(CXX.$(TOOLSET)) $(GYP_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-# Commands for precompiled header files.
-quiet_cmd_pch_c = CXX($(TOOLSET)) $@
-cmd_pch_c = $(CC.$(TOOLSET)) $(GYP_PCH_CFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-quiet_cmd_pch_cc = CXX($(TOOLSET)) $@
-cmd_pch_cc = $(CC.$(TOOLSET)) $(GYP_PCH_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-quiet_cmd_pch_m = CXX($(TOOLSET)) $@
-cmd_pch_m = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $<
-quiet_cmd_pch_mm = CXX($(TOOLSET)) $@
-cmd_pch_mm = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-# gyp-mac-tool is written next to the root Makefile by gyp.
-# Use $(4) for the command, since $(2) and $(3) are used as flags by do_cmd
-# already.
-quiet_cmd_mac_tool = MACTOOL $(4) $<
-cmd_mac_tool = ./gyp-mac-tool $(4) $< "$@"
-
-quiet_cmd_mac_package_framework = PACKAGE FRAMEWORK $@
-cmd_mac_package_framework = ./gyp-mac-tool package-framework "$@" $(4)
-
-quiet_cmd_infoplist = INFOPLIST $@
-cmd_infoplist = $(CC.$(TOOLSET)) -E -P -Wno-trigraphs -x c $(INFOPLIST_DEFINES) "$<" -o "$@"
-
-quiet_cmd_touch = TOUCH $@
-cmd_touch = touch $@
-
-quiet_cmd_copy = COPY $@
-# send stderr to /dev/null to ignore messages when linking directories.
-cmd_copy = rm -rf "$@" && cp -af "$<" "$@"
-
-quiet_cmd_alink = LIBTOOL-STATIC $@
-cmd_alink = rm -f $@ && ./gyp-mac-tool filter-libtool libtool $(GYP_LIBTOOLFLAGS) -static -o $@ $(filter %.o,$^)
-
-quiet_cmd_link = LINK($(TOOLSET)) $@
-cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS)
-
-quiet_cmd_solink = SOLINK($(TOOLSET)) $@
-cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS)
-
-quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@
-cmd_solink_module = $(LINK.$(TOOLSET)) -bundle $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS)
-
-
-# Define an escape_quotes function to escape single quotes.
-# This allows us to handle quotes properly as long as we always use
-# single quotes and escape_quotes.
-escape_quotes = $(subst ','\'',$(1))
-# This comment is here just to include a ' to unconfuse syntax highlighting.
-# Define an escape_vars function to escape '$' variable syntax.
-# This allows us to read/write command lines with shell variables (e.g.
-# $LD_LIBRARY_PATH), without triggering make substitution.
-escape_vars = $(subst $$,$$$$,$(1))
-# Helper that expands to a shell command to echo a string exactly as it is in
-# make. This uses printf instead of echo because printf's behaviour with respect
-# to escape sequences is more portable than echo's across different shells
-# (e.g., dash, bash).
-exact_echo = printf '%s\n' '$(call escape_quotes,$(1))'
-
-# Helper to compare the command we're about to run against the command
-# we logged the last time we ran the command. Produces an empty
-# string (false) when the commands match.
-# Tricky point: Make has no string-equality test function.
-# The kernel uses the following, but it seems like it would have false
-# positives, where one string reordered its arguments.
-# arg_check = $(strip $(filter-out $(cmd_$(1)), $(cmd_$@)) \
-# $(filter-out $(cmd_$@), $(cmd_$(1))))
-# We instead substitute each for the empty string into the other, and
-# say they're equal if both substitutions produce the empty string.
-# .d files contain ? instead of spaces, take that into account.
-command_changed = $(or $(subst $(cmd_$(1)),,$(cmd_$(call replace_spaces,$@))),\
- $(subst $(cmd_$(call replace_spaces,$@)),,$(cmd_$(1))))
-
-# Helper that is non-empty when a prerequisite changes.
-# Normally make does this implicitly, but we force rules to always run
-# so we can check their command lines.
-# $? -- new prerequisites
-# $| -- order-only dependencies
-prereq_changed = $(filter-out FORCE_DO_CMD,$(filter-out $|,$?))
-
-# Helper that executes all postbuilds until one fails.
-define do_postbuilds
- @E=0;\
- for p in $(POSTBUILDS); do\
- eval $$p;\
- E=$$?;\
- if [ $$E -ne 0 ]; then\
- break;\
- fi;\
- done;\
- if [ $$E -ne 0 ]; then\
- rm -rf "$@";\
- exit $$E;\
- fi
-endef
-
-# do_cmd: run a command via the above cmd_foo names, if necessary.
-# Should always run for a given target to handle command-line changes.
-# Second argument, if non-zero, makes it do asm/C/C++ dependency munging.
-# Third argument, if non-zero, makes it do POSTBUILDS processing.
-# Note: We intentionally do NOT call dirx for depfile, since it contains ? for
-# spaces already and dirx strips the ? characters.
-define do_cmd
-$(if $(or $(command_changed),$(prereq_changed)),
- @$(call exact_echo, $($(quiet)cmd_$(1)))
- @mkdir -p "$(call dirx,$@)" "$(dir $(depfile))"
- $(if $(findstring flock,$(word 2,$(cmd_$1))),
- @$(cmd_$(1))
- @echo " $(quiet_cmd_$(1)): Finished",
- @$(cmd_$(1))
- )
- @$(call exact_echo,$(call escape_vars,cmd_$(call replace_spaces,$@) := $(cmd_$(1)))) > $(depfile)
- @$(if $(2),$(fixup_dep))
- $(if $(and $(3), $(POSTBUILDS)),
- $(call do_postbuilds)
- )
-)
-endef
-
-# Declare the "all" target first so it is the default,
-# even though we don't have the deps yet.
-.PHONY: all
-all:
-
-# make looks for ways to re-generate included makefiles, but in our case, we
-# don't have a direct way. Explicitly telling make that it has nothing to do
-# for them makes it go faster.
-%.d: ;
-
-# Use FORCE_DO_CMD to force a target to run. Should be coupled with
-# do_cmd.
-.PHONY: FORCE_DO_CMD
-FORCE_DO_CMD:
-
-TOOLSET := target
-# Suffix rules, putting all outputs into $(obj).
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cpp FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cxx FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.m FORCE_DO_CMD
- @$(call do_cmd,objc,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.mm FORCE_DO_CMD
- @$(call do_cmd,objcxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.S FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.s FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-# Try building from generated source, too.
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cpp FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cxx FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.m FORCE_DO_CMD
- @$(call do_cmd,objc,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.mm FORCE_DO_CMD
- @$(call do_cmd,objcxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.S FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.s FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-$(obj).$(TOOLSET)/%.o: $(obj)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.cpp FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.cxx FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.m FORCE_DO_CMD
- @$(call do_cmd,objc,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.mm FORCE_DO_CMD
- @$(call do_cmd,objcxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.S FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.s FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-
-ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
- $(findstring $(join ^,$(prefix)),\
- $(join ^,binding.target.mk)))),)
- include binding.target.mk
-endif
-
-quiet_cmd_regen_makefile = ACTION Regenerating $@
-cmd_regen_makefile = cd $(srcdir); /Users/trott/io.js/deps/npm/node_modules/node-gyp/gyp/gyp_main.py -fmake --ignore-environment "--toplevel-dir=." -I/Users/trott/io.js/test/addons-napi/4_object_factory/build/config.gypi -I/Users/trott/io.js/deps/npm/node_modules/node-gyp/addon.gypi -I/Users/trott/io.js/common.gypi "--depth=." "-Goutput_dir=." "--generator-output=build" "-Dlibrary=shared_library" "-Dvisibility=default" "-Dnode_root_dir=/Users/trott/io.js" "-Dnode_gyp_dir=/Users/trott/io.js/deps/npm/node_modules/node-gyp" "-Dnode_lib_file=/Users/trott/io.js/$(Configuration)/node.lib" "-Dmodule_root_dir=/Users/trott/io.js/test/addons-napi/4_object_factory" "-Dnode_engine=v8" binding.gyp
-Makefile: $(srcdir)/../../../deps/npm/node_modules/node-gyp/addon.gypi $(srcdir)/build/config.gypi $(srcdir)/binding.gyp $(srcdir)/../../../common.gypi
- $(call do_cmd,regen_makefile)
-
-# "all" is a concatenation of the "all" targets from all the included
-# sub-makefiles. This is just here to clarify.
-all:
-
-# Add in dependency-tracking rules. $(all_deps) is the list of every single
-# target in our tree. Only consider the ones with .d (dependency) info:
-d_files := $(wildcard $(foreach f,$(all_deps),$(depsdir)/$(f).d))
-ifneq ($(d_files),)
- include $(d_files)
-endif
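
The command_changed helper in the Makefile above works around Make's lack of a string-equality function: it substitutes each command string into the other, and only when both substitutions come out empty are the commands considered unchanged. The same idea, sketched in Python purely for illustration (the sample commands are made up):

def command_changed(old_cmd, new_cmd):
    # Equivalent of $(or $(subst old,,new),$(subst new,,old)): any non-empty
    # leftover from either substitution means the two commands differ.
    return bool(new_cmd.replace(old_cmd, '') or old_cmd.replace(new_cmd, ''))

print(command_changed('cc -c -o foo.o foo.c', 'cc -c -o foo.o foo.c'))      # False
print(command_changed('cc -c -o foo.o foo.c', 'cc -O2 -c -o foo.o foo.c'))  # True
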
diff --git a/test/addons-napi/4_object_factory/build/binding.Makefile b/test/addons-napi/4_object_factory/build/binding.Makefile
deleted file mode 100644
index 0556c70f61905f..00000000000000
--- a/test/addons-napi/4_object_factory/build/binding.Makefile
+++ /dev/null
@@ -1,6 +0,0 @@
-# This file is generated by gyp; do not edit.
-
-export builddir_name ?= ./build/.
-.PHONY: all
-all:
- $(MAKE) binding
diff --git a/test/addons-napi/4_object_factory/build/binding.target.mk b/test/addons-napi/4_object_factory/build/binding.target.mk
deleted file mode 100644
index a707bab99ab136..00000000000000
--- a/test/addons-napi/4_object_factory/build/binding.target.mk
+++ /dev/null
@@ -1,178 +0,0 @@
-# This file is generated by gyp; do not edit.
-
-TOOLSET := target
-TARGET := binding
-DEFS_Debug := \
- '-DNODE_GYP_MODULE_NAME=binding' \
- '-DUSING_UV_SHARED=1' \
- '-DUSING_V8_SHARED=1' \
- '-DV8_DEPRECATION_WARNINGS=1' \
- '-D_DARWIN_USE_64_BIT_INODE=1' \
- '-D_LARGEFILE_SOURCE' \
- '-D_FILE_OFFSET_BITS=64' \
- '-DBUILDING_NODE_EXTENSION' \
- '-DDEBUG' \
- '-D_DEBUG' \
- '-DV8_ENABLE_CHECKS'
-
-# Flags passed to all source files.
-CFLAGS_Debug := \
- -O0 \
- -gdwarf-2 \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -Wall \
- -Wendif-labels \
- -W \
- -Wno-unused-parameter
-
-# Flags passed to only C files.
-CFLAGS_C_Debug := \
- -fno-strict-aliasing
-
-# Flags passed to only C++ files.
-CFLAGS_CC_Debug := \
- -std=gnu++0x \
- -stdlib=libc++ \
- -fno-rtti \
- -fno-exceptions \
- -fno-threadsafe-statics \
- -fno-strict-aliasing
-
-# Flags passed to only ObjC files.
-CFLAGS_OBJC_Debug :=
-
-# Flags passed to only ObjC++ files.
-CFLAGS_OBJCC_Debug :=
-
-INCS_Debug := \
- -I/Users/trott/io.js/include/node \
- -I/Users/trott/io.js/src \
- -I/Users/trott/io.js/deps/uv/include \
- -I/Users/trott/io.js/deps/v8/include
-
-DEFS_Release := \
- '-DNODE_GYP_MODULE_NAME=binding' \
- '-DUSING_UV_SHARED=1' \
- '-DUSING_V8_SHARED=1' \
- '-DV8_DEPRECATION_WARNINGS=1' \
- '-D_DARWIN_USE_64_BIT_INODE=1' \
- '-D_LARGEFILE_SOURCE' \
- '-D_FILE_OFFSET_BITS=64' \
- '-DBUILDING_NODE_EXTENSION'
-
-# Flags passed to all source files.
-CFLAGS_Release := \
- -Os \
- -gdwarf-2 \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -Wall \
- -Wendif-labels \
- -W \
- -Wno-unused-parameter
-
-# Flags passed to only C files.
-CFLAGS_C_Release := \
- -fno-strict-aliasing
-
-# Flags passed to only C++ files.
-CFLAGS_CC_Release := \
- -std=gnu++0x \
- -stdlib=libc++ \
- -fno-rtti \
- -fno-exceptions \
- -fno-threadsafe-statics \
- -fno-strict-aliasing
-
-# Flags passed to only ObjC files.
-CFLAGS_OBJC_Release :=
-
-# Flags passed to only ObjC++ files.
-CFLAGS_OBJCC_Release :=
-
-INCS_Release := \
- -I/Users/trott/io.js/include/node \
- -I/Users/trott/io.js/src \
- -I/Users/trott/io.js/deps/uv/include \
- -I/Users/trott/io.js/deps/v8/include
-
-OBJS := \
- $(obj).target/$(TARGET)/binding.o
-
-# Add to the list of files we specially track dependencies for.
-all_deps += $(OBJS)
-
-# CFLAGS et al overrides must be target-local.
-# See "Target-specific Variable Values" in the GNU Make manual.
-$(OBJS): TOOLSET := $(TOOLSET)
-$(OBJS): GYP_CFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE))
-$(OBJS): GYP_CXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE))
-$(OBJS): GYP_OBJCFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) $(CFLAGS_OBJC_$(BUILDTYPE))
-$(OBJS): GYP_OBJCXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) $(CFLAGS_OBJCC_$(BUILDTYPE))
-
-# Suffix rules, putting all outputs into $(obj).
-
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-# Try building from generated source, too.
-
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-# End of this set of suffix rules
-### Rules for final target.
-LDFLAGS_Debug := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -L$(builddir) \
- -stdlib=libc++
-
-LIBTOOLFLAGS_Debug := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first
-
-LDFLAGS_Release := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -L$(builddir) \
- -stdlib=libc++
-
-LIBTOOLFLAGS_Release := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first
-
-LIBS :=
-
-$(builddir)/binding.node: GYP_LDFLAGS := $(LDFLAGS_$(BUILDTYPE))
-$(builddir)/binding.node: LIBS := $(LIBS)
-$(builddir)/binding.node: GYP_LIBTOOLFLAGS := $(LIBTOOLFLAGS_$(BUILDTYPE))
-$(builddir)/binding.node: TOOLSET := $(TOOLSET)
-$(builddir)/binding.node: $(OBJS) FORCE_DO_CMD
- $(call do_cmd,solink_module)
-
-all_deps += $(builddir)/binding.node
-# Add target alias
-.PHONY: binding
-binding: $(builddir)/binding.node
-
-# Short alias for building this executable.
-.PHONY: binding.node
-binding.node: $(builddir)/binding.node
-
-# Add executable to "all" target.
-.PHONY: all
-all: $(builddir)/binding.node
-
diff --git a/test/addons-napi/4_object_factory/build/config.gypi b/test/addons-napi/4_object_factory/build/config.gypi
deleted file mode 100644
index 6a35cfccbb772b..00000000000000
--- a/test/addons-napi/4_object_factory/build/config.gypi
+++ /dev/null
@@ -1,70 +0,0 @@
-# Do not edit. File was generated by node-gyp's "configure" step
-{
- "target_defaults": {
- "cflags": [],
- "default_configuration": "Release",
- "defines": [],
- "include_dirs": [],
- "libraries": []
- },
- "variables": {
- "asan": 0,
- "coverage": "false",
- "debug_devtools": "node",
- "debug_http2": "false",
- "debug_nghttp2": "false",
- "force_dynamic_crt": 0,
- "host_arch": "x64",
- "icu_data_file": "icudt59l.dat",
- "icu_data_in": "../../deps/icu-small/source/data/in/icudt59l.dat",
- "icu_endianness": "l",
- "icu_gyp_path": "tools/icu/icu-generic.gyp",
- "icu_locales": "en,root",
- "icu_path": "deps/icu-small",
- "icu_small": "true",
- "icu_ver_major": "59",
- "llvm_version": 0,
- "node_byteorder": "little",
- "node_enable_d8": "false",
- "node_enable_v8_vtunejit": "false",
- "node_install_npm": "true",
- "node_module_version": 58,
- "node_no_browser_globals": "false",
- "node_prefix": "/usr/local",
- "node_release_urlbase": "",
- "node_shared": "false",
- "node_shared_cares": "false",
- "node_shared_http_parser": "false",
- "node_shared_libuv": "false",
- "node_shared_openssl": "false",
- "node_shared_zlib": "false",
- "node_tag": "",
- "node_use_bundled_v8": "true",
- "node_use_dtrace": "true",
- "node_use_etw": "false",
- "node_use_lttng": "false",
- "node_use_openssl": "true",
- "node_use_perfctr": "false",
- "node_use_v8_platform": "true",
- "node_without_node_options": "false",
- "openssl_fips": "",
- "openssl_no_asm": 0,
- "shlib_suffix": "58.dylib",
- "target_arch": "x64",
- "uv_parent_path": "/deps/uv/",
- "uv_use_dtrace": "true",
- "v8_enable_gdbjit": 0,
- "v8_enable_i18n_support": 1,
- "v8_enable_inspector": 1,
- "v8_no_strict_aliasing": 1,
- "v8_optimized_debug": 0,
- "v8_promise_internal_field_count": 1,
- "v8_random_seed": 0,
- "v8_trace_maps": 0,
- "v8_use_snapshot": "true",
- "want_separate_host_toolset": 0,
- "xcode_version": "8.0",
- "nodedir": "/Users/trott/io.js",
- "standalone_static_library": 1
- }
-}
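
The config.gypi removed above is, like other gyp input files, a Python-literal dictionary preceded by comment lines, so it can be read back without a JSON parser. A small sketch, assuming a config.gypi in the current directory:

import ast

def load_gypi(path):
    with open(path) as f:
        # Skip comment lines such as the generated "Do not edit" header; the
        # remainder is a Python literal that ast.literal_eval parses safely.
        text = ''.join(line for line in f if not line.lstrip().startswith('#'))
    return ast.literal_eval(text)

config = load_gypi('config.gypi')
print(config['variables']['target_arch'])  # e.g. 'x64'
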
diff --git a/test/addons-napi/4_object_factory/build/gyp-mac-tool b/test/addons-napi/4_object_factory/build/gyp-mac-tool
deleted file mode 100755
index 8ef02b0493a003..00000000000000
--- a/test/addons-napi/4_object_factory/build/gyp-mac-tool
+++ /dev/null
@@ -1,611 +0,0 @@
-#!/usr/bin/env python
-# Generated by gyp. Do not edit.
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Utility functions to perform Xcode-style build steps.
-
-These functions are executed via gyp-mac-tool when using the Makefile generator.
-"""
-
-import fcntl
-import fnmatch
-import glob
-import json
-import os
-import plistlib
-import re
-import shutil
-import string
-import subprocess
-import sys
-import tempfile
-
-
-def main(args):
- executor = MacTool()
- exit_code = executor.Dispatch(args)
- if exit_code is not None:
- sys.exit(exit_code)
-
-
-class MacTool(object):
- """This class performs all the Mac tooling steps. The methods can either be
- executed directly, or dispatched from an argument list."""
-
- def Dispatch(self, args):
- """Dispatches a string command to a method."""
- if len(args) < 1:
- raise Exception("Not enough arguments")
-
- method = "Exec%s" % self._CommandifyName(args[0])
- return getattr(self, method)(*args[1:])
-
- def _CommandifyName(self, name_string):
- """Transforms a tool name like copy-info-plist to CopyInfoPlist"""
- return name_string.title().replace('-', '')
-
- def ExecCopyBundleResource(self, source, dest, convert_to_binary):
- """Copies a resource file to the bundle/Resources directory, performing any
- necessary compilation on each resource."""
- extension = os.path.splitext(source)[1].lower()
- if os.path.isdir(source):
- # Copy tree.
- # TODO(thakis): This copies file attributes like mtime, while the
- # single-file branch below doesn't. This should probably be changed to
- # be consistent with the single-file branch.
- if os.path.exists(dest):
- shutil.rmtree(dest)
- shutil.copytree(source, dest)
- elif extension == '.xib':
- return self._CopyXIBFile(source, dest)
- elif extension == '.storyboard':
- return self._CopyXIBFile(source, dest)
- elif extension == '.strings':
- self._CopyStringsFile(source, dest, convert_to_binary)
- else:
- shutil.copy(source, dest)
-
- def _CopyXIBFile(self, source, dest):
- """Compiles a XIB file with ibtool into a binary plist in the bundle."""
-
- # ibtool sometimes crashes with relative paths. See crbug.com/314728.
- base = os.path.dirname(os.path.realpath(__file__))
- if os.path.relpath(source):
- source = os.path.join(base, source)
- if os.path.relpath(dest):
- dest = os.path.join(base, dest)
-
- args = ['xcrun', 'ibtool', '--errors', '--warnings', '--notices',
- '--output-format', 'human-readable-text', '--compile', dest, source]
- ibtool_section_re = re.compile(r'/\*.*\*/')
- ibtool_re = re.compile(r'.*note:.*is clipping its content')
- ibtoolout = subprocess.Popen(args, stdout=subprocess.PIPE)
- current_section_header = None
- for line in ibtoolout.stdout:
- if ibtool_section_re.match(line):
- current_section_header = line
- elif not ibtool_re.match(line):
- if current_section_header:
- sys.stdout.write(current_section_header)
- current_section_header = None
- sys.stdout.write(line)
- return ibtoolout.returncode
-
- def _ConvertToBinary(self, dest):
- subprocess.check_call([
- 'xcrun', 'plutil', '-convert', 'binary1', '-o', dest, dest])
-
- def _CopyStringsFile(self, source, dest, convert_to_binary):
- """Copies a .strings file using iconv to reconvert the input into UTF-16."""
- input_code = self._DetectInputEncoding(source) or "UTF-8"
-
- # Xcode's CpyCopyStringsFile / builtin-copyStrings seems to call
- # CFPropertyListCreateFromXMLData() behind the scenes; at least it prints
- # CFPropertyListCreateFromXMLData(): Old-style plist parser: missing
- # semicolon in dictionary.
- # on invalid files. Do the same kind of validation.
- import CoreFoundation
- s = open(source, 'rb').read()
- d = CoreFoundation.CFDataCreate(None, s, len(s))
- _, error = CoreFoundation.CFPropertyListCreateFromXMLData(None, d, 0, None)
- if error:
- return
-
- fp = open(dest, 'wb')
- fp.write(s.decode(input_code).encode('UTF-16'))
- fp.close()
-
- if convert_to_binary == 'True':
- self._ConvertToBinary(dest)
-
- def _DetectInputEncoding(self, file_name):
- """Reads the first few bytes from file_name and tries to guess the text
- encoding. Returns None as a guess if it can't detect it."""
- fp = open(file_name, 'rb')
- try:
- header = fp.read(3)
- except Exception:
- fp.close()
- return None
- fp.close()
- if header.startswith("\xFE\xFF"):
- return "UTF-16"
- elif header.startswith("\xFF\xFE"):
- return "UTF-16"
- elif header.startswith("\xEF\xBB\xBF"):
- return "UTF-8"
- else:
- return None
-
- def ExecCopyInfoPlist(self, source, dest, convert_to_binary, *keys):
- """Copies the |source| Info.plist to the destination directory |dest|."""
- # Read the source Info.plist into memory.
- fd = open(source, 'r')
- lines = fd.read()
- fd.close()
-
- # Insert synthesized key/value pairs (e.g. BuildMachineOSBuild).
- plist = plistlib.readPlistFromString(lines)
- if keys:
- plist = dict(plist.items() + json.loads(keys[0]).items())
- lines = plistlib.writePlistToString(plist)
-
- # Go through all the environment variables and replace references to them
- # in the file.
- IDENT_RE = re.compile(r'[/\s]')
- for key in os.environ:
- if key.startswith('_'):
- continue
- evar = '${%s}' % key
- evalue = os.environ[key]
- lines = string.replace(lines, evar, evalue)
-
- # Xcode supports various suffixes on environment variables, which are
- # all undocumented. :rfc1034identifier is used in the standard project
- # template these days, and :identifier was used earlier. They are used to
- # convert non-url characters into things that look like valid urls --
- # except that the replacement character for :identifier, '_' isn't valid
- # in a URL either -- oops, hence :rfc1034identifier was born.
- evar = '${%s:identifier}' % key
- evalue = IDENT_RE.sub('_', os.environ[key])
- lines = string.replace(lines, evar, evalue)
-
- evar = '${%s:rfc1034identifier}' % key
- evalue = IDENT_RE.sub('-', os.environ[key])
- lines = string.replace(lines, evar, evalue)
-
- # Remove any keys with values that haven't been replaced.
- lines = lines.split('\n')
- for i in range(len(lines)):
- if lines[i].strip().startswith("${"):
- lines[i] = None
- lines[i - 1] = None
- lines = '\n'.join(filter(lambda x: x is not None, lines))
-
- # Write out the file with variables replaced.
- fd = open(dest, 'w')
- fd.write(lines)
- fd.close()
-
- # Now write out the PkgInfo file, since the Info.plist file has been
- # "compiled".
- self._WritePkgInfo(dest)
-
- if convert_to_binary == 'True':
- self._ConvertToBinary(dest)
-
- def _WritePkgInfo(self, info_plist):
- """This writes the PkgInfo file from the data stored in Info.plist."""
- plist = plistlib.readPlist(info_plist)
- if not plist:
- return
-
- # Only create PkgInfo for executable types.
- package_type = plist['CFBundlePackageType']
- if package_type != 'APPL':
- return
-
- # The format of PkgInfo is eight characters, representing the bundle type
- # and bundle signature, each four characters. If that is missing, four
- # '?' characters are used instead.
- signature_code = plist.get('CFBundleSignature', '????')
- if len(signature_code) != 4: # Wrong length resets everything, too.
- signature_code = '?' * 4
-
- dest = os.path.join(os.path.dirname(info_plist), 'PkgInfo')
- fp = open(dest, 'w')
- fp.write('%s%s' % (package_type, signature_code))
- fp.close()
-
- def ExecFlock(self, lockfile, *cmd_list):
- """Emulates the most basic behavior of Linux's flock(1)."""
- # Rely on exception handling to report errors.
- fd = os.open(lockfile, os.O_RDONLY|os.O_NOCTTY|os.O_CREAT, 0o666)
- fcntl.flock(fd, fcntl.LOCK_EX)
- return subprocess.call(cmd_list)
-
- def ExecFilterLibtool(self, *cmd_list):
- """Calls libtool and filters out '/path/to/libtool: file: foo.o has no
- symbols'."""
- libtool_re = re.compile(r'^.*libtool: file: .* has no symbols$')
- libtool_re5 = re.compile(
- r'^.*libtool: warning for library: ' +
- r'.* the table of contents is empty ' +
- r'\(no object file members in the library define global symbols\)$')
- env = os.environ.copy()
- # Ref:
- # http://www.opensource.apple.com/source/cctools/cctools-809/misc/libtool.c
- # The problem with this flag is that it resets the mtime on the file to
- # epoch=0, e.g. 1970-1-1 or 1969-12-31 depending on timezone.
- env['ZERO_AR_DATE'] = '1'
- libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env)
- _, err = libtoolout.communicate()
- for line in err.splitlines():
- if not libtool_re.match(line) and not libtool_re5.match(line):
- print >>sys.stderr, line
- # Unconditionally touch the output .a file on the command line if present
- # and the command succeeded. A bit hacky.
- if not libtoolout.returncode:
- for i in range(len(cmd_list) - 1):
- if cmd_list[i] == "-o" and cmd_list[i+1].endswith('.a'):
- os.utime(cmd_list[i+1], None)
- break
- return libtoolout.returncode
-
- def ExecPackageFramework(self, framework, version):
- """Takes a path to Something.framework and the Current version of that and
- sets up all the symlinks."""
- # Find the name of the binary based on the part before the ".framework".
- binary = os.path.basename(framework).split('.')[0]
-
- CURRENT = 'Current'
- RESOURCES = 'Resources'
- VERSIONS = 'Versions'
-
- if not os.path.exists(os.path.join(framework, VERSIONS, version, binary)):
- # Binary-less frameworks don't seem to contain symlinks (see e.g.
- # chromium's out/Debug/org.chromium.Chromium.manifest/ bundle).
- return
-
- # Move into the framework directory to set the symlinks correctly.
- pwd = os.getcwd()
- os.chdir(framework)
-
- # Set up the Current version.
- self._Relink(version, os.path.join(VERSIONS, CURRENT))
-
- # Set up the root symlinks.
- self._Relink(os.path.join(VERSIONS, CURRENT, binary), binary)
- self._Relink(os.path.join(VERSIONS, CURRENT, RESOURCES), RESOURCES)
-
- # Back to where we were before!
- os.chdir(pwd)
-
- def _Relink(self, dest, link):
- """Creates a symlink to |dest| named |link|. If |link| already exists,
- it is overwritten."""
- if os.path.lexists(link):
- os.remove(link)
- os.symlink(dest, link)
-
- def ExecCompileXcassets(self, keys, *inputs):
- """Compiles multiple .xcassets files into a single .car file.
-
- This invokes 'actool' to compile all the input .xcassets files. The
- |keys| argument is a json-encoded dictionary of extra arguments to
- pass to 'actool' when the asset catalogs contain an application icon
- or a launch image.
-
- Note that 'actool' does not create the Assets.car file if the asset
- catalogs do not contain an imageset.
- """
- command_line = [
- 'xcrun', 'actool', '--output-format', 'human-readable-text',
- '--compress-pngs', '--notices', '--warnings', '--errors',
- ]
- is_iphone_target = 'IPHONEOS_DEPLOYMENT_TARGET' in os.environ
- if is_iphone_target:
- platform = os.environ['CONFIGURATION'].split('-')[-1]
- if platform not in ('iphoneos', 'iphonesimulator'):
- platform = 'iphonesimulator'
- command_line.extend([
- '--platform', platform, '--target-device', 'iphone',
- '--target-device', 'ipad', '--minimum-deployment-target',
- os.environ['IPHONEOS_DEPLOYMENT_TARGET'], '--compile',
- os.path.abspath(os.environ['CONTENTS_FOLDER_PATH']),
- ])
- else:
- command_line.extend([
- '--platform', 'macosx', '--target-device', 'mac',
- '--minimum-deployment-target', os.environ['MACOSX_DEPLOYMENT_TARGET'],
- '--compile',
- os.path.abspath(os.environ['UNLOCALIZED_RESOURCES_FOLDER_PATH']),
- ])
- if keys:
- keys = json.loads(keys)
- for key, value in keys.iteritems():
- arg_name = '--' + key
- if isinstance(value, bool):
- if value:
- command_line.append(arg_name)
- elif isinstance(value, list):
- for v in value:
- command_line.append(arg_name)
- command_line.append(str(v))
- else:
- command_line.append(arg_name)
- command_line.append(str(value))
- # Note: actool crashes if input paths are relative, so use os.path.abspath
- # to get absolute path names for the inputs.
- command_line.extend(map(os.path.abspath, inputs))
- subprocess.check_call(command_line)
-
- def ExecMergeInfoPlist(self, output, *inputs):
- """Merge multiple .plist files into a single .plist file."""
- merged_plist = {}
- for path in inputs:
- plist = self._LoadPlistMaybeBinary(path)
- self._MergePlist(merged_plist, plist)
- plistlib.writePlist(merged_plist, output)
-
- def ExecCodeSignBundle(self, key, resource_rules, entitlements, provisioning):
- """Code sign a bundle.
-
- This function tries to code sign an iOS bundle, following the same
- algorithm as Xcode:
- 1. copy ResourceRules.plist from the user or the SDK into the bundle,
- 2. pick the provisioning profile that best matches the bundle identifier,
- and copy it into the bundle as embedded.mobileprovision,
- 3. copy Entitlements.plist from the user or the SDK next to the bundle,
- 4. code sign the bundle.
- """
- resource_rules_path = self._InstallResourceRules(resource_rules)
- substitutions, overrides = self._InstallProvisioningProfile(
- provisioning, self._GetCFBundleIdentifier())
- entitlements_path = self._InstallEntitlements(
- entitlements, substitutions, overrides)
- subprocess.check_call([
- 'codesign', '--force', '--sign', key, '--resource-rules',
- resource_rules_path, '--entitlements', entitlements_path,
- os.path.join(
- os.environ['TARGET_BUILD_DIR'],
- os.environ['FULL_PRODUCT_NAME'])])
-
- def _InstallResourceRules(self, resource_rules):
- """Installs ResourceRules.plist from user or SDK into the bundle.
-
- Args:
- resource_rules: string, optional, path to the ResourceRules.plist file
- to use, default to "${SDKROOT}/ResourceRules.plist"
-
- Returns:
- Path to the copy of ResourceRules.plist in the bundle.
- """
- source_path = resource_rules
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['CONTENTS_FOLDER_PATH'],
- 'ResourceRules.plist')
- if not source_path:
- source_path = os.path.join(
- os.environ['SDKROOT'], 'ResourceRules.plist')
- shutil.copy2(source_path, target_path)
- return target_path
-
- def _InstallProvisioningProfile(self, profile, bundle_identifier):
- """Installs embedded.mobileprovision into the bundle.
-
- Args:
- profile: string, optional, short name of the .mobileprovision file
- to use, if empty or the file is missing, the best file installed
- will be used
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
-
- Returns:
- A tuple containing two dictionaries: variable substitutions and values
- to override when generating the entitlements file.
- """
- source_path, provisioning_data, team_id = self._FindProvisioningProfile(
- profile, bundle_identifier)
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['CONTENTS_FOLDER_PATH'],
- 'embedded.mobileprovision')
- shutil.copy2(source_path, target_path)
- substitutions = self._GetSubstitutions(bundle_identifier, team_id + '.')
- return substitutions, provisioning_data['Entitlements']
-
- def _FindProvisioningProfile(self, profile, bundle_identifier):
- """Finds the .mobileprovision file to use for signing the bundle.
-
- Checks all the installed provisioning profiles (or, if the user specified
- the PROVISIONING_PROFILE variable, only that one) and selects the most
- specific one that corresponds to the bundle identifier.
-
- Args:
- profile: string, optional, short name of the .mobileprovision file
- to use, if empty or the file is missing, the best file installed
- will be used
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
-
- Returns:
- A tuple of the path to the selected provisioning profile, the data of
- the embedded plist in the provisioning profile and the team identifier
- to use for code signing.
-
- Raises:
- SystemExit: if no .mobileprovision can be used to sign the bundle.
- """
- profiles_dir = os.path.join(
- os.environ['HOME'], 'Library', 'MobileDevice', 'Provisioning Profiles')
- if not os.path.isdir(profiles_dir):
- print >>sys.stderr, (
- 'cannot find mobile provisioning for %s' % bundle_identifier)
- sys.exit(1)
- provisioning_profiles = None
- if profile:
- profile_path = os.path.join(profiles_dir, profile + '.mobileprovision')
- if os.path.exists(profile_path):
- provisioning_profiles = [profile_path]
- if not provisioning_profiles:
- provisioning_profiles = glob.glob(
- os.path.join(profiles_dir, '*.mobileprovision'))
- valid_provisioning_profiles = {}
- for profile_path in provisioning_profiles:
- profile_data = self._LoadProvisioningProfile(profile_path)
- app_id_pattern = profile_data.get(
- 'Entitlements', {}).get('application-identifier', '')
- for team_identifier in profile_data.get('TeamIdentifier', []):
- app_id = '%s.%s' % (team_identifier, bundle_identifier)
- if fnmatch.fnmatch(app_id, app_id_pattern):
- valid_provisioning_profiles[app_id_pattern] = (
- profile_path, profile_data, team_identifier)
- if not valid_provisioning_profiles:
- print >>sys.stderr, (
- 'cannot find mobile provisioning for %s' % bundle_identifier)
- sys.exit(1)
- # If the user has multiple provisioning profiles installed that can be
- # used for ${bundle_identifier}, pick the most specific one (ie. the
- # provisioning profile whose pattern is the longest).
- selected_key = max(valid_provisioning_profiles, key=lambda v: len(v))
- return valid_provisioning_profiles[selected_key]
-
- def _LoadProvisioningProfile(self, profile_path):
- """Extracts the plist embedded in a provisioning profile.
-
- Args:
- profile_path: string, path to the .mobileprovision file
-
- Returns:
- Content of the plist embedded in the provisioning profile as a dictionary.
- """
- with tempfile.NamedTemporaryFile() as temp:
- subprocess.check_call([
- 'security', 'cms', '-D', '-i', profile_path, '-o', temp.name])
- return self._LoadPlistMaybeBinary(temp.name)
-
- def _MergePlist(self, merged_plist, plist):
- """Merge |plist| into |merged_plist|."""
- for key, value in plist.iteritems():
- if isinstance(value, dict):
- merged_value = merged_plist.get(key, {})
- if isinstance(merged_value, dict):
- self._MergePlist(merged_value, value)
- merged_plist[key] = merged_value
- else:
- merged_plist[key] = value
- else:
- merged_plist[key] = value
-
- def _LoadPlistMaybeBinary(self, plist_path):
- """Loads into a memory a plist possibly encoded in binary format.
-
- This is a wrapper around plistlib.readPlist that tries to convert the
- plist to the XML format if it can't be parsed (assuming that it is in
- the binary format).
-
- Args:
- plist_path: string, path to a plist file, in XML or binary format
-
- Returns:
- Content of the plist as a dictionary.
- """
- try:
- # First, try to read the file using plistlib, which only supports XML,
- # and if an exception is raised, convert a temporary copy to XML and
- # load that copy.
- return plistlib.readPlist(plist_path)
- except:
- pass
- with tempfile.NamedTemporaryFile() as temp:
- shutil.copy2(plist_path, temp.name)
- subprocess.check_call(['plutil', '-convert', 'xml1', temp.name])
- return plistlib.readPlist(temp.name)
-
- def _GetSubstitutions(self, bundle_identifier, app_identifier_prefix):
- """Constructs a dictionary of variable substitutions for Entitlements.plist.
-
- Args:
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
- app_identifier_prefix: string, value for AppIdentifierPrefix
-
- Returns:
- Dictionary of substitutions to apply when generating Entitlements.plist.
- """
- return {
- 'CFBundleIdentifier': bundle_identifier,
- 'AppIdentifierPrefix': app_identifier_prefix,
- }
-
- def _GetCFBundleIdentifier(self):
- """Extracts CFBundleIdentifier value from Info.plist in the bundle.
-
- Returns:
- Value of CFBundleIdentifier in the Info.plist located in the bundle.
- """
- info_plist_path = os.path.join(
- os.environ['TARGET_BUILD_DIR'],
- os.environ['INFOPLIST_PATH'])
- info_plist_data = self._LoadPlistMaybeBinary(info_plist_path)
- return info_plist_data['CFBundleIdentifier']
-
- def _InstallEntitlements(self, entitlements, substitutions, overrides):
- """Generates and install the ${BundleName}.xcent entitlements file.
-
- Expands variables "$(variable)" pattern in the source entitlements file,
- add extra entitlements defined in the .mobileprovision file and the copy
- the generated plist to "${BundlePath}.xcent".
-
- Args:
- entitlements: string, optional, path to the Entitlements.plist template
- to use, defaults to "${SDKROOT}/Entitlements.plist"
- substitutions: dictionary, variable substitutions
- overrides: dictionary, values to add to the entitlements
-
- Returns:
- Path to the generated entitlements file.
- """
- source_path = entitlements
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['PRODUCT_NAME'] + '.xcent')
- if not source_path:
- source_path = os.path.join(
- os.environ['SDKROOT'],
- 'Entitlements.plist')
- shutil.copy2(source_path, target_path)
- data = self._LoadPlistMaybeBinary(target_path)
- data = self._ExpandVariables(data, substitutions)
- if overrides:
- for key in overrides:
- if key not in data:
- data[key] = overrides[key]
- plistlib.writePlist(data, target_path)
- return target_path
-
- def _ExpandVariables(self, data, substitutions):
- """Expands variables "$(variable)" in data.
-
- Args:
- data: object, can be either string, list or dictionary
- substitutions: dictionary, variable substitutions to perform
-
- Returns:
- Copy of data where each reference to "$(variable)" has been replaced
- by the corresponding value found in substitutions, or left intact if
- the key was not found.
- """
- if isinstance(data, str):
- for key, value in substitutions.iteritems():
- data = data.replace('$(%s)' % key, value)
- return data
- if isinstance(data, list):
- return [self._ExpandVariables(v, substitutions) for v in data]
- if isinstance(data, dict):
- return {k: self._ExpandVariables(data[k], substitutions) for k in data}
- return data
-
-if __name__ == '__main__':
- sys.exit(main(sys.argv[1:]))
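
The entitlements step in the tool removed above combines several helpers: _GetSubstitutions builds the replacement map, _ExpandVariables rewrites "$(variable)" references throughout the template, and keys supplied by the provisioning profile are only added when the template does not already define them. A condensed sketch of that flow with hypothetical values (no SDK template or real profile is read):

def expand(data, substitutions):
    # Strings get textual replacement; lists and dicts are rebuilt recursively.
    if isinstance(data, str):
        for key, value in substitutions.items():
            data = data.replace('$(%s)' % key, value)
        return data
    if isinstance(data, list):
        return [expand(v, substitutions) for v in data]
    if isinstance(data, dict):
        return dict((k, expand(v, substitutions)) for k, v in data.items())
    return data

substitutions = {'CFBundleIdentifier': 'org.example.app',
                 'AppIdentifierPrefix': 'ABCDE12345.'}
template = {'application-identifier': '$(AppIdentifierPrefix)$(CFBundleIdentifier)'}
overrides = {'get-task-allow': True,
             'application-identifier': 'ABCDE12345.*'}  # ignored: already set

entitlements = expand(template, substitutions)
for key, value in overrides.items():
    entitlements.setdefault(key, value)  # only fill keys missing from the template
print(entitlements)
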
diff --git a/test/addons-napi/5_function_factory/build/Makefile b/test/addons-napi/5_function_factory/build/Makefile
deleted file mode 100644
index 6b2c6c7e501a65..00000000000000
--- a/test/addons-napi/5_function_factory/build/Makefile
+++ /dev/null
@@ -1,342 +0,0 @@
-# We borrow heavily from the kernel build setup, though we are simpler since
-# we don't have Kconfig tweaking settings on us.
-
-# The implicit make rules have it looking for RCS files, among other things.
-# We instead explicitly write all the rules we care about.
-# It's even quicker (saves ~200ms) to pass -r on the command line.
-MAKEFLAGS=-r
-
-# The source directory tree.
-srcdir := ..
-abs_srcdir := $(abspath $(srcdir))
-
-# The name of the builddir.
-builddir_name ?= .
-
-# The V=1 flag on command line makes us verbosely print command lines.
-ifdef V
- quiet=
-else
- quiet=quiet_
-endif
-
-# Specify BUILDTYPE=Release on the command line for a release build.
-BUILDTYPE ?= Release
-
-# Directory all our build output goes into.
-# Note that this must be two directories beneath src/ for unit tests to pass,
-# as they reach into the src/ directory for data with relative paths.
-builddir ?= $(builddir_name)/$(BUILDTYPE)
-abs_builddir := $(abspath $(builddir))
-depsdir := $(builddir)/.deps
-
-# Object output directory.
-obj := $(builddir)/obj
-abs_obj := $(abspath $(obj))
-
-# We build up a list of every single one of the targets so we can slurp in the
-# generated dependency rule Makefiles in one pass.
-all_deps :=
-
-
-
-CC.target ?= $(CC)
-CFLAGS.target ?= $(CPPFLAGS) $(CFLAGS)
-CXX.target ?= $(CXX)
-CXXFLAGS.target ?= $(CPPFLAGS) $(CXXFLAGS)
-LINK.target ?= $(LINK)
-LDFLAGS.target ?= $(LDFLAGS)
-AR.target ?= $(AR)
-
-# C++ apps need to be linked with g++.
-LINK ?= $(CXX.target)
-
-# TODO(evan): move all cross-compilation logic to gyp-time so we don't need
-# to replicate this environment fallback in make as well.
-CC.host ?= gcc
-CFLAGS.host ?= $(CPPFLAGS_host) $(CFLAGS_host)
-CXX.host ?= g++
-CXXFLAGS.host ?= $(CPPFLAGS_host) $(CXXFLAGS_host)
-LINK.host ?= $(CXX.host)
-LDFLAGS.host ?=
-AR.host ?= ar
-
-# Define a dir function that can handle spaces.
-# http://www.gnu.org/software/make/manual/make.html#Syntax-of-Functions
-# "leading spaces cannot appear in the text of the first argument as written.
-# These characters can be put into the argument value by variable substitution."
-empty :=
-space := $(empty) $(empty)
-
-# http://stackoverflow.com/questions/1189781/using-make-dir-or-notdir-on-a-path-with-spaces
-replace_spaces = $(subst $(space),?,$1)
-unreplace_spaces = $(subst ?,$(space),$1)
-dirx = $(call unreplace_spaces,$(dir $(call replace_spaces,$1)))
-
-# Flags to make gcc output dependency info. Note that you need to be
-# careful here to use the flags that ccache and distcc can understand.
-# We write to a dep file on the side first and then rename at the end
-# so we can't end up with a broken dep file.
-depfile = $(depsdir)/$(call replace_spaces,$@).d
-DEPFLAGS = -MMD -MF $(depfile).raw
-
-# We have to fixup the deps output in a few ways.
-# (1) the file output should mention the proper .o file.
-# ccache or distcc lose the path to the target, so we convert a rule of
-# the form:
-# foobar.o: DEP1 DEP2
-# into
-# path/to/foobar.o: DEP1 DEP2
-# (2) we want missing files not to cause us to fail to build.
-# We want to rewrite
-# foobar.o: DEP1 DEP2 \
-# DEP3
-# to
-# DEP1:
-# DEP2:
-# DEP3:
-# so if the files are missing, they're just considered phony rules.
-# We have to do some pretty insane escaping to get those backslashes
-# and dollar signs past make, the shell, and sed at the same time.
-# Doesn't work with spaces, but that's fine: .d files have spaces in
-# their names replaced with other characters.
-define fixup_dep
-# The depfile may not exist if the input file didn't have any #includes.
-touch $(depfile).raw
-# Fixup path as in (1).
-sed -e "s|^$(notdir $@)|$@|" $(depfile).raw >> $(depfile)
-# Add extra rules as in (2).
-# We remove slashes and replace spaces with new lines;
-# remove blank lines;
-# delete the first line and append a colon to the remaining lines.
-sed -e 's|\\||' -e 'y| |\n|' $(depfile).raw |\
- grep -v '^$$' |\
- sed -e 1d -e 's|$$|:|' \
- >> $(depfile)
-rm $(depfile).raw
-endef
-
-# Command definitions:
-# - cmd_foo is the actual command to run;
-# - quiet_cmd_foo is the brief-output summary of the command.
-
-quiet_cmd_cc = CC($(TOOLSET)) $@
-cmd_cc = $(CC.$(TOOLSET)) $(GYP_CFLAGS) $(DEPFLAGS) $(CFLAGS.$(TOOLSET)) -c -o $@ $<
-
-quiet_cmd_cxx = CXX($(TOOLSET)) $@
-cmd_cxx = $(CXX.$(TOOLSET)) $(GYP_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-
-quiet_cmd_objc = CXX($(TOOLSET)) $@
-cmd_objc = $(CC.$(TOOLSET)) $(GYP_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-quiet_cmd_objcxx = CXX($(TOOLSET)) $@
-cmd_objcxx = $(CXX.$(TOOLSET)) $(GYP_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-# Commands for precompiled header files.
-quiet_cmd_pch_c = CXX($(TOOLSET)) $@
-cmd_pch_c = $(CC.$(TOOLSET)) $(GYP_PCH_CFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-quiet_cmd_pch_cc = CXX($(TOOLSET)) $@
-cmd_pch_cc = $(CC.$(TOOLSET)) $(GYP_PCH_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-quiet_cmd_pch_m = CXX($(TOOLSET)) $@
-cmd_pch_m = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $<
-quiet_cmd_pch_mm = CXX($(TOOLSET)) $@
-cmd_pch_mm = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-# gyp-mac-tool is written next to the root Makefile by gyp.
-# Use $(4) for the command, since $(2) and $(3) are used as flag by do_cmd
-# already.
-quiet_cmd_mac_tool = MACTOOL $(4) $<
-cmd_mac_tool = ./gyp-mac-tool $(4) $< "$@"
-
-quiet_cmd_mac_package_framework = PACKAGE FRAMEWORK $@
-cmd_mac_package_framework = ./gyp-mac-tool package-framework "$@" $(4)
-
-quiet_cmd_infoplist = INFOPLIST $@
-cmd_infoplist = $(CC.$(TOOLSET)) -E -P -Wno-trigraphs -x c $(INFOPLIST_DEFINES) "$<" -o "$@"
-
-quiet_cmd_touch = TOUCH $@
-cmd_touch = touch $@
-
-quiet_cmd_copy = COPY $@
-# send stderr to /dev/null to ignore messages when linking directories.
-cmd_copy = rm -rf "$@" && cp -af "$<" "$@"
-
-quiet_cmd_alink = LIBTOOL-STATIC $@
-cmd_alink = rm -f $@ && ./gyp-mac-tool filter-libtool libtool $(GYP_LIBTOOLFLAGS) -static -o $@ $(filter %.o,$^)
-
-quiet_cmd_link = LINK($(TOOLSET)) $@
-cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS)
-
-quiet_cmd_solink = SOLINK($(TOOLSET)) $@
-cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS)
-
-quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@
-cmd_solink_module = $(LINK.$(TOOLSET)) -bundle $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS)
-
-
-# Define an escape_quotes function to escape single quotes.
-# This allows us to handle quotes properly as long as we always use
-# use single quotes and escape_quotes.
-escape_quotes = $(subst ','\'',$(1))
-# This comment is here just to include a ' to unconfuse syntax highlighting.
-# Define an escape_vars function to escape '$' variable syntax.
-# This allows us to read/write command lines with shell variables (e.g.
-# $LD_LIBRARY_PATH), without triggering make substitution.
-escape_vars = $(subst $$,$$$$,$(1))
-# Helper that expands to a shell command to echo a string exactly as it is in
-# make. This uses printf instead of echo because printf's behaviour with respect
-# to escape sequences is more portable than echo's across different shells
-# (e.g., dash, bash).
-exact_echo = printf '%s\n' '$(call escape_quotes,$(1))'
-
-# Helper to compare the command we're about to run against the command
-# we logged the last time we ran the command. Produces an empty
-# string (false) when the commands match.
-# Tricky point: Make has no string-equality test function.
-# The kernel uses the following, but it seems like it would have false
-# positives, where one string reordered its arguments.
-# arg_check = $(strip $(filter-out $(cmd_$(1)), $(cmd_$@)) \
-# $(filter-out $(cmd_$@), $(cmd_$(1))))
-# We instead substitute each for the empty string into the other, and
-# say they're equal if both substitutions produce the empty string.
-# .d files contain ? instead of spaces, take that into account.
-command_changed = $(or $(subst $(cmd_$(1)),,$(cmd_$(call replace_spaces,$@))),\
- $(subst $(cmd_$(call replace_spaces,$@)),,$(cmd_$(1))))
-
-# Helper that is non-empty when a prerequisite changes.
-# Normally make does this implicitly, but we force rules to always run
-# so we can check their command lines.
-# $? -- new prerequisites
-# $| -- order-only dependencies
-prereq_changed = $(filter-out FORCE_DO_CMD,$(filter-out $|,$?))
-
-# Helper that executes all postbuilds until one fails.
-define do_postbuilds
- @E=0;\
- for p in $(POSTBUILDS); do\
- eval $$p;\
- E=$$?;\
- if [ $$E -ne 0 ]; then\
- break;\
- fi;\
- done;\
- if [ $$E -ne 0 ]; then\
- rm -rf "$@";\
- exit $$E;\
- fi
-endef
-
-# do_cmd: run a command via the above cmd_foo names, if necessary.
-# Should always run for a given target to handle command-line changes.
-# Second argument, if non-zero, makes it do asm/C/C++ dependency munging.
-# Third argument, if non-zero, makes it do POSTBUILDS processing.
-# Note: We intentionally do NOT call dirx for depfile, since it contains ? for
-# spaces already and dirx strips the ? characters.
-define do_cmd
-$(if $(or $(command_changed),$(prereq_changed)),
- @$(call exact_echo, $($(quiet)cmd_$(1)))
- @mkdir -p "$(call dirx,$@)" "$(dir $(depfile))"
- $(if $(findstring flock,$(word 2,$(cmd_$1))),
- @$(cmd_$(1))
- @echo " $(quiet_cmd_$(1)): Finished",
- @$(cmd_$(1))
- )
- @$(call exact_echo,$(call escape_vars,cmd_$(call replace_spaces,$@) := $(cmd_$(1)))) > $(depfile)
- @$(if $(2),$(fixup_dep))
- $(if $(and $(3), $(POSTBUILDS)),
- $(call do_postbuilds)
- )
-)
-endef
-
-# Declare the "all" target first so it is the default,
-# even though we don't have the deps yet.
-.PHONY: all
-all:
-
-# make looks for ways to re-generate included makefiles, but in our case, we
-# don't have a direct way. Explicitly telling make that it has nothing to do
-# for them makes it go faster.
-%.d: ;
-
-# Use FORCE_DO_CMD to force a target to run. Should be coupled with
-# do_cmd.
-.PHONY: FORCE_DO_CMD
-FORCE_DO_CMD:
-
-TOOLSET := target
-# Suffix rules, putting all outputs into $(obj).
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cpp FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cxx FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.m FORCE_DO_CMD
- @$(call do_cmd,objc,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.mm FORCE_DO_CMD
- @$(call do_cmd,objcxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.S FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.s FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-# Try building from generated source, too.
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cpp FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cxx FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.m FORCE_DO_CMD
- @$(call do_cmd,objc,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.mm FORCE_DO_CMD
- @$(call do_cmd,objcxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.S FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.s FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-$(obj).$(TOOLSET)/%.o: $(obj)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.cpp FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.cxx FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.m FORCE_DO_CMD
- @$(call do_cmd,objc,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.mm FORCE_DO_CMD
- @$(call do_cmd,objcxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.S FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.s FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-
-ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
- $(findstring $(join ^,$(prefix)),\
- $(join ^,binding.target.mk)))),)
- include binding.target.mk
-endif
-
-quiet_cmd_regen_makefile = ACTION Regenerating $@
-cmd_regen_makefile = cd $(srcdir); /Users/trott/io.js/deps/npm/node_modules/node-gyp/gyp/gyp_main.py -fmake --ignore-environment "--toplevel-dir=." -I/Users/trott/io.js/test/addons-napi/5_function_factory/build/config.gypi -I/Users/trott/io.js/deps/npm/node_modules/node-gyp/addon.gypi -I/Users/trott/io.js/common.gypi "--depth=." "-Goutput_dir=." "--generator-output=build" "-Dlibrary=shared_library" "-Dvisibility=default" "-Dnode_root_dir=/Users/trott/io.js" "-Dnode_gyp_dir=/Users/trott/io.js/deps/npm/node_modules/node-gyp" "-Dnode_lib_file=/Users/trott/io.js/$(Configuration)/node.lib" "-Dmodule_root_dir=/Users/trott/io.js/test/addons-napi/5_function_factory" "-Dnode_engine=v8" binding.gyp
-Makefile: $(srcdir)/../../../deps/npm/node_modules/node-gyp/addon.gypi $(srcdir)/build/config.gypi $(srcdir)/binding.gyp $(srcdir)/../../../common.gypi
- $(call do_cmd,regen_makefile)
-
-# "all" is a concatenation of the "all" targets from all the included
-# sub-makefiles. This is just here to clarify.
-all:
-
-# Add in dependency-tracking rules. $(all_deps) is the list of every single
-# target in our tree. Only consider the ones with .d (dependency) info:
-d_files := $(wildcard $(foreach f,$(all_deps),$(depsdir)/$(f).d))
-ifneq ($(d_files),)
- include $(d_files)
-endif
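(Illustration only, not part of the patch.) The generated Makefile deleted above re-runs a rule whenever command_changed or prereq_changed is non-empty. GNU make has no string-equality function, so command_changed substitutes each command line into the other and treats them as equal only when both substitutions come out empty. A rough Python rendering of that trick, using made-up command strings, is:

def commands_match(old_cmd, new_cmd):
    # Mirrors the Makefile's $(subst ...) pair: remove every occurrence of one
    # command from the other. Both results are empty only when each string
    # consists entirely of copies of the other, i.e. when they are identical.
    return old_cmd.replace(new_cmd, '') == '' and new_cmd.replace(old_cmd, '') == ''

assert commands_match('cc -O2 -c foo.c', 'cc -O2 -c foo.c')
assert not commands_match('cc -O2 -c foo.c', 'cc -O0 -c foo.c')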
diff --git a/test/addons-napi/5_function_factory/build/binding.Makefile b/test/addons-napi/5_function_factory/build/binding.Makefile
deleted file mode 100644
index 0556c70f61905f..00000000000000
--- a/test/addons-napi/5_function_factory/build/binding.Makefile
+++ /dev/null
@@ -1,6 +0,0 @@
-# This file is generated by gyp; do not edit.
-
-export builddir_name ?= ./build/.
-.PHONY: all
-all:
- $(MAKE) binding
diff --git a/test/addons-napi/5_function_factory/build/binding.target.mk b/test/addons-napi/5_function_factory/build/binding.target.mk
deleted file mode 100644
index a707bab99ab136..00000000000000
--- a/test/addons-napi/5_function_factory/build/binding.target.mk
+++ /dev/null
@@ -1,178 +0,0 @@
-# This file is generated by gyp; do not edit.
-
-TOOLSET := target
-TARGET := binding
-DEFS_Debug := \
- '-DNODE_GYP_MODULE_NAME=binding' \
- '-DUSING_UV_SHARED=1' \
- '-DUSING_V8_SHARED=1' \
- '-DV8_DEPRECATION_WARNINGS=1' \
- '-D_DARWIN_USE_64_BIT_INODE=1' \
- '-D_LARGEFILE_SOURCE' \
- '-D_FILE_OFFSET_BITS=64' \
- '-DBUILDING_NODE_EXTENSION' \
- '-DDEBUG' \
- '-D_DEBUG' \
- '-DV8_ENABLE_CHECKS'
-
-# Flags passed to all source files.
-CFLAGS_Debug := \
- -O0 \
- -gdwarf-2 \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -Wall \
- -Wendif-labels \
- -W \
- -Wno-unused-parameter
-
-# Flags passed to only C files.
-CFLAGS_C_Debug := \
- -fno-strict-aliasing
-
-# Flags passed to only C++ files.
-CFLAGS_CC_Debug := \
- -std=gnu++0x \
- -stdlib=libc++ \
- -fno-rtti \
- -fno-exceptions \
- -fno-threadsafe-statics \
- -fno-strict-aliasing
-
-# Flags passed to only ObjC files.
-CFLAGS_OBJC_Debug :=
-
-# Flags passed to only ObjC++ files.
-CFLAGS_OBJCC_Debug :=
-
-INCS_Debug := \
- -I/Users/trott/io.js/include/node \
- -I/Users/trott/io.js/src \
- -I/Users/trott/io.js/deps/uv/include \
- -I/Users/trott/io.js/deps/v8/include
-
-DEFS_Release := \
- '-DNODE_GYP_MODULE_NAME=binding' \
- '-DUSING_UV_SHARED=1' \
- '-DUSING_V8_SHARED=1' \
- '-DV8_DEPRECATION_WARNINGS=1' \
- '-D_DARWIN_USE_64_BIT_INODE=1' \
- '-D_LARGEFILE_SOURCE' \
- '-D_FILE_OFFSET_BITS=64' \
- '-DBUILDING_NODE_EXTENSION'
-
-# Flags passed to all source files.
-CFLAGS_Release := \
- -Os \
- -gdwarf-2 \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -Wall \
- -Wendif-labels \
- -W \
- -Wno-unused-parameter
-
-# Flags passed to only C files.
-CFLAGS_C_Release := \
- -fno-strict-aliasing
-
-# Flags passed to only C++ files.
-CFLAGS_CC_Release := \
- -std=gnu++0x \
- -stdlib=libc++ \
- -fno-rtti \
- -fno-exceptions \
- -fno-threadsafe-statics \
- -fno-strict-aliasing
-
-# Flags passed to only ObjC files.
-CFLAGS_OBJC_Release :=
-
-# Flags passed to only ObjC++ files.
-CFLAGS_OBJCC_Release :=
-
-INCS_Release := \
- -I/Users/trott/io.js/include/node \
- -I/Users/trott/io.js/src \
- -I/Users/trott/io.js/deps/uv/include \
- -I/Users/trott/io.js/deps/v8/include
-
-OBJS := \
- $(obj).target/$(TARGET)/binding.o
-
-# Add to the list of files we specially track dependencies for.
-all_deps += $(OBJS)
-
-# CFLAGS et al overrides must be target-local.
-# See "Target-specific Variable Values" in the GNU Make manual.
-$(OBJS): TOOLSET := $(TOOLSET)
-$(OBJS): GYP_CFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE))
-$(OBJS): GYP_CXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE))
-$(OBJS): GYP_OBJCFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) $(CFLAGS_OBJC_$(BUILDTYPE))
-$(OBJS): GYP_OBJCXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) $(CFLAGS_OBJCC_$(BUILDTYPE))
-
-# Suffix rules, putting all outputs into $(obj).
-
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-# Try building from generated source, too.
-
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-# End of this set of suffix rules
-### Rules for final target.
-LDFLAGS_Debug := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -L$(builddir) \
- -stdlib=libc++
-
-LIBTOOLFLAGS_Debug := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first
-
-LDFLAGS_Release := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -L$(builddir) \
- -stdlib=libc++
-
-LIBTOOLFLAGS_Release := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first
-
-LIBS :=
-
-$(builddir)/binding.node: GYP_LDFLAGS := $(LDFLAGS_$(BUILDTYPE))
-$(builddir)/binding.node: LIBS := $(LIBS)
-$(builddir)/binding.node: GYP_LIBTOOLFLAGS := $(LIBTOOLFLAGS_$(BUILDTYPE))
-$(builddir)/binding.node: TOOLSET := $(TOOLSET)
-$(builddir)/binding.node: $(OBJS) FORCE_DO_CMD
- $(call do_cmd,solink_module)
-
-all_deps += $(builddir)/binding.node
-# Add target alias
-.PHONY: binding
-binding: $(builddir)/binding.node
-
-# Short alias for building this executable.
-.PHONY: binding.node
-binding.node: $(builddir)/binding.node
-
-# Add executable to "all" target.
-.PHONY: all
-all: $(builddir)/binding.node
-
diff --git a/test/addons-napi/5_function_factory/build/config.gypi b/test/addons-napi/5_function_factory/build/config.gypi
deleted file mode 100644
index 6a35cfccbb772b..00000000000000
--- a/test/addons-napi/5_function_factory/build/config.gypi
+++ /dev/null
@@ -1,70 +0,0 @@
-# Do not edit. File was generated by node-gyp's "configure" step
-{
- "target_defaults": {
- "cflags": [],
- "default_configuration": "Release",
- "defines": [],
- "include_dirs": [],
- "libraries": []
- },
- "variables": {
- "asan": 0,
- "coverage": "false",
- "debug_devtools": "node",
- "debug_http2": "false",
- "debug_nghttp2": "false",
- "force_dynamic_crt": 0,
- "host_arch": "x64",
- "icu_data_file": "icudt59l.dat",
- "icu_data_in": "../../deps/icu-small/source/data/in/icudt59l.dat",
- "icu_endianness": "l",
- "icu_gyp_path": "tools/icu/icu-generic.gyp",
- "icu_locales": "en,root",
- "icu_path": "deps/icu-small",
- "icu_small": "true",
- "icu_ver_major": "59",
- "llvm_version": 0,
- "node_byteorder": "little",
- "node_enable_d8": "false",
- "node_enable_v8_vtunejit": "false",
- "node_install_npm": "true",
- "node_module_version": 58,
- "node_no_browser_globals": "false",
- "node_prefix": "/usr/local",
- "node_release_urlbase": "",
- "node_shared": "false",
- "node_shared_cares": "false",
- "node_shared_http_parser": "false",
- "node_shared_libuv": "false",
- "node_shared_openssl": "false",
- "node_shared_zlib": "false",
- "node_tag": "",
- "node_use_bundled_v8": "true",
- "node_use_dtrace": "true",
- "node_use_etw": "false",
- "node_use_lttng": "false",
- "node_use_openssl": "true",
- "node_use_perfctr": "false",
- "node_use_v8_platform": "true",
- "node_without_node_options": "false",
- "openssl_fips": "",
- "openssl_no_asm": 0,
- "shlib_suffix": "58.dylib",
- "target_arch": "x64",
- "uv_parent_path": "/deps/uv/",
- "uv_use_dtrace": "true",
- "v8_enable_gdbjit": 0,
- "v8_enable_i18n_support": 1,
- "v8_enable_inspector": 1,
- "v8_no_strict_aliasing": 1,
- "v8_optimized_debug": 0,
- "v8_promise_internal_field_count": 1,
- "v8_random_seed": 0,
- "v8_trace_maps": 0,
- "v8_use_snapshot": "true",
- "want_separate_host_toolset": 0,
- "xcode_version": "8.0",
- "nodedir": "/Users/trott/io.js",
- "standalone_static_library": 1
- }
-}
diff --git a/test/addons-napi/5_function_factory/build/gyp-mac-tool b/test/addons-napi/5_function_factory/build/gyp-mac-tool
deleted file mode 100755
index 8ef02b0493a003..00000000000000
--- a/test/addons-napi/5_function_factory/build/gyp-mac-tool
+++ /dev/null
@@ -1,611 +0,0 @@
-#!/usr/bin/env python
-# Generated by gyp. Do not edit.
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Utility functions to perform Xcode-style build steps.
-
-These functions are executed via gyp-mac-tool when using the Makefile generator.
-"""
-
-import fcntl
-import fnmatch
-import glob
-import json
-import os
-import plistlib
-import re
-import shutil
-import string
-import subprocess
-import sys
-import tempfile
-
-
-def main(args):
- executor = MacTool()
- exit_code = executor.Dispatch(args)
- if exit_code is not None:
- sys.exit(exit_code)
-
-
-class MacTool(object):
- """This class performs all the Mac tooling steps. The methods can either be
- executed directly, or dispatched from an argument list."""
-
- def Dispatch(self, args):
- """Dispatches a string command to a method."""
- if len(args) < 1:
- raise Exception("Not enough arguments")
-
- method = "Exec%s" % self._CommandifyName(args[0])
- return getattr(self, method)(*args[1:])
-
- def _CommandifyName(self, name_string):
- """Transforms a tool name like copy-info-plist to CopyInfoPlist"""
- return name_string.title().replace('-', '')
-
- def ExecCopyBundleResource(self, source, dest, convert_to_binary):
- """Copies a resource file to the bundle/Resources directory, performing any
- necessary compilation on each resource."""
- extension = os.path.splitext(source)[1].lower()
- if os.path.isdir(source):
- # Copy tree.
- # TODO(thakis): This copies file attributes like mtime, while the
- # single-file branch below doesn't. This should probably be changed to
- # be consistent with the single-file branch.
- if os.path.exists(dest):
- shutil.rmtree(dest)
- shutil.copytree(source, dest)
- elif extension == '.xib':
- return self._CopyXIBFile(source, dest)
- elif extension == '.storyboard':
- return self._CopyXIBFile(source, dest)
- elif extension == '.strings':
- self._CopyStringsFile(source, dest, convert_to_binary)
- else:
- shutil.copy(source, dest)
-
- def _CopyXIBFile(self, source, dest):
- """Compiles a XIB file with ibtool into a binary plist in the bundle."""
-
- # ibtool sometimes crashes with relative paths. See crbug.com/314728.
- base = os.path.dirname(os.path.realpath(__file__))
- if os.path.relpath(source):
- source = os.path.join(base, source)
- if os.path.relpath(dest):
- dest = os.path.join(base, dest)
-
- args = ['xcrun', 'ibtool', '--errors', '--warnings', '--notices',
- '--output-format', 'human-readable-text', '--compile', dest, source]
- ibtool_section_re = re.compile(r'/\*.*\*/')
- ibtool_re = re.compile(r'.*note:.*is clipping its content')
- ibtoolout = subprocess.Popen(args, stdout=subprocess.PIPE)
- current_section_header = None
- for line in ibtoolout.stdout:
- if ibtool_section_re.match(line):
- current_section_header = line
- elif not ibtool_re.match(line):
- if current_section_header:
- sys.stdout.write(current_section_header)
- current_section_header = None
- sys.stdout.write(line)
- return ibtoolout.returncode
-
- def _ConvertToBinary(self, dest):
- subprocess.check_call([
- 'xcrun', 'plutil', '-convert', 'binary1', '-o', dest, dest])
-
- def _CopyStringsFile(self, source, dest, convert_to_binary):
- """Copies a .strings file using iconv to reconvert the input into UTF-16."""
- input_code = self._DetectInputEncoding(source) or "UTF-8"
-
- # Xcode's CpyCopyStringsFile / builtin-copyStrings seems to call
- # CFPropertyListCreateFromXMLData() behind the scenes; at least it prints
- # CFPropertyListCreateFromXMLData(): Old-style plist parser: missing
- # semicolon in dictionary.
- # on invalid files. Do the same kind of validation.
- import CoreFoundation
- s = open(source, 'rb').read()
- d = CoreFoundation.CFDataCreate(None, s, len(s))
- _, error = CoreFoundation.CFPropertyListCreateFromXMLData(None, d, 0, None)
- if error:
- return
-
- fp = open(dest, 'wb')
- fp.write(s.decode(input_code).encode('UTF-16'))
- fp.close()
-
- if convert_to_binary == 'True':
- self._ConvertToBinary(dest)
-
- def _DetectInputEncoding(self, file_name):
- """Reads the first few bytes from file_name and tries to guess the text
- encoding. Returns None as a guess if it can't detect it."""
- fp = open(file_name, 'rb')
- try:
- header = fp.read(3)
- except e:
- fp.close()
- return None
- fp.close()
- if header.startswith("\xFE\xFF"):
- return "UTF-16"
- elif header.startswith("\xFF\xFE"):
- return "UTF-16"
- elif header.startswith("\xEF\xBB\xBF"):
- return "UTF-8"
- else:
- return None
-
- def ExecCopyInfoPlist(self, source, dest, convert_to_binary, *keys):
- """Copies the |source| Info.plist to the destination directory |dest|."""
- # Read the source Info.plist into memory.
- fd = open(source, 'r')
- lines = fd.read()
- fd.close()
-
- # Insert synthesized key/value pairs (e.g. BuildMachineOSBuild).
- plist = plistlib.readPlistFromString(lines)
- if keys:
- plist = dict(plist.items() + json.loads(keys[0]).items())
- lines = plistlib.writePlistToString(plist)
-
- # Go through all the environment variables and replace them as variables in
- # the file.
- IDENT_RE = re.compile(r'[/\s]')
- for key in os.environ:
- if key.startswith('_'):
- continue
- evar = '${%s}' % key
- evalue = os.environ[key]
- lines = string.replace(lines, evar, evalue)
-
- # Xcode supports various suffices on environment variables, which are
- # all undocumented. :rfc1034identifier is used in the standard project
- # template these days, and :identifier was used earlier. They are used to
- # convert non-url characters into things that look like valid urls --
- # except that the replacement character for :identifier, '_' isn't valid
- # in a URL either -- oops, hence :rfc1034identifier was born.
- evar = '${%s:identifier}' % key
- evalue = IDENT_RE.sub('_', os.environ[key])
- lines = string.replace(lines, evar, evalue)
-
- evar = '${%s:rfc1034identifier}' % key
- evalue = IDENT_RE.sub('-', os.environ[key])
- lines = string.replace(lines, evar, evalue)
-
- # Remove any keys with values that haven't been replaced.
- lines = lines.split('\n')
- for i in range(len(lines)):
- if lines[i].strip().startswith("${"):
- lines[i] = None
- lines[i - 1] = None
- lines = '\n'.join(filter(lambda x: x is not None, lines))
-
- # Write out the file with variables replaced.
- fd = open(dest, 'w')
- fd.write(lines)
- fd.close()
-
- # Now write out PkgInfo file now that the Info.plist file has been
- # "compiled".
- self._WritePkgInfo(dest)
-
- if convert_to_binary == 'True':
- self._ConvertToBinary(dest)
-
- def _WritePkgInfo(self, info_plist):
- """This writes the PkgInfo file from the data stored in Info.plist."""
- plist = plistlib.readPlist(info_plist)
- if not plist:
- return
-
- # Only create PkgInfo for executable types.
- package_type = plist['CFBundlePackageType']
- if package_type != 'APPL':
- return
-
- # The format of PkgInfo is eight characters, representing the bundle type
- # and bundle signature, each four characters. If that is missing, four
- # '?' characters are used instead.
- signature_code = plist.get('CFBundleSignature', '????')
- if len(signature_code) != 4: # Wrong length resets everything, too.
- signature_code = '?' * 4
-
- dest = os.path.join(os.path.dirname(info_plist), 'PkgInfo')
- fp = open(dest, 'w')
- fp.write('%s%s' % (package_type, signature_code))
- fp.close()
-
- def ExecFlock(self, lockfile, *cmd_list):
- """Emulates the most basic behavior of Linux's flock(1)."""
- # Rely on exception handling to report errors.
- fd = os.open(lockfile, os.O_RDONLY|os.O_NOCTTY|os.O_CREAT, 0o666)
- fcntl.flock(fd, fcntl.LOCK_EX)
- return subprocess.call(cmd_list)
-
- def ExecFilterLibtool(self, *cmd_list):
- """Calls libtool and filters out '/path/to/libtool: file: foo.o has no
- symbols'."""
- libtool_re = re.compile(r'^.*libtool: file: .* has no symbols$')
- libtool_re5 = re.compile(
- r'^.*libtool: warning for library: ' +
- r'.* the table of contents is empty ' +
- r'\(no object file members in the library define global symbols\)$')
- env = os.environ.copy()
- # Ref:
- # http://www.opensource.apple.com/source/cctools/cctools-809/misc/libtool.c
- # The problem with this flag is that it resets the file mtime on the file to
- # epoch=0, e.g. 1970-1-1 or 1969-12-31 depending on timezone.
- env['ZERO_AR_DATE'] = '1'
- libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env)
- _, err = libtoolout.communicate()
- for line in err.splitlines():
- if not libtool_re.match(line) and not libtool_re5.match(line):
- print >>sys.stderr, line
- # Unconditionally touch the output .a file on the command line if present
- # and the command succeeded. A bit hacky.
- if not libtoolout.returncode:
- for i in range(len(cmd_list) - 1):
- if cmd_list[i] == "-o" and cmd_list[i+1].endswith('.a'):
- os.utime(cmd_list[i+1], None)
- break
- return libtoolout.returncode
-
- def ExecPackageFramework(self, framework, version):
- """Takes a path to Something.framework and the Current version of that and
- sets up all the symlinks."""
- # Find the name of the binary based on the part before the ".framework".
- binary = os.path.basename(framework).split('.')[0]
-
- CURRENT = 'Current'
- RESOURCES = 'Resources'
- VERSIONS = 'Versions'
-
- if not os.path.exists(os.path.join(framework, VERSIONS, version, binary)):
- # Binary-less frameworks don't seem to contain symlinks (see e.g.
- # chromium's out/Debug/org.chromium.Chromium.manifest/ bundle).
- return
-
- # Move into the framework directory to set the symlinks correctly.
- pwd = os.getcwd()
- os.chdir(framework)
-
- # Set up the Current version.
- self._Relink(version, os.path.join(VERSIONS, CURRENT))
-
- # Set up the root symlinks.
- self._Relink(os.path.join(VERSIONS, CURRENT, binary), binary)
- self._Relink(os.path.join(VERSIONS, CURRENT, RESOURCES), RESOURCES)
-
- # Back to where we were before!
- os.chdir(pwd)
-
- def _Relink(self, dest, link):
- """Creates a symlink to |dest| named |link|. If |link| already exists,
- it is overwritten."""
- if os.path.lexists(link):
- os.remove(link)
- os.symlink(dest, link)
-
- def ExecCompileXcassets(self, keys, *inputs):
- """Compiles multiple .xcassets files into a single .car file.
-
- This invokes 'actool' to compile all the inputs .xcassets files. The
- |keys| arguments is a json-encoded dictionary of extra arguments to
- pass to 'actool' when the asset catalogs contains an application icon
- or a launch image.
-
- Note that 'actool' does not create the Assets.car file if the asset
- catalogs does not contains imageset.
- """
- command_line = [
- 'xcrun', 'actool', '--output-format', 'human-readable-text',
- '--compress-pngs', '--notices', '--warnings', '--errors',
- ]
- is_iphone_target = 'IPHONEOS_DEPLOYMENT_TARGET' in os.environ
- if is_iphone_target:
- platform = os.environ['CONFIGURATION'].split('-')[-1]
- if platform not in ('iphoneos', 'iphonesimulator'):
- platform = 'iphonesimulator'
- command_line.extend([
- '--platform', platform, '--target-device', 'iphone',
- '--target-device', 'ipad', '--minimum-deployment-target',
- os.environ['IPHONEOS_DEPLOYMENT_TARGET'], '--compile',
- os.path.abspath(os.environ['CONTENTS_FOLDER_PATH']),
- ])
- else:
- command_line.extend([
- '--platform', 'macosx', '--target-device', 'mac',
- '--minimum-deployment-target', os.environ['MACOSX_DEPLOYMENT_TARGET'],
- '--compile',
- os.path.abspath(os.environ['UNLOCALIZED_RESOURCES_FOLDER_PATH']),
- ])
- if keys:
- keys = json.loads(keys)
- for key, value in keys.iteritems():
- arg_name = '--' + key
- if isinstance(value, bool):
- if value:
- command_line.append(arg_name)
- elif isinstance(value, list):
- for v in value:
- command_line.append(arg_name)
- command_line.append(str(v))
- else:
- command_line.append(arg_name)
- command_line.append(str(value))
- # Note: actool crashes if inputs path are relative, so use os.path.abspath
- # to get absolute path name for inputs.
- command_line.extend(map(os.path.abspath, inputs))
- subprocess.check_call(command_line)
-
- def ExecMergeInfoPlist(self, output, *inputs):
- """Merge multiple .plist files into a single .plist file."""
- merged_plist = {}
- for path in inputs:
- plist = self._LoadPlistMaybeBinary(path)
- self._MergePlist(merged_plist, plist)
- plistlib.writePlist(merged_plist, output)
-
- def ExecCodeSignBundle(self, key, resource_rules, entitlements, provisioning):
- """Code sign a bundle.
-
- This function tries to code sign an iOS bundle, following the same
- algorithm as Xcode:
- 1. copy ResourceRules.plist from the user or the SDK into the bundle,
- 2. pick the provisioning profile that best match the bundle identifier,
- and copy it into the bundle as embedded.mobileprovision,
- 3. copy Entitlements.plist from user or SDK next to the bundle,
- 4. code sign the bundle.
- """
- resource_rules_path = self._InstallResourceRules(resource_rules)
- substitutions, overrides = self._InstallProvisioningProfile(
- provisioning, self._GetCFBundleIdentifier())
- entitlements_path = self._InstallEntitlements(
- entitlements, substitutions, overrides)
- subprocess.check_call([
- 'codesign', '--force', '--sign', key, '--resource-rules',
- resource_rules_path, '--entitlements', entitlements_path,
- os.path.join(
- os.environ['TARGET_BUILD_DIR'],
- os.environ['FULL_PRODUCT_NAME'])])
-
- def _InstallResourceRules(self, resource_rules):
- """Installs ResourceRules.plist from user or SDK into the bundle.
-
- Args:
- resource_rules: string, optional, path to the ResourceRules.plist file
- to use, default to "${SDKROOT}/ResourceRules.plist"
-
- Returns:
- Path to the copy of ResourceRules.plist into the bundle.
- """
- source_path = resource_rules
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['CONTENTS_FOLDER_PATH'],
- 'ResourceRules.plist')
- if not source_path:
- source_path = os.path.join(
- os.environ['SDKROOT'], 'ResourceRules.plist')
- shutil.copy2(source_path, target_path)
- return target_path
-
- def _InstallProvisioningProfile(self, profile, bundle_identifier):
- """Installs embedded.mobileprovision into the bundle.
-
- Args:
- profile: string, optional, short name of the .mobileprovision file
- to use, if empty or the file is missing, the best file installed
- will be used
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
-
- Returns:
- A tuple containing two dictionary: variables substitutions and values
- to overrides when generating the entitlements file.
- """
- source_path, provisioning_data, team_id = self._FindProvisioningProfile(
- profile, bundle_identifier)
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['CONTENTS_FOLDER_PATH'],
- 'embedded.mobileprovision')
- shutil.copy2(source_path, target_path)
- substitutions = self._GetSubstitutions(bundle_identifier, team_id + '.')
- return substitutions, provisioning_data['Entitlements']
-
- def _FindProvisioningProfile(self, profile, bundle_identifier):
- """Finds the .mobileprovision file to use for signing the bundle.
-
- Checks all the installed provisioning profiles (or if the user specified
- the PROVISIONING_PROFILE variable, only consult it) and select the most
- specific that correspond to the bundle identifier.
-
- Args:
- profile: string, optional, short name of the .mobileprovision file
- to use, if empty or the file is missing, the best file installed
- will be used
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
-
- Returns:
- A tuple of the path to the selected provisioning profile, the data of
- the embedded plist in the provisioning profile and the team identifier
- to use for code signing.
-
- Raises:
- SystemExit: if no .mobileprovision can be used to sign the bundle.
- """
- profiles_dir = os.path.join(
- os.environ['HOME'], 'Library', 'MobileDevice', 'Provisioning Profiles')
- if not os.path.isdir(profiles_dir):
- print >>sys.stderr, (
- 'cannot find mobile provisioning for %s' % bundle_identifier)
- sys.exit(1)
- provisioning_profiles = None
- if profile:
- profile_path = os.path.join(profiles_dir, profile + '.mobileprovision')
- if os.path.exists(profile_path):
- provisioning_profiles = [profile_path]
- if not provisioning_profiles:
- provisioning_profiles = glob.glob(
- os.path.join(profiles_dir, '*.mobileprovision'))
- valid_provisioning_profiles = {}
- for profile_path in provisioning_profiles:
- profile_data = self._LoadProvisioningProfile(profile_path)
- app_id_pattern = profile_data.get(
- 'Entitlements', {}).get('application-identifier', '')
- for team_identifier in profile_data.get('TeamIdentifier', []):
- app_id = '%s.%s' % (team_identifier, bundle_identifier)
- if fnmatch.fnmatch(app_id, app_id_pattern):
- valid_provisioning_profiles[app_id_pattern] = (
- profile_path, profile_data, team_identifier)
- if not valid_provisioning_profiles:
- print >>sys.stderr, (
- 'cannot find mobile provisioning for %s' % bundle_identifier)
- sys.exit(1)
- # If the user has multiple provisioning profiles installed that can be
- # used for ${bundle_identifier}, pick the most specific one (ie. the
- # provisioning profile whose pattern is the longest).
- selected_key = max(valid_provisioning_profiles, key=lambda v: len(v))
- return valid_provisioning_profiles[selected_key]
-
- def _LoadProvisioningProfile(self, profile_path):
- """Extracts the plist embedded in a provisioning profile.
-
- Args:
- profile_path: string, path to the .mobileprovision file
-
- Returns:
- Content of the plist embedded in the provisioning profile as a dictionary.
- """
- with tempfile.NamedTemporaryFile() as temp:
- subprocess.check_call([
- 'security', 'cms', '-D', '-i', profile_path, '-o', temp.name])
- return self._LoadPlistMaybeBinary(temp.name)
-
- def _MergePlist(self, merged_plist, plist):
- """Merge |plist| into |merged_plist|."""
- for key, value in plist.iteritems():
- if isinstance(value, dict):
- merged_value = merged_plist.get(key, {})
- if isinstance(merged_value, dict):
- self._MergePlist(merged_value, value)
- merged_plist[key] = merged_value
- else:
- merged_plist[key] = value
- else:
- merged_plist[key] = value
-
- def _LoadPlistMaybeBinary(self, plist_path):
- """Loads into a memory a plist possibly encoded in binary format.
-
- This is a wrapper around plistlib.readPlist that tries to convert the
- plist to the XML format if it can't be parsed (assuming that it is in
- the binary format).
-
- Args:
- plist_path: string, path to a plist file, in XML or binary format
-
- Returns:
- Content of the plist as a dictionary.
- """
- try:
- # First, try to read the file using plistlib that only supports XML,
- # and if an exception is raised, convert a temporary copy to XML and
- # load that copy.
- return plistlib.readPlist(plist_path)
- except:
- pass
- with tempfile.NamedTemporaryFile() as temp:
- shutil.copy2(plist_path, temp.name)
- subprocess.check_call(['plutil', '-convert', 'xml1', temp.name])
- return plistlib.readPlist(temp.name)
-
- def _GetSubstitutions(self, bundle_identifier, app_identifier_prefix):
- """Constructs a dictionary of variable substitutions for Entitlements.plist.
-
- Args:
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
- app_identifier_prefix: string, value for AppIdentifierPrefix
-
- Returns:
- Dictionary of substitutions to apply when generating Entitlements.plist.
- """
- return {
- 'CFBundleIdentifier': bundle_identifier,
- 'AppIdentifierPrefix': app_identifier_prefix,
- }
-
- def _GetCFBundleIdentifier(self):
- """Extracts CFBundleIdentifier value from Info.plist in the bundle.
-
- Returns:
- Value of CFBundleIdentifier in the Info.plist located in the bundle.
- """
- info_plist_path = os.path.join(
- os.environ['TARGET_BUILD_DIR'],
- os.environ['INFOPLIST_PATH'])
- info_plist_data = self._LoadPlistMaybeBinary(info_plist_path)
- return info_plist_data['CFBundleIdentifier']
-
- def _InstallEntitlements(self, entitlements, substitutions, overrides):
- """Generates and install the ${BundleName}.xcent entitlements file.
-
- Expands variables "$(variable)" pattern in the source entitlements file,
- add extra entitlements defined in the .mobileprovision file and the copy
- the generated plist to "${BundlePath}.xcent".
-
- Args:
- entitlements: string, optional, path to the Entitlements.plist template
- to use, defaults to "${SDKROOT}/Entitlements.plist"
- substitutions: dictionary, variable substitutions
- overrides: dictionary, values to add to the entitlements
-
- Returns:
- Path to the generated entitlements file.
- """
- source_path = entitlements
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['PRODUCT_NAME'] + '.xcent')
- if not source_path:
- source_path = os.path.join(
- os.environ['SDKROOT'],
- 'Entitlements.plist')
- shutil.copy2(source_path, target_path)
- data = self._LoadPlistMaybeBinary(target_path)
- data = self._ExpandVariables(data, substitutions)
- if overrides:
- for key in overrides:
- if key not in data:
- data[key] = overrides[key]
- plistlib.writePlist(data, target_path)
- return target_path
-
- def _ExpandVariables(self, data, substitutions):
- """Expands variables "$(variable)" in data.
-
- Args:
- data: object, can be either string, list or dictionary
- substitutions: dictionary, variable substitutions to perform
-
- Returns:
- Copy of data where each references to "$(variable)" has been replaced
- by the corresponding value found in substitutions, or left intact if
- the key was not found.
- """
- if isinstance(data, str):
- for key, value in substitutions.iteritems():
- data = data.replace('$(%s)' % key, value)
- return data
- if isinstance(data, list):
- return [self._ExpandVariables(v, substitutions) for v in data]
- if isinstance(data, dict):
- return {k: self._ExpandVariables(data[k], substitutions) for k in data}
- return data
-
-if __name__ == '__main__':
- sys.exit(main(sys.argv[1:]))
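(Illustration only, not part of the patch.) The gyp-mac-tool removed above ends with an _ExpandVariables helper used while generating the .xcent entitlements file. The deleted tool targets Python 2 (note iteritems); the self-contained Python 3 sketch below restates the same recursion over strings, lists and dicts, with purely illustrative substitution keys:

def expand_variables(data, substitutions):
    # "$(variable)" references are replaced wherever they occur; references
    # whose keys are not in the substitutions dict are left intact.
    if isinstance(data, str):
        for key, value in substitutions.items():
            data = data.replace('$(%s)' % key, value)
        return data
    if isinstance(data, list):
        return [expand_variables(v, substitutions) for v in data]
    if isinstance(data, dict):
        return {k: expand_variables(v, substitutions) for k, v in data.items()}
    return data

entitlements = {'application-identifier': '$(AppIdentifierPrefix)$(CFBundleIdentifier)'}
print(expand_variables(entitlements, {'AppIdentifierPrefix': 'TEAMID.',
                                      'CFBundleIdentifier': 'org.example.app'}))
# {'application-identifier': 'TEAMID.org.example.app'}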
diff --git a/test/addons-napi/6_object_wrap/build/Makefile b/test/addons-napi/6_object_wrap/build/Makefile
deleted file mode 100644
index 55e2154f05c0de..00000000000000
--- a/test/addons-napi/6_object_wrap/build/Makefile
+++ /dev/null
@@ -1,342 +0,0 @@
-# We borrow heavily from the kernel build setup, though we are simpler since
-# we don't have Kconfig tweaking settings on us.
-
-# The implicit make rules have it looking for RCS files, among other things.
-# We instead explicitly write all the rules we care about.
-# It's even quicker (saves ~200ms) to pass -r on the command line.
-MAKEFLAGS=-r
-
-# The source directory tree.
-srcdir := ..
-abs_srcdir := $(abspath $(srcdir))
-
-# The name of the builddir.
-builddir_name ?= .
-
-# The V=1 flag on command line makes us verbosely print command lines.
-ifdef V
- quiet=
-else
- quiet=quiet_
-endif
-
-# Specify BUILDTYPE=Release on the command line for a release build.
-BUILDTYPE ?= Release
-
-# Directory all our build output goes into.
-# Note that this must be two directories beneath src/ for unit tests to pass,
-# as they reach into the src/ directory for data with relative paths.
-builddir ?= $(builddir_name)/$(BUILDTYPE)
-abs_builddir := $(abspath $(builddir))
-depsdir := $(builddir)/.deps
-
-# Object output directory.
-obj := $(builddir)/obj
-abs_obj := $(abspath $(obj))
-
-# We build up a list of every single one of the targets so we can slurp in the
-# generated dependency rule Makefiles in one pass.
-all_deps :=
-
-
-
-CC.target ?= $(CC)
-CFLAGS.target ?= $(CPPFLAGS) $(CFLAGS)
-CXX.target ?= $(CXX)
-CXXFLAGS.target ?= $(CPPFLAGS) $(CXXFLAGS)
-LINK.target ?= $(LINK)
-LDFLAGS.target ?= $(LDFLAGS)
-AR.target ?= $(AR)
-
-# C++ apps need to be linked with g++.
-LINK ?= $(CXX.target)
-
-# TODO(evan): move all cross-compilation logic to gyp-time so we don't need
-# to replicate this environment fallback in make as well.
-CC.host ?= gcc
-CFLAGS.host ?= $(CPPFLAGS_host) $(CFLAGS_host)
-CXX.host ?= g++
-CXXFLAGS.host ?= $(CPPFLAGS_host) $(CXXFLAGS_host)
-LINK.host ?= $(CXX.host)
-LDFLAGS.host ?=
-AR.host ?= ar
-
-# Define a dir function that can handle spaces.
-# http://www.gnu.org/software/make/manual/make.html#Syntax-of-Functions
-# "leading spaces cannot appear in the text of the first argument as written.
-# These characters can be put into the argument value by variable substitution."
-empty :=
-space := $(empty) $(empty)
-
-# http://stackoverflow.com/questions/1189781/using-make-dir-or-notdir-on-a-path-with-spaces
-replace_spaces = $(subst $(space),?,$1)
-unreplace_spaces = $(subst ?,$(space),$1)
-dirx = $(call unreplace_spaces,$(dir $(call replace_spaces,$1)))
-
-# Flags to make gcc output dependency info. Note that you need to be
-# careful here to use the flags that ccache and distcc can understand.
-# We write to a dep file on the side first and then rename at the end
-# so we can't end up with a broken dep file.
-depfile = $(depsdir)/$(call replace_spaces,$@).d
-DEPFLAGS = -MMD -MF $(depfile).raw
-
-# We have to fixup the deps output in a few ways.
-# (1) the file output should mention the proper .o file.
-# ccache or distcc lose the path to the target, so we convert a rule of
-# the form:
-# foobar.o: DEP1 DEP2
-# into
-# path/to/foobar.o: DEP1 DEP2
-# (2) we want missing files not to cause us to fail to build.
-# We want to rewrite
-# foobar.o: DEP1 DEP2 \
-# DEP3
-# to
-# DEP1:
-# DEP2:
-# DEP3:
-# so if the files are missing, they're just considered phony rules.
-# We have to do some pretty insane escaping to get those backslashes
-# and dollar signs past make, the shell, and sed at the same time.
-# Doesn't work with spaces, but that's fine: .d files have spaces in
-# their names replaced with other characters.
-define fixup_dep
-# The depfile may not exist if the input file didn't have any #includes.
-touch $(depfile).raw
-# Fixup path as in (1).
-sed -e "s|^$(notdir $@)|$@|" $(depfile).raw >> $(depfile)
-# Add extra rules as in (2).
-# We remove slashes and replace spaces with new lines;
-# remove blank lines;
-# delete the first line and append a colon to the remaining lines.
-sed -e 's|\\||' -e 'y| |\n|' $(depfile).raw |\
- grep -v '^$$' |\
- sed -e 1d -e 's|$$|:|' \
- >> $(depfile)
-rm $(depfile).raw
-endef
-
-# Command definitions:
-# - cmd_foo is the actual command to run;
-# - quiet_cmd_foo is the brief-output summary of the command.
-
-quiet_cmd_cc = CC($(TOOLSET)) $@
-cmd_cc = $(CC.$(TOOLSET)) $(GYP_CFLAGS) $(DEPFLAGS) $(CFLAGS.$(TOOLSET)) -c -o $@ $<
-
-quiet_cmd_cxx = CXX($(TOOLSET)) $@
-cmd_cxx = $(CXX.$(TOOLSET)) $(GYP_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-
-quiet_cmd_objc = CXX($(TOOLSET)) $@
-cmd_objc = $(CC.$(TOOLSET)) $(GYP_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-quiet_cmd_objcxx = CXX($(TOOLSET)) $@
-cmd_objcxx = $(CXX.$(TOOLSET)) $(GYP_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-# Commands for precompiled header files.
-quiet_cmd_pch_c = CXX($(TOOLSET)) $@
-cmd_pch_c = $(CC.$(TOOLSET)) $(GYP_PCH_CFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-quiet_cmd_pch_cc = CXX($(TOOLSET)) $@
-cmd_pch_cc = $(CC.$(TOOLSET)) $(GYP_PCH_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-quiet_cmd_pch_m = CXX($(TOOLSET)) $@
-cmd_pch_m = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $<
-quiet_cmd_pch_mm = CXX($(TOOLSET)) $@
-cmd_pch_mm = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-# gyp-mac-tool is written next to the root Makefile by gyp.
-# Use $(4) for the command, since $(2) and $(3) are used as flag by do_cmd
-# already.
-quiet_cmd_mac_tool = MACTOOL $(4) $<
-cmd_mac_tool = ./gyp-mac-tool $(4) $< "$@"
-
-quiet_cmd_mac_package_framework = PACKAGE FRAMEWORK $@
-cmd_mac_package_framework = ./gyp-mac-tool package-framework "$@" $(4)
-
-quiet_cmd_infoplist = INFOPLIST $@
-cmd_infoplist = $(CC.$(TOOLSET)) -E -P -Wno-trigraphs -x c $(INFOPLIST_DEFINES) "$<" -o "$@"
-
-quiet_cmd_touch = TOUCH $@
-cmd_touch = touch $@
-
-quiet_cmd_copy = COPY $@
-# send stderr to /dev/null to ignore messages when linking directories.
-cmd_copy = rm -rf "$@" && cp -af "$<" "$@"
-
-quiet_cmd_alink = LIBTOOL-STATIC $@
-cmd_alink = rm -f $@ && ./gyp-mac-tool filter-libtool libtool $(GYP_LIBTOOLFLAGS) -static -o $@ $(filter %.o,$^)
-
-quiet_cmd_link = LINK($(TOOLSET)) $@
-cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS)
-
-quiet_cmd_solink = SOLINK($(TOOLSET)) $@
-cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS)
-
-quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@
-cmd_solink_module = $(LINK.$(TOOLSET)) -bundle $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS)
-
-
-# Define an escape_quotes function to escape single quotes.
-# This allows us to handle quotes properly as long as we always use
-# use single quotes and escape_quotes.
-escape_quotes = $(subst ','\'',$(1))
-# This comment is here just to include a ' to unconfuse syntax highlighting.
-# Define an escape_vars function to escape '$' variable syntax.
-# This allows us to read/write command lines with shell variables (e.g.
-# $LD_LIBRARY_PATH), without triggering make substitution.
-escape_vars = $(subst $$,$$$$,$(1))
-# Helper that expands to a shell command to echo a string exactly as it is in
-# make. This uses printf instead of echo because printf's behaviour with respect
-# to escape sequences is more portable than echo's across different shells
-# (e.g., dash, bash).
-exact_echo = printf '%s\n' '$(call escape_quotes,$(1))'
-
-# Helper to compare the command we're about to run against the command
-# we logged the last time we ran the command. Produces an empty
-# string (false) when the commands match.
-# Tricky point: Make has no string-equality test function.
-# The kernel uses the following, but it seems like it would have false
-# positives, where one string reordered its arguments.
-# arg_check = $(strip $(filter-out $(cmd_$(1)), $(cmd_$@)) \
-# $(filter-out $(cmd_$@), $(cmd_$(1))))
-# We instead substitute each for the empty string into the other, and
-# say they're equal if both substitutions produce the empty string.
-# .d files contain ? instead of spaces, take that into account.
-command_changed = $(or $(subst $(cmd_$(1)),,$(cmd_$(call replace_spaces,$@))),\
- $(subst $(cmd_$(call replace_spaces,$@)),,$(cmd_$(1))))
-
-# Helper that is non-empty when a prerequisite changes.
-# Normally make does this implicitly, but we force rules to always run
-# so we can check their command lines.
-# $? -- new prerequisites
-# $| -- order-only dependencies
-prereq_changed = $(filter-out FORCE_DO_CMD,$(filter-out $|,$?))
-
-# Helper that executes all postbuilds until one fails.
-define do_postbuilds
- @E=0;\
- for p in $(POSTBUILDS); do\
- eval $$p;\
- E=$$?;\
- if [ $$E -ne 0 ]; then\
- break;\
- fi;\
- done;\
- if [ $$E -ne 0 ]; then\
- rm -rf "$@";\
- exit $$E;\
- fi
-endef
-
-# do_cmd: run a command via the above cmd_foo names, if necessary.
-# Should always run for a given target to handle command-line changes.
-# Second argument, if non-zero, makes it do asm/C/C++ dependency munging.
-# Third argument, if non-zero, makes it do POSTBUILDS processing.
-# Note: We intentionally do NOT call dirx for depfile, since it contains ? for
-# spaces already and dirx strips the ? characters.
-define do_cmd
-$(if $(or $(command_changed),$(prereq_changed)),
- @$(call exact_echo, $($(quiet)cmd_$(1)))
- @mkdir -p "$(call dirx,$@)" "$(dir $(depfile))"
- $(if $(findstring flock,$(word 2,$(cmd_$1))),
- @$(cmd_$(1))
- @echo " $(quiet_cmd_$(1)): Finished",
- @$(cmd_$(1))
- )
- @$(call exact_echo,$(call escape_vars,cmd_$(call replace_spaces,$@) := $(cmd_$(1)))) > $(depfile)
- @$(if $(2),$(fixup_dep))
- $(if $(and $(3), $(POSTBUILDS)),
- $(call do_postbuilds)
- )
-)
-endef
-
-# Declare the "all" target first so it is the default,
-# even though we don't have the deps yet.
-.PHONY: all
-all:
-
-# make looks for ways to re-generate included makefiles, but in our case, we
-# don't have a direct way. Explicitly telling make that it has nothing to do
-# for them makes it go faster.
-%.d: ;
-
-# Use FORCE_DO_CMD to force a target to run. Should be coupled with
-# do_cmd.
-.PHONY: FORCE_DO_CMD
-FORCE_DO_CMD:
-
-TOOLSET := target
-# Suffix rules, putting all outputs into $(obj).
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cpp FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cxx FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.m FORCE_DO_CMD
- @$(call do_cmd,objc,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.mm FORCE_DO_CMD
- @$(call do_cmd,objcxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.S FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.s FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-# Try building from generated source, too.
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cpp FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cxx FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.m FORCE_DO_CMD
- @$(call do_cmd,objc,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.mm FORCE_DO_CMD
- @$(call do_cmd,objcxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.S FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.s FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-$(obj).$(TOOLSET)/%.o: $(obj)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.cpp FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.cxx FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.m FORCE_DO_CMD
- @$(call do_cmd,objc,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.mm FORCE_DO_CMD
- @$(call do_cmd,objcxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.S FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.s FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-
-ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
- $(findstring $(join ^,$(prefix)),\
- $(join ^,binding.target.mk)))),)
- include binding.target.mk
-endif
-
-quiet_cmd_regen_makefile = ACTION Regenerating $@
-cmd_regen_makefile = cd $(srcdir); /Users/trott/io.js/deps/npm/node_modules/node-gyp/gyp/gyp_main.py -fmake --ignore-environment "--toplevel-dir=." -I/Users/trott/io.js/test/addons-napi/6_object_wrap/build/config.gypi -I/Users/trott/io.js/deps/npm/node_modules/node-gyp/addon.gypi -I/Users/trott/io.js/common.gypi "--depth=." "-Goutput_dir=." "--generator-output=build" "-Dlibrary=shared_library" "-Dvisibility=default" "-Dnode_root_dir=/Users/trott/io.js" "-Dnode_gyp_dir=/Users/trott/io.js/deps/npm/node_modules/node-gyp" "-Dnode_lib_file=/Users/trott/io.js/$(Configuration)/node.lib" "-Dmodule_root_dir=/Users/trott/io.js/test/addons-napi/6_object_wrap" "-Dnode_engine=v8" binding.gyp
-Makefile: $(srcdir)/../../../deps/npm/node_modules/node-gyp/addon.gypi $(srcdir)/build/config.gypi $(srcdir)/binding.gyp $(srcdir)/../../../common.gypi
- $(call do_cmd,regen_makefile)
-
-# "all" is a concatenation of the "all" targets from all the included
-# sub-makefiles. This is just here to clarify.
-all:
-
-# Add in dependency-tracking rules. $(all_deps) is the list of every single
-# target in our tree. Only consider the ones with .d (dependency) info:
-d_files := $(wildcard $(foreach f,$(all_deps),$(depsdir)/$(f).d))
-ifneq ($(d_files),)
- include $(d_files)
-endif
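(Illustration only, not part of the patch.) The fixup_dep recipe in the Makefile deleted above post-processes the raw gcc -MMD output with sed: it re-points the rule at the full object path, since ccache and distcc drop the directory, and turns every prerequisite into an empty phony rule so that a deleted header does not abort the build. A rough Python equivalent, operating on a hypothetical depfile string, is:

import os

def fixup_dep(raw, obj_path):
    # (1) "binding.o: ..." becomes "Release/obj.target/binding/binding.o: ...".
    fixed = raw.replace(os.path.basename(obj_path) + ':', obj_path + ':', 1)
    # (2) Every prerequisite gets its own empty rule ("node.h:"), the same
    #     effect as the sed pipeline that strips backslashes, splits on
    #     whitespace, drops the first token and appends a colon.
    tokens = raw.replace('\\', ' ').split()
    phony_rules = '\n'.join(t + ':' for t in tokens[1:])
    return fixed.rstrip('\n') + '\n' + phony_rules + '\n'

raw = 'binding.o: ../binding.cc /usr/include/node/node.h \\\n  /usr/include/node/v8.h\n'
print(fixup_dep(raw, 'Release/obj.target/binding/binding.o'))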
diff --git a/test/addons-napi/6_object_wrap/build/binding.Makefile b/test/addons-napi/6_object_wrap/build/binding.Makefile
deleted file mode 100644
index 0556c70f61905f..00000000000000
--- a/test/addons-napi/6_object_wrap/build/binding.Makefile
+++ /dev/null
@@ -1,6 +0,0 @@
-# This file is generated by gyp; do not edit.
-
-export builddir_name ?= ./build/.
-.PHONY: all
-all:
- $(MAKE) binding
diff --git a/test/addons-napi/6_object_wrap/build/binding.target.mk b/test/addons-napi/6_object_wrap/build/binding.target.mk
deleted file mode 100644
index 6bca2c1f4f6abd..00000000000000
--- a/test/addons-napi/6_object_wrap/build/binding.target.mk
+++ /dev/null
@@ -1,179 +0,0 @@
-# This file is generated by gyp; do not edit.
-
-TOOLSET := target
-TARGET := binding
-DEFS_Debug := \
- '-DNODE_GYP_MODULE_NAME=binding' \
- '-DUSING_UV_SHARED=1' \
- '-DUSING_V8_SHARED=1' \
- '-DV8_DEPRECATION_WARNINGS=1' \
- '-D_DARWIN_USE_64_BIT_INODE=1' \
- '-D_LARGEFILE_SOURCE' \
- '-D_FILE_OFFSET_BITS=64' \
- '-DBUILDING_NODE_EXTENSION' \
- '-DDEBUG' \
- '-D_DEBUG' \
- '-DV8_ENABLE_CHECKS'
-
-# Flags passed to all source files.
-CFLAGS_Debug := \
- -O0 \
- -gdwarf-2 \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -Wall \
- -Wendif-labels \
- -W \
- -Wno-unused-parameter
-
-# Flags passed to only C files.
-CFLAGS_C_Debug := \
- -fno-strict-aliasing
-
-# Flags passed to only C++ files.
-CFLAGS_CC_Debug := \
- -std=gnu++0x \
- -stdlib=libc++ \
- -fno-rtti \
- -fno-exceptions \
- -fno-threadsafe-statics \
- -fno-strict-aliasing
-
-# Flags passed to only ObjC files.
-CFLAGS_OBJC_Debug :=
-
-# Flags passed to only ObjC++ files.
-CFLAGS_OBJCC_Debug :=
-
-INCS_Debug := \
- -I/Users/trott/io.js/include/node \
- -I/Users/trott/io.js/src \
- -I/Users/trott/io.js/deps/uv/include \
- -I/Users/trott/io.js/deps/v8/include
-
-DEFS_Release := \
- '-DNODE_GYP_MODULE_NAME=binding' \
- '-DUSING_UV_SHARED=1' \
- '-DUSING_V8_SHARED=1' \
- '-DV8_DEPRECATION_WARNINGS=1' \
- '-D_DARWIN_USE_64_BIT_INODE=1' \
- '-D_LARGEFILE_SOURCE' \
- '-D_FILE_OFFSET_BITS=64' \
- '-DBUILDING_NODE_EXTENSION'
-
-# Flags passed to all source files.
-CFLAGS_Release := \
- -Os \
- -gdwarf-2 \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -Wall \
- -Wendif-labels \
- -W \
- -Wno-unused-parameter
-
-# Flags passed to only C files.
-CFLAGS_C_Release := \
- -fno-strict-aliasing
-
-# Flags passed to only C++ files.
-CFLAGS_CC_Release := \
- -std=gnu++0x \
- -stdlib=libc++ \
- -fno-rtti \
- -fno-exceptions \
- -fno-threadsafe-statics \
- -fno-strict-aliasing
-
-# Flags passed to only ObjC files.
-CFLAGS_OBJC_Release :=
-
-# Flags passed to only ObjC++ files.
-CFLAGS_OBJCC_Release :=
-
-INCS_Release := \
- -I/Users/trott/io.js/include/node \
- -I/Users/trott/io.js/src \
- -I/Users/trott/io.js/deps/uv/include \
- -I/Users/trott/io.js/deps/v8/include
-
-OBJS := \
- $(obj).target/$(TARGET)/binding.o \
- $(obj).target/$(TARGET)/myobject.o
-
-# Add to the list of files we specially track dependencies for.
-all_deps += $(OBJS)
-
-# CFLAGS et al overrides must be target-local.
-# See "Target-specific Variable Values" in the GNU Make manual.
-$(OBJS): TOOLSET := $(TOOLSET)
-$(OBJS): GYP_CFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE))
-$(OBJS): GYP_CXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE))
-$(OBJS): GYP_OBJCFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) $(CFLAGS_OBJC_$(BUILDTYPE))
-$(OBJS): GYP_OBJCXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) $(CFLAGS_OBJCC_$(BUILDTYPE))
-
-# Suffix rules, putting all outputs into $(obj).
-
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-
-# Try building from generated source, too.
-
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-
-# End of this set of suffix rules
-### Rules for final target.
-LDFLAGS_Debug := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -L$(builddir) \
- -stdlib=libc++
-
-LIBTOOLFLAGS_Debug := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first
-
-LDFLAGS_Release := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -L$(builddir) \
- -stdlib=libc++
-
-LIBTOOLFLAGS_Release := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first
-
-LIBS :=
-
-$(builddir)/binding.node: GYP_LDFLAGS := $(LDFLAGS_$(BUILDTYPE))
-$(builddir)/binding.node: LIBS := $(LIBS)
-$(builddir)/binding.node: GYP_LIBTOOLFLAGS := $(LIBTOOLFLAGS_$(BUILDTYPE))
-$(builddir)/binding.node: TOOLSET := $(TOOLSET)
-$(builddir)/binding.node: $(OBJS) FORCE_DO_CMD
- $(call do_cmd,solink_module)
-
-all_deps += $(builddir)/binding.node
-# Add target alias
-.PHONY: binding
-binding: $(builddir)/binding.node
-
-# Short alias for building this executable.
-.PHONY: binding.node
-binding.node: $(builddir)/binding.node
-
-# Add executable to "all" target.
-.PHONY: all
-all: $(builddir)/binding.node
-
diff --git a/test/addons-napi/6_object_wrap/build/config.gypi b/test/addons-napi/6_object_wrap/build/config.gypi
deleted file mode 100644
index 6a35cfccbb772b..00000000000000
--- a/test/addons-napi/6_object_wrap/build/config.gypi
+++ /dev/null
@@ -1,70 +0,0 @@
-# Do not edit. File was generated by node-gyp's "configure" step
-{
- "target_defaults": {
- "cflags": [],
- "default_configuration": "Release",
- "defines": [],
- "include_dirs": [],
- "libraries": []
- },
- "variables": {
- "asan": 0,
- "coverage": "false",
- "debug_devtools": "node",
- "debug_http2": "false",
- "debug_nghttp2": "false",
- "force_dynamic_crt": 0,
- "host_arch": "x64",
- "icu_data_file": "icudt59l.dat",
- "icu_data_in": "../../deps/icu-small/source/data/in/icudt59l.dat",
- "icu_endianness": "l",
- "icu_gyp_path": "tools/icu/icu-generic.gyp",
- "icu_locales": "en,root",
- "icu_path": "deps/icu-small",
- "icu_small": "true",
- "icu_ver_major": "59",
- "llvm_version": 0,
- "node_byteorder": "little",
- "node_enable_d8": "false",
- "node_enable_v8_vtunejit": "false",
- "node_install_npm": "true",
- "node_module_version": 58,
- "node_no_browser_globals": "false",
- "node_prefix": "/usr/local",
- "node_release_urlbase": "",
- "node_shared": "false",
- "node_shared_cares": "false",
- "node_shared_http_parser": "false",
- "node_shared_libuv": "false",
- "node_shared_openssl": "false",
- "node_shared_zlib": "false",
- "node_tag": "",
- "node_use_bundled_v8": "true",
- "node_use_dtrace": "true",
- "node_use_etw": "false",
- "node_use_lttng": "false",
- "node_use_openssl": "true",
- "node_use_perfctr": "false",
- "node_use_v8_platform": "true",
- "node_without_node_options": "false",
- "openssl_fips": "",
- "openssl_no_asm": 0,
- "shlib_suffix": "58.dylib",
- "target_arch": "x64",
- "uv_parent_path": "/deps/uv/",
- "uv_use_dtrace": "true",
- "v8_enable_gdbjit": 0,
- "v8_enable_i18n_support": 1,
- "v8_enable_inspector": 1,
- "v8_no_strict_aliasing": 1,
- "v8_optimized_debug": 0,
- "v8_promise_internal_field_count": 1,
- "v8_random_seed": 0,
- "v8_trace_maps": 0,
- "v8_use_snapshot": "true",
- "want_separate_host_toolset": 0,
- "xcode_version": "8.0",
- "nodedir": "/Users/trott/io.js",
- "standalone_static_library": 1
- }
-}
diff --git a/test/addons-napi/6_object_wrap/build/gyp-mac-tool b/test/addons-napi/6_object_wrap/build/gyp-mac-tool
deleted file mode 100755
index 8ef02b0493a003..00000000000000
--- a/test/addons-napi/6_object_wrap/build/gyp-mac-tool
+++ /dev/null
@@ -1,611 +0,0 @@
-#!/usr/bin/env python
-# Generated by gyp. Do not edit.
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Utility functions to perform Xcode-style build steps.
-
-These functions are executed via gyp-mac-tool when using the Makefile generator.
-"""
-
-import fcntl
-import fnmatch
-import glob
-import json
-import os
-import plistlib
-import re
-import shutil
-import string
-import subprocess
-import sys
-import tempfile
-
-
-def main(args):
- executor = MacTool()
- exit_code = executor.Dispatch(args)
- if exit_code is not None:
- sys.exit(exit_code)
-
-
-class MacTool(object):
- """This class performs all the Mac tooling steps. The methods can either be
- executed directly, or dispatched from an argument list."""
-
- def Dispatch(self, args):
- """Dispatches a string command to a method."""
- if len(args) < 1:
- raise Exception("Not enough arguments")
-
- method = "Exec%s" % self._CommandifyName(args[0])
- return getattr(self, method)(*args[1:])
-
- def _CommandifyName(self, name_string):
- """Transforms a tool name like copy-info-plist to CopyInfoPlist"""
- return name_string.title().replace('-', '')
-
- def ExecCopyBundleResource(self, source, dest, convert_to_binary):
- """Copies a resource file to the bundle/Resources directory, performing any
- necessary compilation on each resource."""
- extension = os.path.splitext(source)[1].lower()
- if os.path.isdir(source):
- # Copy tree.
- # TODO(thakis): This copies file attributes like mtime, while the
- # single-file branch below doesn't. This should probably be changed to
- # be consistent with the single-file branch.
- if os.path.exists(dest):
- shutil.rmtree(dest)
- shutil.copytree(source, dest)
- elif extension == '.xib':
- return self._CopyXIBFile(source, dest)
- elif extension == '.storyboard':
- return self._CopyXIBFile(source, dest)
- elif extension == '.strings':
- self._CopyStringsFile(source, dest, convert_to_binary)
- else:
- shutil.copy(source, dest)
-
- def _CopyXIBFile(self, source, dest):
- """Compiles a XIB file with ibtool into a binary plist in the bundle."""
-
- # ibtool sometimes crashes with relative paths. See crbug.com/314728.
- base = os.path.dirname(os.path.realpath(__file__))
- if os.path.relpath(source):
- source = os.path.join(base, source)
- if os.path.relpath(dest):
- dest = os.path.join(base, dest)
-
- args = ['xcrun', 'ibtool', '--errors', '--warnings', '--notices',
- '--output-format', 'human-readable-text', '--compile', dest, source]
- ibtool_section_re = re.compile(r'/\*.*\*/')
- ibtool_re = re.compile(r'.*note:.*is clipping its content')
- ibtoolout = subprocess.Popen(args, stdout=subprocess.PIPE)
- current_section_header = None
- for line in ibtoolout.stdout:
- if ibtool_section_re.match(line):
- current_section_header = line
- elif not ibtool_re.match(line):
- if current_section_header:
- sys.stdout.write(current_section_header)
- current_section_header = None
- sys.stdout.write(line)
- return ibtoolout.returncode
-
- def _ConvertToBinary(self, dest):
- subprocess.check_call([
- 'xcrun', 'plutil', '-convert', 'binary1', '-o', dest, dest])
-
- def _CopyStringsFile(self, source, dest, convert_to_binary):
- """Copies a .strings file using iconv to reconvert the input into UTF-16."""
- input_code = self._DetectInputEncoding(source) or "UTF-8"
-
- # Xcode's CpyCopyStringsFile / builtin-copyStrings seems to call
- # CFPropertyListCreateFromXMLData() behind the scenes; at least it prints
- # CFPropertyListCreateFromXMLData(): Old-style plist parser: missing
- # semicolon in dictionary.
- # on invalid files. Do the same kind of validation.
- import CoreFoundation
- s = open(source, 'rb').read()
- d = CoreFoundation.CFDataCreate(None, s, len(s))
- _, error = CoreFoundation.CFPropertyListCreateFromXMLData(None, d, 0, None)
- if error:
- return
-
- fp = open(dest, 'wb')
- fp.write(s.decode(input_code).encode('UTF-16'))
- fp.close()
-
- if convert_to_binary == 'True':
- self._ConvertToBinary(dest)
-
- def _DetectInputEncoding(self, file_name):
- """Reads the first few bytes from file_name and tries to guess the text
- encoding. Returns None as a guess if it can't detect it."""
- fp = open(file_name, 'rb')
- try:
- header = fp.read(3)
- except e:
- fp.close()
- return None
- fp.close()
- if header.startswith("\xFE\xFF"):
- return "UTF-16"
- elif header.startswith("\xFF\xFE"):
- return "UTF-16"
- elif header.startswith("\xEF\xBB\xBF"):
- return "UTF-8"
- else:
- return None
-
- def ExecCopyInfoPlist(self, source, dest, convert_to_binary, *keys):
- """Copies the |source| Info.plist to the destination directory |dest|."""
- # Read the source Info.plist into memory.
- fd = open(source, 'r')
- lines = fd.read()
- fd.close()
-
- # Insert synthesized key/value pairs (e.g. BuildMachineOSBuild).
- plist = plistlib.readPlistFromString(lines)
- if keys:
- plist = dict(plist.items() + json.loads(keys[0]).items())
- lines = plistlib.writePlistToString(plist)
-
- # Go through all the environment variables and replace them as variables in
- # the file.
- IDENT_RE = re.compile(r'[/\s]')
- for key in os.environ:
- if key.startswith('_'):
- continue
- evar = '${%s}' % key
- evalue = os.environ[key]
- lines = string.replace(lines, evar, evalue)
-
- # Xcode supports various suffices on environment variables, which are
- # all undocumented. :rfc1034identifier is used in the standard project
- # template these days, and :identifier was used earlier. They are used to
- # convert non-url characters into things that look like valid urls --
- # except that the replacement character for :identifier, '_' isn't valid
- # in a URL either -- oops, hence :rfc1034identifier was born.
- evar = '${%s:identifier}' % key
- evalue = IDENT_RE.sub('_', os.environ[key])
- lines = string.replace(lines, evar, evalue)
-
- evar = '${%s:rfc1034identifier}' % key
- evalue = IDENT_RE.sub('-', os.environ[key])
- lines = string.replace(lines, evar, evalue)
-
- # Remove any keys with values that haven't been replaced.
- lines = lines.split('\n')
- for i in range(len(lines)):
- if lines[i].strip().startswith("${"):
- lines[i] = None
- lines[i - 1] = None
- lines = '\n'.join(filter(lambda x: x is not None, lines))
-
- # Write out the file with variables replaced.
- fd = open(dest, 'w')
- fd.write(lines)
- fd.close()
-
- # Now write out PkgInfo file now that the Info.plist file has been
- # "compiled".
- self._WritePkgInfo(dest)
-
- if convert_to_binary == 'True':
- self._ConvertToBinary(dest)
-
- def _WritePkgInfo(self, info_plist):
- """This writes the PkgInfo file from the data stored in Info.plist."""
- plist = plistlib.readPlist(info_plist)
- if not plist:
- return
-
- # Only create PkgInfo for executable types.
- package_type = plist['CFBundlePackageType']
- if package_type != 'APPL':
- return
-
- # The format of PkgInfo is eight characters, representing the bundle type
- # and bundle signature, each four characters. If that is missing, four
- # '?' characters are used instead.
- signature_code = plist.get('CFBundleSignature', '????')
- if len(signature_code) != 4: # Wrong length resets everything, too.
- signature_code = '?' * 4
-
- dest = os.path.join(os.path.dirname(info_plist), 'PkgInfo')
- fp = open(dest, 'w')
- fp.write('%s%s' % (package_type, signature_code))
- fp.close()
-
- def ExecFlock(self, lockfile, *cmd_list):
- """Emulates the most basic behavior of Linux's flock(1)."""
- # Rely on exception handling to report errors.
- fd = os.open(lockfile, os.O_RDONLY|os.O_NOCTTY|os.O_CREAT, 0o666)
- fcntl.flock(fd, fcntl.LOCK_EX)
- return subprocess.call(cmd_list)
-
- def ExecFilterLibtool(self, *cmd_list):
- """Calls libtool and filters out '/path/to/libtool: file: foo.o has no
- symbols'."""
- libtool_re = re.compile(r'^.*libtool: file: .* has no symbols$')
- libtool_re5 = re.compile(
- r'^.*libtool: warning for library: ' +
- r'.* the table of contents is empty ' +
- r'\(no object file members in the library define global symbols\)$')
- env = os.environ.copy()
- # Ref:
- # http://www.opensource.apple.com/source/cctools/cctools-809/misc/libtool.c
- # The problem with this flag is that it resets the file mtime on the file to
- # epoch=0, e.g. 1970-1-1 or 1969-12-31 depending on timezone.
- env['ZERO_AR_DATE'] = '1'
- libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env)
- _, err = libtoolout.communicate()
- for line in err.splitlines():
- if not libtool_re.match(line) and not libtool_re5.match(line):
- print >>sys.stderr, line
- # Unconditionally touch the output .a file on the command line if present
- # and the command succeeded. A bit hacky.
- if not libtoolout.returncode:
- for i in range(len(cmd_list) - 1):
- if cmd_list[i] == "-o" and cmd_list[i+1].endswith('.a'):
- os.utime(cmd_list[i+1], None)
- break
- return libtoolout.returncode
-
- def ExecPackageFramework(self, framework, version):
- """Takes a path to Something.framework and the Current version of that and
- sets up all the symlinks."""
- # Find the name of the binary based on the part before the ".framework".
- binary = os.path.basename(framework).split('.')[0]
-
- CURRENT = 'Current'
- RESOURCES = 'Resources'
- VERSIONS = 'Versions'
-
- if not os.path.exists(os.path.join(framework, VERSIONS, version, binary)):
- # Binary-less frameworks don't seem to contain symlinks (see e.g.
- # chromium's out/Debug/org.chromium.Chromium.manifest/ bundle).
- return
-
- # Move into the framework directory to set the symlinks correctly.
- pwd = os.getcwd()
- os.chdir(framework)
-
- # Set up the Current version.
- self._Relink(version, os.path.join(VERSIONS, CURRENT))
-
- # Set up the root symlinks.
- self._Relink(os.path.join(VERSIONS, CURRENT, binary), binary)
- self._Relink(os.path.join(VERSIONS, CURRENT, RESOURCES), RESOURCES)
-
- # Back to where we were before!
- os.chdir(pwd)
-
- def _Relink(self, dest, link):
- """Creates a symlink to |dest| named |link|. If |link| already exists,
- it is overwritten."""
- if os.path.lexists(link):
- os.remove(link)
- os.symlink(dest, link)
-
- def ExecCompileXcassets(self, keys, *inputs):
- """Compiles multiple .xcassets files into a single .car file.
-
- This invokes 'actool' to compile all the inputs .xcassets files. The
- |keys| arguments is a json-encoded dictionary of extra arguments to
- pass to 'actool' when the asset catalogs contains an application icon
- or a launch image.
-
- Note that 'actool' does not create the Assets.car file if the asset
- catalogs does not contains imageset.
- """
- command_line = [
- 'xcrun', 'actool', '--output-format', 'human-readable-text',
- '--compress-pngs', '--notices', '--warnings', '--errors',
- ]
- is_iphone_target = 'IPHONEOS_DEPLOYMENT_TARGET' in os.environ
- if is_iphone_target:
- platform = os.environ['CONFIGURATION'].split('-')[-1]
- if platform not in ('iphoneos', 'iphonesimulator'):
- platform = 'iphonesimulator'
- command_line.extend([
- '--platform', platform, '--target-device', 'iphone',
- '--target-device', 'ipad', '--minimum-deployment-target',
- os.environ['IPHONEOS_DEPLOYMENT_TARGET'], '--compile',
- os.path.abspath(os.environ['CONTENTS_FOLDER_PATH']),
- ])
- else:
- command_line.extend([
- '--platform', 'macosx', '--target-device', 'mac',
- '--minimum-deployment-target', os.environ['MACOSX_DEPLOYMENT_TARGET'],
- '--compile',
- os.path.abspath(os.environ['UNLOCALIZED_RESOURCES_FOLDER_PATH']),
- ])
- if keys:
- keys = json.loads(keys)
- for key, value in keys.iteritems():
- arg_name = '--' + key
- if isinstance(value, bool):
- if value:
- command_line.append(arg_name)
- elif isinstance(value, list):
- for v in value:
- command_line.append(arg_name)
- command_line.append(str(v))
- else:
- command_line.append(arg_name)
- command_line.append(str(value))
- # Note: actool crashes if inputs path are relative, so use os.path.abspath
- # to get absolute path name for inputs.
- command_line.extend(map(os.path.abspath, inputs))
- subprocess.check_call(command_line)
-
- def ExecMergeInfoPlist(self, output, *inputs):
- """Merge multiple .plist files into a single .plist file."""
- merged_plist = {}
- for path in inputs:
- plist = self._LoadPlistMaybeBinary(path)
- self._MergePlist(merged_plist, plist)
- plistlib.writePlist(merged_plist, output)
-
- def ExecCodeSignBundle(self, key, resource_rules, entitlements, provisioning):
- """Code sign a bundle.
-
- This function tries to code sign an iOS bundle, following the same
- algorithm as Xcode:
- 1. copy ResourceRules.plist from the user or the SDK into the bundle,
- 2. pick the provisioning profile that best match the bundle identifier,
- and copy it into the bundle as embedded.mobileprovision,
- 3. copy Entitlements.plist from user or SDK next to the bundle,
- 4. code sign the bundle.
- """
- resource_rules_path = self._InstallResourceRules(resource_rules)
- substitutions, overrides = self._InstallProvisioningProfile(
- provisioning, self._GetCFBundleIdentifier())
- entitlements_path = self._InstallEntitlements(
- entitlements, substitutions, overrides)
- subprocess.check_call([
- 'codesign', '--force', '--sign', key, '--resource-rules',
- resource_rules_path, '--entitlements', entitlements_path,
- os.path.join(
- os.environ['TARGET_BUILD_DIR'],
- os.environ['FULL_PRODUCT_NAME'])])
-
- def _InstallResourceRules(self, resource_rules):
- """Installs ResourceRules.plist from user or SDK into the bundle.
-
- Args:
- resource_rules: string, optional, path to the ResourceRules.plist file
- to use, default to "${SDKROOT}/ResourceRules.plist"
-
- Returns:
- Path to the copy of ResourceRules.plist into the bundle.
- """
- source_path = resource_rules
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['CONTENTS_FOLDER_PATH'],
- 'ResourceRules.plist')
- if not source_path:
- source_path = os.path.join(
- os.environ['SDKROOT'], 'ResourceRules.plist')
- shutil.copy2(source_path, target_path)
- return target_path
-
- def _InstallProvisioningProfile(self, profile, bundle_identifier):
- """Installs embedded.mobileprovision into the bundle.
-
- Args:
- profile: string, optional, short name of the .mobileprovision file
- to use, if empty or the file is missing, the best file installed
- will be used
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
-
- Returns:
- A tuple containing two dictionary: variables substitutions and values
- to overrides when generating the entitlements file.
- """
- source_path, provisioning_data, team_id = self._FindProvisioningProfile(
- profile, bundle_identifier)
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['CONTENTS_FOLDER_PATH'],
- 'embedded.mobileprovision')
- shutil.copy2(source_path, target_path)
- substitutions = self._GetSubstitutions(bundle_identifier, team_id + '.')
- return substitutions, provisioning_data['Entitlements']
-
- def _FindProvisioningProfile(self, profile, bundle_identifier):
- """Finds the .mobileprovision file to use for signing the bundle.
-
- Checks all the installed provisioning profiles (or if the user specified
- the PROVISIONING_PROFILE variable, only consult it) and select the most
- specific that correspond to the bundle identifier.
-
- Args:
- profile: string, optional, short name of the .mobileprovision file
- to use, if empty or the file is missing, the best file installed
- will be used
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
-
- Returns:
- A tuple of the path to the selected provisioning profile, the data of
- the embedded plist in the provisioning profile and the team identifier
- to use for code signing.
-
- Raises:
- SystemExit: if no .mobileprovision can be used to sign the bundle.
- """
- profiles_dir = os.path.join(
- os.environ['HOME'], 'Library', 'MobileDevice', 'Provisioning Profiles')
- if not os.path.isdir(profiles_dir):
- print >>sys.stderr, (
- 'cannot find mobile provisioning for %s' % bundle_identifier)
- sys.exit(1)
- provisioning_profiles = None
- if profile:
- profile_path = os.path.join(profiles_dir, profile + '.mobileprovision')
- if os.path.exists(profile_path):
- provisioning_profiles = [profile_path]
- if not provisioning_profiles:
- provisioning_profiles = glob.glob(
- os.path.join(profiles_dir, '*.mobileprovision'))
- valid_provisioning_profiles = {}
- for profile_path in provisioning_profiles:
- profile_data = self._LoadProvisioningProfile(profile_path)
- app_id_pattern = profile_data.get(
- 'Entitlements', {}).get('application-identifier', '')
- for team_identifier in profile_data.get('TeamIdentifier', []):
- app_id = '%s.%s' % (team_identifier, bundle_identifier)
- if fnmatch.fnmatch(app_id, app_id_pattern):
- valid_provisioning_profiles[app_id_pattern] = (
- profile_path, profile_data, team_identifier)
- if not valid_provisioning_profiles:
- print >>sys.stderr, (
- 'cannot find mobile provisioning for %s' % bundle_identifier)
- sys.exit(1)
- # If the user has multiple provisioning profiles installed that can be
- # used for ${bundle_identifier}, pick the most specific one (ie. the
- # provisioning profile whose pattern is the longest).
- selected_key = max(valid_provisioning_profiles, key=lambda v: len(v))
- return valid_provisioning_profiles[selected_key]
-
- def _LoadProvisioningProfile(self, profile_path):
- """Extracts the plist embedded in a provisioning profile.
-
- Args:
- profile_path: string, path to the .mobileprovision file
-
- Returns:
- Content of the plist embedded in the provisioning profile as a dictionary.
- """
- with tempfile.NamedTemporaryFile() as temp:
- subprocess.check_call([
- 'security', 'cms', '-D', '-i', profile_path, '-o', temp.name])
- return self._LoadPlistMaybeBinary(temp.name)
-
- def _MergePlist(self, merged_plist, plist):
- """Merge |plist| into |merged_plist|."""
- for key, value in plist.iteritems():
- if isinstance(value, dict):
- merged_value = merged_plist.get(key, {})
- if isinstance(merged_value, dict):
- self._MergePlist(merged_value, value)
- merged_plist[key] = merged_value
- else:
- merged_plist[key] = value
- else:
- merged_plist[key] = value
-
- def _LoadPlistMaybeBinary(self, plist_path):
- """Loads into a memory a plist possibly encoded in binary format.
-
- This is a wrapper around plistlib.readPlist that tries to convert the
- plist to the XML format if it can't be parsed (assuming that it is in
- the binary format).
-
- Args:
- plist_path: string, path to a plist file, in XML or binary format
-
- Returns:
- Content of the plist as a dictionary.
- """
- try:
- # First, try to read the file using plistlib that only supports XML,
- # and if an exception is raised, convert a temporary copy to XML and
- # load that copy.
- return plistlib.readPlist(plist_path)
- except:
- pass
- with tempfile.NamedTemporaryFile() as temp:
- shutil.copy2(plist_path, temp.name)
- subprocess.check_call(['plutil', '-convert', 'xml1', temp.name])
- return plistlib.readPlist(temp.name)
-
- def _GetSubstitutions(self, bundle_identifier, app_identifier_prefix):
- """Constructs a dictionary of variable substitutions for Entitlements.plist.
-
- Args:
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
- app_identifier_prefix: string, value for AppIdentifierPrefix
-
- Returns:
- Dictionary of substitutions to apply when generating Entitlements.plist.
- """
- return {
- 'CFBundleIdentifier': bundle_identifier,
- 'AppIdentifierPrefix': app_identifier_prefix,
- }
-
- def _GetCFBundleIdentifier(self):
- """Extracts CFBundleIdentifier value from Info.plist in the bundle.
-
- Returns:
- Value of CFBundleIdentifier in the Info.plist located in the bundle.
- """
- info_plist_path = os.path.join(
- os.environ['TARGET_BUILD_DIR'],
- os.environ['INFOPLIST_PATH'])
- info_plist_data = self._LoadPlistMaybeBinary(info_plist_path)
- return info_plist_data['CFBundleIdentifier']
-
- def _InstallEntitlements(self, entitlements, substitutions, overrides):
- """Generates and install the ${BundleName}.xcent entitlements file.
-
- Expands variables "$(variable)" pattern in the source entitlements file,
- add extra entitlements defined in the .mobileprovision file and the copy
- the generated plist to "${BundlePath}.xcent".
-
- Args:
- entitlements: string, optional, path to the Entitlements.plist template
- to use, defaults to "${SDKROOT}/Entitlements.plist"
- substitutions: dictionary, variable substitutions
- overrides: dictionary, values to add to the entitlements
-
- Returns:
- Path to the generated entitlements file.
- """
- source_path = entitlements
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['PRODUCT_NAME'] + '.xcent')
- if not source_path:
- source_path = os.path.join(
- os.environ['SDKROOT'],
- 'Entitlements.plist')
- shutil.copy2(source_path, target_path)
- data = self._LoadPlistMaybeBinary(target_path)
- data = self._ExpandVariables(data, substitutions)
- if overrides:
- for key in overrides:
- if key not in data:
- data[key] = overrides[key]
- plistlib.writePlist(data, target_path)
- return target_path
-
- def _ExpandVariables(self, data, substitutions):
- """Expands variables "$(variable)" in data.
-
- Args:
- data: object, can be either string, list or dictionary
- substitutions: dictionary, variable substitutions to perform
-
- Returns:
- Copy of data where each references to "$(variable)" has been replaced
- by the corresponding value found in substitutions, or left intact if
- the key was not found.
- """
- if isinstance(data, str):
- for key, value in substitutions.iteritems():
- data = data.replace('$(%s)' % key, value)
- return data
- if isinstance(data, list):
- return [self._ExpandVariables(v, substitutions) for v in data]
- if isinstance(data, dict):
- return {k: self._ExpandVariables(data[k], substitutions) for k in data}
- return data
-
-if __name__ == '__main__':
- sys.exit(main(sys.argv[1:]))
diff --git a/test/addons-napi/7_factory_wrap/build/Makefile b/test/addons-napi/7_factory_wrap/build/Makefile
deleted file mode 100644
index 24479d1ac381a2..00000000000000
--- a/test/addons-napi/7_factory_wrap/build/Makefile
+++ /dev/null
@@ -1,342 +0,0 @@
-# We borrow heavily from the kernel build setup, though we are simpler since
-# we don't have Kconfig tweaking settings on us.
-
-# The implicit make rules have it looking for RCS files, among other things.
-# We instead explicitly write all the rules we care about.
-# It's even quicker (saves ~200ms) to pass -r on the command line.
-MAKEFLAGS=-r
-
-# The source directory tree.
-srcdir := ..
-abs_srcdir := $(abspath $(srcdir))
-
-# The name of the builddir.
-builddir_name ?= .
-
-# The V=1 flag on command line makes us verbosely print command lines.
-ifdef V
- quiet=
-else
- quiet=quiet_
-endif
-
-# Specify BUILDTYPE=Release on the command line for a release build.
-BUILDTYPE ?= Release
-
-# Directory all our build output goes into.
-# Note that this must be two directories beneath src/ for unit tests to pass,
-# as they reach into the src/ directory for data with relative paths.
-builddir ?= $(builddir_name)/$(BUILDTYPE)
-abs_builddir := $(abspath $(builddir))
-depsdir := $(builddir)/.deps
-
-# Object output directory.
-obj := $(builddir)/obj
-abs_obj := $(abspath $(obj))
-
-# We build up a list of every single one of the targets so we can slurp in the
-# generated dependency rule Makefiles in one pass.
-all_deps :=
-
-
-
-CC.target ?= $(CC)
-CFLAGS.target ?= $(CPPFLAGS) $(CFLAGS)
-CXX.target ?= $(CXX)
-CXXFLAGS.target ?= $(CPPFLAGS) $(CXXFLAGS)
-LINK.target ?= $(LINK)
-LDFLAGS.target ?= $(LDFLAGS)
-AR.target ?= $(AR)
-
-# C++ apps need to be linked with g++.
-LINK ?= $(CXX.target)
-
-# TODO(evan): move all cross-compilation logic to gyp-time so we don't need
-# to replicate this environment fallback in make as well.
-CC.host ?= gcc
-CFLAGS.host ?= $(CPPFLAGS_host) $(CFLAGS_host)
-CXX.host ?= g++
-CXXFLAGS.host ?= $(CPPFLAGS_host) $(CXXFLAGS_host)
-LINK.host ?= $(CXX.host)
-LDFLAGS.host ?=
-AR.host ?= ar
-
-# Define a dir function that can handle spaces.
-# http://www.gnu.org/software/make/manual/make.html#Syntax-of-Functions
-# "leading spaces cannot appear in the text of the first argument as written.
-# These characters can be put into the argument value by variable substitution."
-empty :=
-space := $(empty) $(empty)
-
-# http://stackoverflow.com/questions/1189781/using-make-dir-or-notdir-on-a-path-with-spaces
-replace_spaces = $(subst $(space),?,$1)
-unreplace_spaces = $(subst ?,$(space),$1)
-dirx = $(call unreplace_spaces,$(dir $(call replace_spaces,$1)))
-
-# Flags to make gcc output dependency info. Note that you need to be
-# careful here to use the flags that ccache and distcc can understand.
-# We write to a dep file on the side first and then rename at the end
-# so we can't end up with a broken dep file.
-depfile = $(depsdir)/$(call replace_spaces,$@).d
-DEPFLAGS = -MMD -MF $(depfile).raw
-
-# We have to fixup the deps output in a few ways.
-# (1) the file output should mention the proper .o file.
-# ccache or distcc lose the path to the target, so we convert a rule of
-# the form:
-# foobar.o: DEP1 DEP2
-# into
-# path/to/foobar.o: DEP1 DEP2
-# (2) we want missing files not to cause us to fail to build.
-# We want to rewrite
-# foobar.o: DEP1 DEP2 \
-# DEP3
-# to
-# DEP1:
-# DEP2:
-# DEP3:
-# so if the files are missing, they're just considered phony rules.
-# We have to do some pretty insane escaping to get those backslashes
-# and dollar signs past make, the shell, and sed at the same time.
-# Doesn't work with spaces, but that's fine: .d files have spaces in
-# their names replaced with other characters.
-define fixup_dep
-# The depfile may not exist if the input file didn't have any #includes.
-touch $(depfile).raw
-# Fixup path as in (1).
-sed -e "s|^$(notdir $@)|$@|" $(depfile).raw >> $(depfile)
-# Add extra rules as in (2).
-# We remove slashes and replace spaces with new lines;
-# remove blank lines;
-# delete the first line and append a colon to the remaining lines.
-sed -e 's|\\||' -e 'y| |\n|' $(depfile).raw |\
- grep -v '^$$' |\
- sed -e 1d -e 's|$$|:|' \
- >> $(depfile)
-rm $(depfile).raw
-endef
-
-# Command definitions:
-# - cmd_foo is the actual command to run;
-# - quiet_cmd_foo is the brief-output summary of the command.
-
-quiet_cmd_cc = CC($(TOOLSET)) $@
-cmd_cc = $(CC.$(TOOLSET)) $(GYP_CFLAGS) $(DEPFLAGS) $(CFLAGS.$(TOOLSET)) -c -o $@ $<
-
-quiet_cmd_cxx = CXX($(TOOLSET)) $@
-cmd_cxx = $(CXX.$(TOOLSET)) $(GYP_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-
-quiet_cmd_objc = CXX($(TOOLSET)) $@
-cmd_objc = $(CC.$(TOOLSET)) $(GYP_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-quiet_cmd_objcxx = CXX($(TOOLSET)) $@
-cmd_objcxx = $(CXX.$(TOOLSET)) $(GYP_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-# Commands for precompiled header files.
-quiet_cmd_pch_c = CXX($(TOOLSET)) $@
-cmd_pch_c = $(CC.$(TOOLSET)) $(GYP_PCH_CFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-quiet_cmd_pch_cc = CXX($(TOOLSET)) $@
-cmd_pch_cc = $(CC.$(TOOLSET)) $(GYP_PCH_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-quiet_cmd_pch_m = CXX($(TOOLSET)) $@
-cmd_pch_m = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $<
-quiet_cmd_pch_mm = CXX($(TOOLSET)) $@
-cmd_pch_mm = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-# gyp-mac-tool is written next to the root Makefile by gyp.
-# Use $(4) for the command, since $(2) and $(3) are used as flag by do_cmd
-# already.
-quiet_cmd_mac_tool = MACTOOL $(4) $<
-cmd_mac_tool = ./gyp-mac-tool $(4) $< "$@"
-
-quiet_cmd_mac_package_framework = PACKAGE FRAMEWORK $@
-cmd_mac_package_framework = ./gyp-mac-tool package-framework "$@" $(4)
-
-quiet_cmd_infoplist = INFOPLIST $@
-cmd_infoplist = $(CC.$(TOOLSET)) -E -P -Wno-trigraphs -x c $(INFOPLIST_DEFINES) "$<" -o "$@"
-
-quiet_cmd_touch = TOUCH $@
-cmd_touch = touch $@
-
-quiet_cmd_copy = COPY $@
-# send stderr to /dev/null to ignore messages when linking directories.
-cmd_copy = rm -rf "$@" && cp -af "$<" "$@"
-
-quiet_cmd_alink = LIBTOOL-STATIC $@
-cmd_alink = rm -f $@ && ./gyp-mac-tool filter-libtool libtool $(GYP_LIBTOOLFLAGS) -static -o $@ $(filter %.o,$^)
-
-quiet_cmd_link = LINK($(TOOLSET)) $@
-cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS)
-
-quiet_cmd_solink = SOLINK($(TOOLSET)) $@
-cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS)
-
-quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@
-cmd_solink_module = $(LINK.$(TOOLSET)) -bundle $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS)
-
-
-# Define an escape_quotes function to escape single quotes.
-# This allows us to handle quotes properly as long as we always use
-# use single quotes and escape_quotes.
-escape_quotes = $(subst ','\'',$(1))
-# This comment is here just to include a ' to unconfuse syntax highlighting.
-# Define an escape_vars function to escape '$' variable syntax.
-# This allows us to read/write command lines with shell variables (e.g.
-# $LD_LIBRARY_PATH), without triggering make substitution.
-escape_vars = $(subst $$,$$$$,$(1))
-# Helper that expands to a shell command to echo a string exactly as it is in
-# make. This uses printf instead of echo because printf's behaviour with respect
-# to escape sequences is more portable than echo's across different shells
-# (e.g., dash, bash).
-exact_echo = printf '%s\n' '$(call escape_quotes,$(1))'
-
-# Helper to compare the command we're about to run against the command
-# we logged the last time we ran the command. Produces an empty
-# string (false) when the commands match.
-# Tricky point: Make has no string-equality test function.
-# The kernel uses the following, but it seems like it would have false
-# positives, where one string reordered its arguments.
-# arg_check = $(strip $(filter-out $(cmd_$(1)), $(cmd_$@)) \
-# $(filter-out $(cmd_$@), $(cmd_$(1))))
-# We instead substitute each for the empty string into the other, and
-# say they're equal if both substitutions produce the empty string.
-# .d files contain ? instead of spaces, take that into account.
-command_changed = $(or $(subst $(cmd_$(1)),,$(cmd_$(call replace_spaces,$@))),\
- $(subst $(cmd_$(call replace_spaces,$@)),,$(cmd_$(1))))
-
-# Helper that is non-empty when a prerequisite changes.
-# Normally make does this implicitly, but we force rules to always run
-# so we can check their command lines.
-# $? -- new prerequisites
-# $| -- order-only dependencies
-prereq_changed = $(filter-out FORCE_DO_CMD,$(filter-out $|,$?))
-
-# Helper that executes all postbuilds until one fails.
-define do_postbuilds
- @E=0;\
- for p in $(POSTBUILDS); do\
- eval $$p;\
- E=$$?;\
- if [ $$E -ne 0 ]; then\
- break;\
- fi;\
- done;\
- if [ $$E -ne 0 ]; then\
- rm -rf "$@";\
- exit $$E;\
- fi
-endef
-
-# do_cmd: run a command via the above cmd_foo names, if necessary.
-# Should always run for a given target to handle command-line changes.
-# Second argument, if non-zero, makes it do asm/C/C++ dependency munging.
-# Third argument, if non-zero, makes it do POSTBUILDS processing.
-# Note: We intentionally do NOT call dirx for depfile, since it contains ? for
-# spaces already and dirx strips the ? characters.
-define do_cmd
-$(if $(or $(command_changed),$(prereq_changed)),
- @$(call exact_echo, $($(quiet)cmd_$(1)))
- @mkdir -p "$(call dirx,$@)" "$(dir $(depfile))"
- $(if $(findstring flock,$(word 2,$(cmd_$1))),
- @$(cmd_$(1))
- @echo " $(quiet_cmd_$(1)): Finished",
- @$(cmd_$(1))
- )
- @$(call exact_echo,$(call escape_vars,cmd_$(call replace_spaces,$@) := $(cmd_$(1)))) > $(depfile)
- @$(if $(2),$(fixup_dep))
- $(if $(and $(3), $(POSTBUILDS)),
- $(call do_postbuilds)
- )
-)
-endef
-
-# Declare the "all" target first so it is the default,
-# even though we don't have the deps yet.
-.PHONY: all
-all:
-
-# make looks for ways to re-generate included makefiles, but in our case, we
-# don't have a direct way. Explicitly telling make that it has nothing to do
-# for them makes it go faster.
-%.d: ;
-
-# Use FORCE_DO_CMD to force a target to run. Should be coupled with
-# do_cmd.
-.PHONY: FORCE_DO_CMD
-FORCE_DO_CMD:
-
-TOOLSET := target
-# Suffix rules, putting all outputs into $(obj).
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cpp FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cxx FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.m FORCE_DO_CMD
- @$(call do_cmd,objc,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.mm FORCE_DO_CMD
- @$(call do_cmd,objcxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.S FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.s FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-# Try building from generated source, too.
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cpp FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cxx FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.m FORCE_DO_CMD
- @$(call do_cmd,objc,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.mm FORCE_DO_CMD
- @$(call do_cmd,objcxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.S FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.s FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-$(obj).$(TOOLSET)/%.o: $(obj)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.cpp FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.cxx FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.m FORCE_DO_CMD
- @$(call do_cmd,objc,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.mm FORCE_DO_CMD
- @$(call do_cmd,objcxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.S FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.s FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-
-ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
- $(findstring $(join ^,$(prefix)),\
- $(join ^,binding.target.mk)))),)
- include binding.target.mk
-endif
-
-quiet_cmd_regen_makefile = ACTION Regenerating $@
-cmd_regen_makefile = cd $(srcdir); /Users/trott/io.js/deps/npm/node_modules/node-gyp/gyp/gyp_main.py -fmake --ignore-environment "--toplevel-dir=." -I/Users/trott/io.js/test/addons-napi/7_factory_wrap/build/config.gypi -I/Users/trott/io.js/deps/npm/node_modules/node-gyp/addon.gypi -I/Users/trott/io.js/common.gypi "--depth=." "-Goutput_dir=." "--generator-output=build" "-Dlibrary=shared_library" "-Dvisibility=default" "-Dnode_root_dir=/Users/trott/io.js" "-Dnode_gyp_dir=/Users/trott/io.js/deps/npm/node_modules/node-gyp" "-Dnode_lib_file=/Users/trott/io.js/$(Configuration)/node.lib" "-Dmodule_root_dir=/Users/trott/io.js/test/addons-napi/7_factory_wrap" "-Dnode_engine=v8" binding.gyp
-Makefile: $(srcdir)/../../../deps/npm/node_modules/node-gyp/addon.gypi $(srcdir)/build/config.gypi $(srcdir)/binding.gyp $(srcdir)/../../../common.gypi
- $(call do_cmd,regen_makefile)
-
-# "all" is a concatenation of the "all" targets from all the included
-# sub-makefiles. This is just here to clarify.
-all:
-
-# Add in dependency-tracking rules. $(all_deps) is the list of every single
-# target in our tree. Only consider the ones with .d (dependency) info:
-d_files := $(wildcard $(foreach f,$(all_deps),$(depsdir)/$(f).d))
-ifneq ($(d_files),)
- include $(d_files)
-endif
diff --git a/test/addons-napi/7_factory_wrap/build/binding.Makefile b/test/addons-napi/7_factory_wrap/build/binding.Makefile
deleted file mode 100644
index 0556c70f61905f..00000000000000
--- a/test/addons-napi/7_factory_wrap/build/binding.Makefile
+++ /dev/null
@@ -1,6 +0,0 @@
-# This file is generated by gyp; do not edit.
-
-export builddir_name ?= ./build/.
-.PHONY: all
-all:
- $(MAKE) binding
diff --git a/test/addons-napi/7_factory_wrap/build/binding.target.mk b/test/addons-napi/7_factory_wrap/build/binding.target.mk
deleted file mode 100644
index 6bca2c1f4f6abd..00000000000000
--- a/test/addons-napi/7_factory_wrap/build/binding.target.mk
+++ /dev/null
@@ -1,179 +0,0 @@
-# This file is generated by gyp; do not edit.
-
-TOOLSET := target
-TARGET := binding
-DEFS_Debug := \
- '-DNODE_GYP_MODULE_NAME=binding' \
- '-DUSING_UV_SHARED=1' \
- '-DUSING_V8_SHARED=1' \
- '-DV8_DEPRECATION_WARNINGS=1' \
- '-D_DARWIN_USE_64_BIT_INODE=1' \
- '-D_LARGEFILE_SOURCE' \
- '-D_FILE_OFFSET_BITS=64' \
- '-DBUILDING_NODE_EXTENSION' \
- '-DDEBUG' \
- '-D_DEBUG' \
- '-DV8_ENABLE_CHECKS'
-
-# Flags passed to all source files.
-CFLAGS_Debug := \
- -O0 \
- -gdwarf-2 \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -Wall \
- -Wendif-labels \
- -W \
- -Wno-unused-parameter
-
-# Flags passed to only C files.
-CFLAGS_C_Debug := \
- -fno-strict-aliasing
-
-# Flags passed to only C++ files.
-CFLAGS_CC_Debug := \
- -std=gnu++0x \
- -stdlib=libc++ \
- -fno-rtti \
- -fno-exceptions \
- -fno-threadsafe-statics \
- -fno-strict-aliasing
-
-# Flags passed to only ObjC files.
-CFLAGS_OBJC_Debug :=
-
-# Flags passed to only ObjC++ files.
-CFLAGS_OBJCC_Debug :=
-
-INCS_Debug := \
- -I/Users/trott/io.js/include/node \
- -I/Users/trott/io.js/src \
- -I/Users/trott/io.js/deps/uv/include \
- -I/Users/trott/io.js/deps/v8/include
-
-DEFS_Release := \
- '-DNODE_GYP_MODULE_NAME=binding' \
- '-DUSING_UV_SHARED=1' \
- '-DUSING_V8_SHARED=1' \
- '-DV8_DEPRECATION_WARNINGS=1' \
- '-D_DARWIN_USE_64_BIT_INODE=1' \
- '-D_LARGEFILE_SOURCE' \
- '-D_FILE_OFFSET_BITS=64' \
- '-DBUILDING_NODE_EXTENSION'
-
-# Flags passed to all source files.
-CFLAGS_Release := \
- -Os \
- -gdwarf-2 \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -Wall \
- -Wendif-labels \
- -W \
- -Wno-unused-parameter
-
-# Flags passed to only C files.
-CFLAGS_C_Release := \
- -fno-strict-aliasing
-
-# Flags passed to only C++ files.
-CFLAGS_CC_Release := \
- -std=gnu++0x \
- -stdlib=libc++ \
- -fno-rtti \
- -fno-exceptions \
- -fno-threadsafe-statics \
- -fno-strict-aliasing
-
-# Flags passed to only ObjC files.
-CFLAGS_OBJC_Release :=
-
-# Flags passed to only ObjC++ files.
-CFLAGS_OBJCC_Release :=
-
-INCS_Release := \
- -I/Users/trott/io.js/include/node \
- -I/Users/trott/io.js/src \
- -I/Users/trott/io.js/deps/uv/include \
- -I/Users/trott/io.js/deps/v8/include
-
-OBJS := \
- $(obj).target/$(TARGET)/binding.o \
- $(obj).target/$(TARGET)/myobject.o
-
-# Add to the list of files we specially track dependencies for.
-all_deps += $(OBJS)
-
-# CFLAGS et al overrides must be target-local.
-# See "Target-specific Variable Values" in the GNU Make manual.
-$(OBJS): TOOLSET := $(TOOLSET)
-$(OBJS): GYP_CFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE))
-$(OBJS): GYP_CXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE))
-$(OBJS): GYP_OBJCFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) $(CFLAGS_OBJC_$(BUILDTYPE))
-$(OBJS): GYP_OBJCXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) $(CFLAGS_OBJCC_$(BUILDTYPE))
-
-# Suffix rules, putting all outputs into $(obj).
-
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-
-# Try building from generated source, too.
-
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-
-# End of this set of suffix rules
-### Rules for final target.
-LDFLAGS_Debug := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -L$(builddir) \
- -stdlib=libc++
-
-LIBTOOLFLAGS_Debug := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first
-
-LDFLAGS_Release := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -L$(builddir) \
- -stdlib=libc++
-
-LIBTOOLFLAGS_Release := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first
-
-LIBS :=
-
-$(builddir)/binding.node: GYP_LDFLAGS := $(LDFLAGS_$(BUILDTYPE))
-$(builddir)/binding.node: LIBS := $(LIBS)
-$(builddir)/binding.node: GYP_LIBTOOLFLAGS := $(LIBTOOLFLAGS_$(BUILDTYPE))
-$(builddir)/binding.node: TOOLSET := $(TOOLSET)
-$(builddir)/binding.node: $(OBJS) FORCE_DO_CMD
- $(call do_cmd,solink_module)
-
-all_deps += $(builddir)/binding.node
-# Add target alias
-.PHONY: binding
-binding: $(builddir)/binding.node
-
-# Short alias for building this executable.
-.PHONY: binding.node
-binding.node: $(builddir)/binding.node
-
-# Add executable to "all" target.
-.PHONY: all
-all: $(builddir)/binding.node
-
diff --git a/test/addons-napi/7_factory_wrap/build/config.gypi b/test/addons-napi/7_factory_wrap/build/config.gypi
deleted file mode 100644
index 6a35cfccbb772b..00000000000000
--- a/test/addons-napi/7_factory_wrap/build/config.gypi
+++ /dev/null
@@ -1,70 +0,0 @@
-# Do not edit. File was generated by node-gyp's "configure" step
-{
- "target_defaults": {
- "cflags": [],
- "default_configuration": "Release",
- "defines": [],
- "include_dirs": [],
- "libraries": []
- },
- "variables": {
- "asan": 0,
- "coverage": "false",
- "debug_devtools": "node",
- "debug_http2": "false",
- "debug_nghttp2": "false",
- "force_dynamic_crt": 0,
- "host_arch": "x64",
- "icu_data_file": "icudt59l.dat",
- "icu_data_in": "../../deps/icu-small/source/data/in/icudt59l.dat",
- "icu_endianness": "l",
- "icu_gyp_path": "tools/icu/icu-generic.gyp",
- "icu_locales": "en,root",
- "icu_path": "deps/icu-small",
- "icu_small": "true",
- "icu_ver_major": "59",
- "llvm_version": 0,
- "node_byteorder": "little",
- "node_enable_d8": "false",
- "node_enable_v8_vtunejit": "false",
- "node_install_npm": "true",
- "node_module_version": 58,
- "node_no_browser_globals": "false",
- "node_prefix": "/usr/local",
- "node_release_urlbase": "",
- "node_shared": "false",
- "node_shared_cares": "false",
- "node_shared_http_parser": "false",
- "node_shared_libuv": "false",
- "node_shared_openssl": "false",
- "node_shared_zlib": "false",
- "node_tag": "",
- "node_use_bundled_v8": "true",
- "node_use_dtrace": "true",
- "node_use_etw": "false",
- "node_use_lttng": "false",
- "node_use_openssl": "true",
- "node_use_perfctr": "false",
- "node_use_v8_platform": "true",
- "node_without_node_options": "false",
- "openssl_fips": "",
- "openssl_no_asm": 0,
- "shlib_suffix": "58.dylib",
- "target_arch": "x64",
- "uv_parent_path": "/deps/uv/",
- "uv_use_dtrace": "true",
- "v8_enable_gdbjit": 0,
- "v8_enable_i18n_support": 1,
- "v8_enable_inspector": 1,
- "v8_no_strict_aliasing": 1,
- "v8_optimized_debug": 0,
- "v8_promise_internal_field_count": 1,
- "v8_random_seed": 0,
- "v8_trace_maps": 0,
- "v8_use_snapshot": "true",
- "want_separate_host_toolset": 0,
- "xcode_version": "8.0",
- "nodedir": "/Users/trott/io.js",
- "standalone_static_library": 1
- }
-}
diff --git a/test/addons-napi/7_factory_wrap/build/gyp-mac-tool b/test/addons-napi/7_factory_wrap/build/gyp-mac-tool
deleted file mode 100755
index 8ef02b0493a003..00000000000000
--- a/test/addons-napi/7_factory_wrap/build/gyp-mac-tool
+++ /dev/null
@@ -1,611 +0,0 @@
-#!/usr/bin/env python
-# Generated by gyp. Do not edit.
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Utility functions to perform Xcode-style build steps.
-
-These functions are executed via gyp-mac-tool when using the Makefile generator.
-"""
-
-import fcntl
-import fnmatch
-import glob
-import json
-import os
-import plistlib
-import re
-import shutil
-import string
-import subprocess
-import sys
-import tempfile
-
-
-def main(args):
- executor = MacTool()
- exit_code = executor.Dispatch(args)
- if exit_code is not None:
- sys.exit(exit_code)
-
-
-class MacTool(object):
- """This class performs all the Mac tooling steps. The methods can either be
- executed directly, or dispatched from an argument list."""
-
- def Dispatch(self, args):
- """Dispatches a string command to a method."""
- if len(args) < 1:
- raise Exception("Not enough arguments")
-
- method = "Exec%s" % self._CommandifyName(args[0])
- return getattr(self, method)(*args[1:])
-
- def _CommandifyName(self, name_string):
- """Transforms a tool name like copy-info-plist to CopyInfoPlist"""
- return name_string.title().replace('-', '')
-
- def ExecCopyBundleResource(self, source, dest, convert_to_binary):
- """Copies a resource file to the bundle/Resources directory, performing any
- necessary compilation on each resource."""
- extension = os.path.splitext(source)[1].lower()
- if os.path.isdir(source):
- # Copy tree.
- # TODO(thakis): This copies file attributes like mtime, while the
- # single-file branch below doesn't. This should probably be changed to
- # be consistent with the single-file branch.
- if os.path.exists(dest):
- shutil.rmtree(dest)
- shutil.copytree(source, dest)
- elif extension == '.xib':
- return self._CopyXIBFile(source, dest)
- elif extension == '.storyboard':
- return self._CopyXIBFile(source, dest)
- elif extension == '.strings':
- self._CopyStringsFile(source, dest, convert_to_binary)
- else:
- shutil.copy(source, dest)
-
- def _CopyXIBFile(self, source, dest):
- """Compiles a XIB file with ibtool into a binary plist in the bundle."""
-
- # ibtool sometimes crashes with relative paths. See crbug.com/314728.
- base = os.path.dirname(os.path.realpath(__file__))
- if os.path.relpath(source):
- source = os.path.join(base, source)
- if os.path.relpath(dest):
- dest = os.path.join(base, dest)
-
- args = ['xcrun', 'ibtool', '--errors', '--warnings', '--notices',
- '--output-format', 'human-readable-text', '--compile', dest, source]
- ibtool_section_re = re.compile(r'/\*.*\*/')
- ibtool_re = re.compile(r'.*note:.*is clipping its content')
- ibtoolout = subprocess.Popen(args, stdout=subprocess.PIPE)
- current_section_header = None
- for line in ibtoolout.stdout:
- if ibtool_section_re.match(line):
- current_section_header = line
- elif not ibtool_re.match(line):
- if current_section_header:
- sys.stdout.write(current_section_header)
- current_section_header = None
- sys.stdout.write(line)
- return ibtoolout.returncode
-
- def _ConvertToBinary(self, dest):
- subprocess.check_call([
- 'xcrun', 'plutil', '-convert', 'binary1', '-o', dest, dest])
-
- def _CopyStringsFile(self, source, dest, convert_to_binary):
- """Copies a .strings file using iconv to reconvert the input into UTF-16."""
- input_code = self._DetectInputEncoding(source) or "UTF-8"
-
- # Xcode's CpyCopyStringsFile / builtin-copyStrings seems to call
- # CFPropertyListCreateFromXMLData() behind the scenes; at least it prints
- # CFPropertyListCreateFromXMLData(): Old-style plist parser: missing
- # semicolon in dictionary.
- # on invalid files. Do the same kind of validation.
- import CoreFoundation
- s = open(source, 'rb').read()
- d = CoreFoundation.CFDataCreate(None, s, len(s))
- _, error = CoreFoundation.CFPropertyListCreateFromXMLData(None, d, 0, None)
- if error:
- return
-
- fp = open(dest, 'wb')
- fp.write(s.decode(input_code).encode('UTF-16'))
- fp.close()
-
- if convert_to_binary == 'True':
- self._ConvertToBinary(dest)
-
- def _DetectInputEncoding(self, file_name):
- """Reads the first few bytes from file_name and tries to guess the text
- encoding. Returns None as a guess if it can't detect it."""
- fp = open(file_name, 'rb')
- try:
- header = fp.read(3)
- except e:
- fp.close()
- return None
- fp.close()
- if header.startswith("\xFE\xFF"):
- return "UTF-16"
- elif header.startswith("\xFF\xFE"):
- return "UTF-16"
- elif header.startswith("\xEF\xBB\xBF"):
- return "UTF-8"
- else:
- return None
-
- def ExecCopyInfoPlist(self, source, dest, convert_to_binary, *keys):
- """Copies the |source| Info.plist to the destination directory |dest|."""
- # Read the source Info.plist into memory.
- fd = open(source, 'r')
- lines = fd.read()
- fd.close()
-
- # Insert synthesized key/value pairs (e.g. BuildMachineOSBuild).
- plist = plistlib.readPlistFromString(lines)
- if keys:
- plist = dict(plist.items() + json.loads(keys[0]).items())
- lines = plistlib.writePlistToString(plist)
-
- # Go through all the environment variables and replace them as variables in
- # the file.
- IDENT_RE = re.compile(r'[/\s]')
- for key in os.environ:
- if key.startswith('_'):
- continue
- evar = '${%s}' % key
- evalue = os.environ[key]
- lines = string.replace(lines, evar, evalue)
-
- # Xcode supports various suffices on environment variables, which are
- # all undocumented. :rfc1034identifier is used in the standard project
- # template these days, and :identifier was used earlier. They are used to
- # convert non-url characters into things that look like valid urls --
- # except that the replacement character for :identifier, '_' isn't valid
- # in a URL either -- oops, hence :rfc1034identifier was born.
- evar = '${%s:identifier}' % key
- evalue = IDENT_RE.sub('_', os.environ[key])
- lines = string.replace(lines, evar, evalue)
-
- evar = '${%s:rfc1034identifier}' % key
- evalue = IDENT_RE.sub('-', os.environ[key])
- lines = string.replace(lines, evar, evalue)
-
- # Remove any keys with values that haven't been replaced.
- lines = lines.split('\n')
- for i in range(len(lines)):
- if lines[i].strip().startswith("${"):
- lines[i] = None
- lines[i - 1] = None
- lines = '\n'.join(filter(lambda x: x is not None, lines))
-
- # Write out the file with variables replaced.
- fd = open(dest, 'w')
- fd.write(lines)
- fd.close()
-
- # Now write out PkgInfo file now that the Info.plist file has been
- # "compiled".
- self._WritePkgInfo(dest)
-
- if convert_to_binary == 'True':
- self._ConvertToBinary(dest)
-
- def _WritePkgInfo(self, info_plist):
- """This writes the PkgInfo file from the data stored in Info.plist."""
- plist = plistlib.readPlist(info_plist)
- if not plist:
- return
-
- # Only create PkgInfo for executable types.
- package_type = plist['CFBundlePackageType']
- if package_type != 'APPL':
- return
-
- # The format of PkgInfo is eight characters, representing the bundle type
- # and bundle signature, each four characters. If that is missing, four
- # '?' characters are used instead.
- signature_code = plist.get('CFBundleSignature', '????')
- if len(signature_code) != 4: # Wrong length resets everything, too.
- signature_code = '?' * 4
-
- dest = os.path.join(os.path.dirname(info_plist), 'PkgInfo')
- fp = open(dest, 'w')
- fp.write('%s%s' % (package_type, signature_code))
- fp.close()
-
- def ExecFlock(self, lockfile, *cmd_list):
- """Emulates the most basic behavior of Linux's flock(1)."""
- # Rely on exception handling to report errors.
- fd = os.open(lockfile, os.O_RDONLY|os.O_NOCTTY|os.O_CREAT, 0o666)
- fcntl.flock(fd, fcntl.LOCK_EX)
- return subprocess.call(cmd_list)
-
- def ExecFilterLibtool(self, *cmd_list):
- """Calls libtool and filters out '/path/to/libtool: file: foo.o has no
- symbols'."""
- libtool_re = re.compile(r'^.*libtool: file: .* has no symbols$')
- libtool_re5 = re.compile(
- r'^.*libtool: warning for library: ' +
- r'.* the table of contents is empty ' +
- r'\(no object file members in the library define global symbols\)$')
- env = os.environ.copy()
- # Ref:
- # http://www.opensource.apple.com/source/cctools/cctools-809/misc/libtool.c
- # The problem with this flag is that it resets the file mtime on the file to
- # epoch=0, e.g. 1970-1-1 or 1969-12-31 depending on timezone.
- env['ZERO_AR_DATE'] = '1'
- libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env)
- _, err = libtoolout.communicate()
- for line in err.splitlines():
- if not libtool_re.match(line) and not libtool_re5.match(line):
- print >>sys.stderr, line
- # Unconditionally touch the output .a file on the command line if present
- # and the command succeeded. A bit hacky.
- if not libtoolout.returncode:
- for i in range(len(cmd_list) - 1):
- if cmd_list[i] == "-o" and cmd_list[i+1].endswith('.a'):
- os.utime(cmd_list[i+1], None)
- break
- return libtoolout.returncode
-
- def ExecPackageFramework(self, framework, version):
- """Takes a path to Something.framework and the Current version of that and
- sets up all the symlinks."""
- # Find the name of the binary based on the part before the ".framework".
- binary = os.path.basename(framework).split('.')[0]
-
- CURRENT = 'Current'
- RESOURCES = 'Resources'
- VERSIONS = 'Versions'
-
- if not os.path.exists(os.path.join(framework, VERSIONS, version, binary)):
- # Binary-less frameworks don't seem to contain symlinks (see e.g.
- # chromium's out/Debug/org.chromium.Chromium.manifest/ bundle).
- return
-
- # Move into the framework directory to set the symlinks correctly.
- pwd = os.getcwd()
- os.chdir(framework)
-
- # Set up the Current version.
- self._Relink(version, os.path.join(VERSIONS, CURRENT))
-
- # Set up the root symlinks.
- self._Relink(os.path.join(VERSIONS, CURRENT, binary), binary)
- self._Relink(os.path.join(VERSIONS, CURRENT, RESOURCES), RESOURCES)
-
- # Back to where we were before!
- os.chdir(pwd)
-
- def _Relink(self, dest, link):
- """Creates a symlink to |dest| named |link|. If |link| already exists,
- it is overwritten."""
- if os.path.lexists(link):
- os.remove(link)
- os.symlink(dest, link)
-
- def ExecCompileXcassets(self, keys, *inputs):
- """Compiles multiple .xcassets files into a single .car file.
-
- This invokes 'actool' to compile all the inputs .xcassets files. The
- |keys| arguments is a json-encoded dictionary of extra arguments to
- pass to 'actool' when the asset catalogs contains an application icon
- or a launch image.
-
- Note that 'actool' does not create the Assets.car file if the asset
- catalogs does not contains imageset.
- """
- command_line = [
- 'xcrun', 'actool', '--output-format', 'human-readable-text',
- '--compress-pngs', '--notices', '--warnings', '--errors',
- ]
- is_iphone_target = 'IPHONEOS_DEPLOYMENT_TARGET' in os.environ
- if is_iphone_target:
- platform = os.environ['CONFIGURATION'].split('-')[-1]
- if platform not in ('iphoneos', 'iphonesimulator'):
- platform = 'iphonesimulator'
- command_line.extend([
- '--platform', platform, '--target-device', 'iphone',
- '--target-device', 'ipad', '--minimum-deployment-target',
- os.environ['IPHONEOS_DEPLOYMENT_TARGET'], '--compile',
- os.path.abspath(os.environ['CONTENTS_FOLDER_PATH']),
- ])
- else:
- command_line.extend([
- '--platform', 'macosx', '--target-device', 'mac',
- '--minimum-deployment-target', os.environ['MACOSX_DEPLOYMENT_TARGET'],
- '--compile',
- os.path.abspath(os.environ['UNLOCALIZED_RESOURCES_FOLDER_PATH']),
- ])
- if keys:
- keys = json.loads(keys)
- for key, value in keys.iteritems():
- arg_name = '--' + key
- if isinstance(value, bool):
- if value:
- command_line.append(arg_name)
- elif isinstance(value, list):
- for v in value:
- command_line.append(arg_name)
- command_line.append(str(v))
- else:
- command_line.append(arg_name)
- command_line.append(str(value))
- # Note: actool crashes if inputs path are relative, so use os.path.abspath
- # to get absolute path name for inputs.
- command_line.extend(map(os.path.abspath, inputs))
- subprocess.check_call(command_line)
-
- def ExecMergeInfoPlist(self, output, *inputs):
- """Merge multiple .plist files into a single .plist file."""
- merged_plist = {}
- for path in inputs:
- plist = self._LoadPlistMaybeBinary(path)
- self._MergePlist(merged_plist, plist)
- plistlib.writePlist(merged_plist, output)
-
- def ExecCodeSignBundle(self, key, resource_rules, entitlements, provisioning):
- """Code sign a bundle.
-
- This function tries to code sign an iOS bundle, following the same
- algorithm as Xcode:
- 1. copy ResourceRules.plist from the user or the SDK into the bundle,
- 2. pick the provisioning profile that best match the bundle identifier,
- and copy it into the bundle as embedded.mobileprovision,
- 3. copy Entitlements.plist from user or SDK next to the bundle,
- 4. code sign the bundle.
- """
- resource_rules_path = self._InstallResourceRules(resource_rules)
- substitutions, overrides = self._InstallProvisioningProfile(
- provisioning, self._GetCFBundleIdentifier())
- entitlements_path = self._InstallEntitlements(
- entitlements, substitutions, overrides)
- subprocess.check_call([
- 'codesign', '--force', '--sign', key, '--resource-rules',
- resource_rules_path, '--entitlements', entitlements_path,
- os.path.join(
- os.environ['TARGET_BUILD_DIR'],
- os.environ['FULL_PRODUCT_NAME'])])
-
- def _InstallResourceRules(self, resource_rules):
- """Installs ResourceRules.plist from user or SDK into the bundle.
-
- Args:
- resource_rules: string, optional, path to the ResourceRules.plist file
- to use, default to "${SDKROOT}/ResourceRules.plist"
-
- Returns:
- Path to the copy of ResourceRules.plist into the bundle.
- """
- source_path = resource_rules
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['CONTENTS_FOLDER_PATH'],
- 'ResourceRules.plist')
- if not source_path:
- source_path = os.path.join(
- os.environ['SDKROOT'], 'ResourceRules.plist')
- shutil.copy2(source_path, target_path)
- return target_path
-
- def _InstallProvisioningProfile(self, profile, bundle_identifier):
- """Installs embedded.mobileprovision into the bundle.
-
- Args:
- profile: string, optional, short name of the .mobileprovision file
- to use, if empty or the file is missing, the best file installed
- will be used
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
-
- Returns:
- A tuple containing two dictionary: variables substitutions and values
- to overrides when generating the entitlements file.
- """
- source_path, provisioning_data, team_id = self._FindProvisioningProfile(
- profile, bundle_identifier)
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['CONTENTS_FOLDER_PATH'],
- 'embedded.mobileprovision')
- shutil.copy2(source_path, target_path)
- substitutions = self._GetSubstitutions(bundle_identifier, team_id + '.')
- return substitutions, provisioning_data['Entitlements']
-
- def _FindProvisioningProfile(self, profile, bundle_identifier):
- """Finds the .mobileprovision file to use for signing the bundle.
-
- Checks all the installed provisioning profiles (or if the user specified
- the PROVISIONING_PROFILE variable, only consult it) and select the most
- specific that correspond to the bundle identifier.
-
- Args:
- profile: string, optional, short name of the .mobileprovision file
- to use, if empty or the file is missing, the best file installed
- will be used
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
-
- Returns:
- A tuple of the path to the selected provisioning profile, the data of
- the embedded plist in the provisioning profile and the team identifier
- to use for code signing.
-
- Raises:
- SystemExit: if no .mobileprovision can be used to sign the bundle.
- """
- profiles_dir = os.path.join(
- os.environ['HOME'], 'Library', 'MobileDevice', 'Provisioning Profiles')
- if not os.path.isdir(profiles_dir):
- print >>sys.stderr, (
- 'cannot find mobile provisioning for %s' % bundle_identifier)
- sys.exit(1)
- provisioning_profiles = None
- if profile:
- profile_path = os.path.join(profiles_dir, profile + '.mobileprovision')
- if os.path.exists(profile_path):
- provisioning_profiles = [profile_path]
- if not provisioning_profiles:
- provisioning_profiles = glob.glob(
- os.path.join(profiles_dir, '*.mobileprovision'))
- valid_provisioning_profiles = {}
- for profile_path in provisioning_profiles:
- profile_data = self._LoadProvisioningProfile(profile_path)
- app_id_pattern = profile_data.get(
- 'Entitlements', {}).get('application-identifier', '')
- for team_identifier in profile_data.get('TeamIdentifier', []):
- app_id = '%s.%s' % (team_identifier, bundle_identifier)
- if fnmatch.fnmatch(app_id, app_id_pattern):
- valid_provisioning_profiles[app_id_pattern] = (
- profile_path, profile_data, team_identifier)
- if not valid_provisioning_profiles:
- print >>sys.stderr, (
- 'cannot find mobile provisioning for %s' % bundle_identifier)
- sys.exit(1)
- # If the user has multiple provisioning profiles installed that can be
- # used for ${bundle_identifier}, pick the most specific one (ie. the
- # provisioning profile whose pattern is the longest).
- selected_key = max(valid_provisioning_profiles, key=lambda v: len(v))
- return valid_provisioning_profiles[selected_key]
-
- def _LoadProvisioningProfile(self, profile_path):
- """Extracts the plist embedded in a provisioning profile.
-
- Args:
- profile_path: string, path to the .mobileprovision file
-
- Returns:
- Content of the plist embedded in the provisioning profile as a dictionary.
- """
- with tempfile.NamedTemporaryFile() as temp:
- subprocess.check_call([
- 'security', 'cms', '-D', '-i', profile_path, '-o', temp.name])
- return self._LoadPlistMaybeBinary(temp.name)
-
- def _MergePlist(self, merged_plist, plist):
- """Merge |plist| into |merged_plist|."""
- for key, value in plist.iteritems():
- if isinstance(value, dict):
- merged_value = merged_plist.get(key, {})
- if isinstance(merged_value, dict):
- self._MergePlist(merged_value, value)
- merged_plist[key] = merged_value
- else:
- merged_plist[key] = value
- else:
- merged_plist[key] = value
-
- def _LoadPlistMaybeBinary(self, plist_path):
- """Loads into a memory a plist possibly encoded in binary format.
-
- This is a wrapper around plistlib.readPlist that tries to convert the
- plist to the XML format if it can't be parsed (assuming that it is in
- the binary format).
-
- Args:
- plist_path: string, path to a plist file, in XML or binary format
-
- Returns:
- Content of the plist as a dictionary.
- """
- try:
- # First, try to read the file using plistlib that only supports XML,
- # and if an exception is raised, convert a temporary copy to XML and
- # load that copy.
- return plistlib.readPlist(plist_path)
- except:
- pass
- with tempfile.NamedTemporaryFile() as temp:
- shutil.copy2(plist_path, temp.name)
- subprocess.check_call(['plutil', '-convert', 'xml1', temp.name])
- return plistlib.readPlist(temp.name)
-
- def _GetSubstitutions(self, bundle_identifier, app_identifier_prefix):
- """Constructs a dictionary of variable substitutions for Entitlements.plist.
-
- Args:
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
- app_identifier_prefix: string, value for AppIdentifierPrefix
-
- Returns:
- Dictionary of substitutions to apply when generating Entitlements.plist.
- """
- return {
- 'CFBundleIdentifier': bundle_identifier,
- 'AppIdentifierPrefix': app_identifier_prefix,
- }
-
- def _GetCFBundleIdentifier(self):
- """Extracts CFBundleIdentifier value from Info.plist in the bundle.
-
- Returns:
- Value of CFBundleIdentifier in the Info.plist located in the bundle.
- """
- info_plist_path = os.path.join(
- os.environ['TARGET_BUILD_DIR'],
- os.environ['INFOPLIST_PATH'])
- info_plist_data = self._LoadPlistMaybeBinary(info_plist_path)
- return info_plist_data['CFBundleIdentifier']
-
- def _InstallEntitlements(self, entitlements, substitutions, overrides):
- """Generates and install the ${BundleName}.xcent entitlements file.
-
- Expands variables "$(variable)" pattern in the source entitlements file,
- add extra entitlements defined in the .mobileprovision file and the copy
- the generated plist to "${BundlePath}.xcent".
-
- Args:
- entitlements: string, optional, path to the Entitlements.plist template
- to use, defaults to "${SDKROOT}/Entitlements.plist"
- substitutions: dictionary, variable substitutions
- overrides: dictionary, values to add to the entitlements
-
- Returns:
- Path to the generated entitlements file.
- """
- source_path = entitlements
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['PRODUCT_NAME'] + '.xcent')
- if not source_path:
- source_path = os.path.join(
- os.environ['SDKROOT'],
- 'Entitlements.plist')
- shutil.copy2(source_path, target_path)
- data = self._LoadPlistMaybeBinary(target_path)
- data = self._ExpandVariables(data, substitutions)
- if overrides:
- for key in overrides:
- if key not in data:
- data[key] = overrides[key]
- plistlib.writePlist(data, target_path)
- return target_path
-
- def _ExpandVariables(self, data, substitutions):
- """Expands variables "$(variable)" in data.
-
- Args:
- data: object, can be either string, list or dictionary
- substitutions: dictionary, variable substitutions to perform
-
- Returns:
- Copy of data where each references to "$(variable)" has been replaced
- by the corresponding value found in substitutions, or left intact if
- the key was not found.
- """
- if isinstance(data, str):
- for key, value in substitutions.iteritems():
- data = data.replace('$(%s)' % key, value)
- return data
- if isinstance(data, list):
- return [self._ExpandVariables(v, substitutions) for v in data]
- if isinstance(data, dict):
- return {k: self._ExpandVariables(data[k], substitutions) for k in data}
- return data
-
-if __name__ == '__main__':
- sys.exit(main(sys.argv[1:]))
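The gyp-mac-tool removed above selects a signing profile by matching '<TeamID>.<bundle_identifier>' against each profile's 'application-identifier' entitlement with fnmatch, then keeping the longest (most specific) matching pattern. A minimal, standalone sketch of that selection rule, using a hypothetical `profiles` list of (path, plist_data) pairs in place of the files read from ~/Library/MobileDevice/Provisioning Profiles:

    import fnmatch

    def pick_profile(profiles, bundle_identifier):
        # profiles: hypothetical list of (path, plist_data) pairs, where
        # plist_data mirrors the decoded .mobileprovision plist.
        matches = {}
        for path, data in profiles:
            pattern = data.get('Entitlements', {}).get('application-identifier', '')
            for team_id in data.get('TeamIdentifier', []):
                app_id = '%s.%s' % (team_id, bundle_identifier)
                if fnmatch.fnmatch(app_id, pattern):
                    matches[pattern] = (path, data, team_id)
        if not matches:
            raise SystemExit('cannot find mobile provisioning for %s' % bundle_identifier)
        # An exact app id is longer, hence more specific, than a wildcard
        # such as 'TEAM.*', so the longest matching pattern wins.
        return matches[max(matches, key=len)]
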
diff --git a/test/addons-napi/8_passing_wrapped/build/Makefile b/test/addons-napi/8_passing_wrapped/build/Makefile
deleted file mode 100644
index 260978b733a982..00000000000000
--- a/test/addons-napi/8_passing_wrapped/build/Makefile
+++ /dev/null
@@ -1,342 +0,0 @@
-# We borrow heavily from the kernel build setup, though we are simpler since
-# we don't have Kconfig tweaking settings on us.
-
-# The implicit make rules have it looking for RCS files, among other things.
-# We instead explicitly write all the rules we care about.
-# It's even quicker (saves ~200ms) to pass -r on the command line.
-MAKEFLAGS=-r
-
-# The source directory tree.
-srcdir := ..
-abs_srcdir := $(abspath $(srcdir))
-
-# The name of the builddir.
-builddir_name ?= .
-
-# The V=1 flag on command line makes us verbosely print command lines.
-ifdef V
- quiet=
-else
- quiet=quiet_
-endif
-
-# Specify BUILDTYPE=Release on the command line for a release build.
-BUILDTYPE ?= Release
-
-# Directory all our build output goes into.
-# Note that this must be two directories beneath src/ for unit tests to pass,
-# as they reach into the src/ directory for data with relative paths.
-builddir ?= $(builddir_name)/$(BUILDTYPE)
-abs_builddir := $(abspath $(builddir))
-depsdir := $(builddir)/.deps
-
-# Object output directory.
-obj := $(builddir)/obj
-abs_obj := $(abspath $(obj))
-
-# We build up a list of every single one of the targets so we can slurp in the
-# generated dependency rule Makefiles in one pass.
-all_deps :=
-
-
-
-CC.target ?= $(CC)
-CFLAGS.target ?= $(CPPFLAGS) $(CFLAGS)
-CXX.target ?= $(CXX)
-CXXFLAGS.target ?= $(CPPFLAGS) $(CXXFLAGS)
-LINK.target ?= $(LINK)
-LDFLAGS.target ?= $(LDFLAGS)
-AR.target ?= $(AR)
-
-# C++ apps need to be linked with g++.
-LINK ?= $(CXX.target)
-
-# TODO(evan): move all cross-compilation logic to gyp-time so we don't need
-# to replicate this environment fallback in make as well.
-CC.host ?= gcc
-CFLAGS.host ?= $(CPPFLAGS_host) $(CFLAGS_host)
-CXX.host ?= g++
-CXXFLAGS.host ?= $(CPPFLAGS_host) $(CXXFLAGS_host)
-LINK.host ?= $(CXX.host)
-LDFLAGS.host ?=
-AR.host ?= ar
-
-# Define a dir function that can handle spaces.
-# http://www.gnu.org/software/make/manual/make.html#Syntax-of-Functions
-# "leading spaces cannot appear in the text of the first argument as written.
-# These characters can be put into the argument value by variable substitution."
-empty :=
-space := $(empty) $(empty)
-
-# http://stackoverflow.com/questions/1189781/using-make-dir-or-notdir-on-a-path-with-spaces
-replace_spaces = $(subst $(space),?,$1)
-unreplace_spaces = $(subst ?,$(space),$1)
-dirx = $(call unreplace_spaces,$(dir $(call replace_spaces,$1)))
-
-# Flags to make gcc output dependency info. Note that you need to be
-# careful here to use the flags that ccache and distcc can understand.
-# We write to a dep file on the side first and then rename at the end
-# so we can't end up with a broken dep file.
-depfile = $(depsdir)/$(call replace_spaces,$@).d
-DEPFLAGS = -MMD -MF $(depfile).raw
-
-# We have to fixup the deps output in a few ways.
-# (1) the file output should mention the proper .o file.
-# ccache or distcc lose the path to the target, so we convert a rule of
-# the form:
-# foobar.o: DEP1 DEP2
-# into
-# path/to/foobar.o: DEP1 DEP2
-# (2) we want missing files not to cause us to fail to build.
-# We want to rewrite
-# foobar.o: DEP1 DEP2 \
-# DEP3
-# to
-# DEP1:
-# DEP2:
-# DEP3:
-# so if the files are missing, they're just considered phony rules.
-# We have to do some pretty insane escaping to get those backslashes
-# and dollar signs past make, the shell, and sed at the same time.
-# Doesn't work with spaces, but that's fine: .d files have spaces in
-# their names replaced with other characters.
-define fixup_dep
-# The depfile may not exist if the input file didn't have any #includes.
-touch $(depfile).raw
-# Fixup path as in (1).
-sed -e "s|^$(notdir $@)|$@|" $(depfile).raw >> $(depfile)
-# Add extra rules as in (2).
-# We remove slashes and replace spaces with new lines;
-# remove blank lines;
-# delete the first line and append a colon to the remaining lines.
-sed -e 's|\\||' -e 'y| |\n|' $(depfile).raw |\
- grep -v '^$$' |\
- sed -e 1d -e 's|$$|:|' \
- >> $(depfile)
-rm $(depfile).raw
-endef
-
-# Command definitions:
-# - cmd_foo is the actual command to run;
-# - quiet_cmd_foo is the brief-output summary of the command.
-
-quiet_cmd_cc = CC($(TOOLSET)) $@
-cmd_cc = $(CC.$(TOOLSET)) $(GYP_CFLAGS) $(DEPFLAGS) $(CFLAGS.$(TOOLSET)) -c -o $@ $<
-
-quiet_cmd_cxx = CXX($(TOOLSET)) $@
-cmd_cxx = $(CXX.$(TOOLSET)) $(GYP_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-
-quiet_cmd_objc = CXX($(TOOLSET)) $@
-cmd_objc = $(CC.$(TOOLSET)) $(GYP_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-quiet_cmd_objcxx = CXX($(TOOLSET)) $@
-cmd_objcxx = $(CXX.$(TOOLSET)) $(GYP_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-# Commands for precompiled header files.
-quiet_cmd_pch_c = CXX($(TOOLSET)) $@
-cmd_pch_c = $(CC.$(TOOLSET)) $(GYP_PCH_CFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-quiet_cmd_pch_cc = CXX($(TOOLSET)) $@
-cmd_pch_cc = $(CC.$(TOOLSET)) $(GYP_PCH_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-quiet_cmd_pch_m = CXX($(TOOLSET)) $@
-cmd_pch_m = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $<
-quiet_cmd_pch_mm = CXX($(TOOLSET)) $@
-cmd_pch_mm = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-# gyp-mac-tool is written next to the root Makefile by gyp.
-# Use $(4) for the command, since $(2) and $(3) are used as flag by do_cmd
-# already.
-quiet_cmd_mac_tool = MACTOOL $(4) $<
-cmd_mac_tool = ./gyp-mac-tool $(4) $< "$@"
-
-quiet_cmd_mac_package_framework = PACKAGE FRAMEWORK $@
-cmd_mac_package_framework = ./gyp-mac-tool package-framework "$@" $(4)
-
-quiet_cmd_infoplist = INFOPLIST $@
-cmd_infoplist = $(CC.$(TOOLSET)) -E -P -Wno-trigraphs -x c $(INFOPLIST_DEFINES) "$<" -o "$@"
-
-quiet_cmd_touch = TOUCH $@
-cmd_touch = touch $@
-
-quiet_cmd_copy = COPY $@
-# send stderr to /dev/null to ignore messages when linking directories.
-cmd_copy = rm -rf "$@" && cp -af "$<" "$@"
-
-quiet_cmd_alink = LIBTOOL-STATIC $@
-cmd_alink = rm -f $@ && ./gyp-mac-tool filter-libtool libtool $(GYP_LIBTOOLFLAGS) -static -o $@ $(filter %.o,$^)
-
-quiet_cmd_link = LINK($(TOOLSET)) $@
-cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS)
-
-quiet_cmd_solink = SOLINK($(TOOLSET)) $@
-cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS)
-
-quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@
-cmd_solink_module = $(LINK.$(TOOLSET)) -bundle $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS)
-
-
-# Define an escape_quotes function to escape single quotes.
-# This allows us to handle quotes properly as long as we always use
-# use single quotes and escape_quotes.
-escape_quotes = $(subst ','\'',$(1))
-# This comment is here just to include a ' to unconfuse syntax highlighting.
-# Define an escape_vars function to escape '$' variable syntax.
-# This allows us to read/write command lines with shell variables (e.g.
-# $LD_LIBRARY_PATH), without triggering make substitution.
-escape_vars = $(subst $$,$$$$,$(1))
-# Helper that expands to a shell command to echo a string exactly as it is in
-# make. This uses printf instead of echo because printf's behaviour with respect
-# to escape sequences is more portable than echo's across different shells
-# (e.g., dash, bash).
-exact_echo = printf '%s\n' '$(call escape_quotes,$(1))'
-
-# Helper to compare the command we're about to run against the command
-# we logged the last time we ran the command. Produces an empty
-# string (false) when the commands match.
-# Tricky point: Make has no string-equality test function.
-# The kernel uses the following, but it seems like it would have false
-# positives, where one string reordered its arguments.
-# arg_check = $(strip $(filter-out $(cmd_$(1)), $(cmd_$@)) \
-# $(filter-out $(cmd_$@), $(cmd_$(1))))
-# We instead substitute each for the empty string into the other, and
-# say they're equal if both substitutions produce the empty string.
-# .d files contain ? instead of spaces, take that into account.
-command_changed = $(or $(subst $(cmd_$(1)),,$(cmd_$(call replace_spaces,$@))),\
- $(subst $(cmd_$(call replace_spaces,$@)),,$(cmd_$(1))))
-
-# Helper that is non-empty when a prerequisite changes.
-# Normally make does this implicitly, but we force rules to always run
-# so we can check their command lines.
-# $? -- new prerequisites
-# $| -- order-only dependencies
-prereq_changed = $(filter-out FORCE_DO_CMD,$(filter-out $|,$?))
-
-# Helper that executes all postbuilds until one fails.
-define do_postbuilds
- @E=0;\
- for p in $(POSTBUILDS); do\
- eval $$p;\
- E=$$?;\
- if [ $$E -ne 0 ]; then\
- break;\
- fi;\
- done;\
- if [ $$E -ne 0 ]; then\
- rm -rf "$@";\
- exit $$E;\
- fi
-endef
-
-# do_cmd: run a command via the above cmd_foo names, if necessary.
-# Should always run for a given target to handle command-line changes.
-# Second argument, if non-zero, makes it do asm/C/C++ dependency munging.
-# Third argument, if non-zero, makes it do POSTBUILDS processing.
-# Note: We intentionally do NOT call dirx for depfile, since it contains ? for
-# spaces already and dirx strips the ? characters.
-define do_cmd
-$(if $(or $(command_changed),$(prereq_changed)),
- @$(call exact_echo, $($(quiet)cmd_$(1)))
- @mkdir -p "$(call dirx,$@)" "$(dir $(depfile))"
- $(if $(findstring flock,$(word 2,$(cmd_$1))),
- @$(cmd_$(1))
- @echo " $(quiet_cmd_$(1)): Finished",
- @$(cmd_$(1))
- )
- @$(call exact_echo,$(call escape_vars,cmd_$(call replace_spaces,$@) := $(cmd_$(1)))) > $(depfile)
- @$(if $(2),$(fixup_dep))
- $(if $(and $(3), $(POSTBUILDS)),
- $(call do_postbuilds)
- )
-)
-endef
-
-# Declare the "all" target first so it is the default,
-# even though we don't have the deps yet.
-.PHONY: all
-all:
-
-# make looks for ways to re-generate included makefiles, but in our case, we
-# don't have a direct way. Explicitly telling make that it has nothing to do
-# for them makes it go faster.
-%.d: ;
-
-# Use FORCE_DO_CMD to force a target to run. Should be coupled with
-# do_cmd.
-.PHONY: FORCE_DO_CMD
-FORCE_DO_CMD:
-
-TOOLSET := target
-# Suffix rules, putting all outputs into $(obj).
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cpp FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cxx FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.m FORCE_DO_CMD
- @$(call do_cmd,objc,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.mm FORCE_DO_CMD
- @$(call do_cmd,objcxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.S FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.s FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-# Try building from generated source, too.
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cpp FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cxx FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.m FORCE_DO_CMD
- @$(call do_cmd,objc,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.mm FORCE_DO_CMD
- @$(call do_cmd,objcxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.S FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.s FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-$(obj).$(TOOLSET)/%.o: $(obj)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.cpp FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.cxx FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.m FORCE_DO_CMD
- @$(call do_cmd,objc,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.mm FORCE_DO_CMD
- @$(call do_cmd,objcxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.S FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.s FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-
-ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
- $(findstring $(join ^,$(prefix)),\
- $(join ^,binding.target.mk)))),)
- include binding.target.mk
-endif
-
-quiet_cmd_regen_makefile = ACTION Regenerating $@
-cmd_regen_makefile = cd $(srcdir); /Users/trott/io.js/deps/npm/node_modules/node-gyp/gyp/gyp_main.py -fmake --ignore-environment "--toplevel-dir=." -I/Users/trott/io.js/test/addons-napi/8_passing_wrapped/build/config.gypi -I/Users/trott/io.js/deps/npm/node_modules/node-gyp/addon.gypi -I/Users/trott/io.js/common.gypi "--depth=." "-Goutput_dir=." "--generator-output=build" "-Dlibrary=shared_library" "-Dvisibility=default" "-Dnode_root_dir=/Users/trott/io.js" "-Dnode_gyp_dir=/Users/trott/io.js/deps/npm/node_modules/node-gyp" "-Dnode_lib_file=/Users/trott/io.js/$(Configuration)/node.lib" "-Dmodule_root_dir=/Users/trott/io.js/test/addons-napi/8_passing_wrapped" "-Dnode_engine=v8" binding.gyp
-Makefile: $(srcdir)/../../../deps/npm/node_modules/node-gyp/addon.gypi $(srcdir)/build/config.gypi $(srcdir)/binding.gyp $(srcdir)/../../../common.gypi
- $(call do_cmd,regen_makefile)
-
-# "all" is a concatenation of the "all" targets from all the included
-# sub-makefiles. This is just here to clarify.
-all:
-
-# Add in dependency-tracking rules. $(all_deps) is the list of every single
-# target in our tree. Only consider the ones with .d (dependency) info:
-d_files := $(wildcard $(foreach f,$(all_deps),$(depsdir)/$(f).d))
-ifneq ($(d_files),)
- include $(d_files)
-endif
diff --git a/test/addons-napi/8_passing_wrapped/build/binding.Makefile b/test/addons-napi/8_passing_wrapped/build/binding.Makefile
deleted file mode 100644
index 0556c70f61905f..00000000000000
--- a/test/addons-napi/8_passing_wrapped/build/binding.Makefile
+++ /dev/null
@@ -1,6 +0,0 @@
-# This file is generated by gyp; do not edit.
-
-export builddir_name ?= ./build/.
-.PHONY: all
-all:
- $(MAKE) binding
diff --git a/test/addons-napi/8_passing_wrapped/build/binding.target.mk b/test/addons-napi/8_passing_wrapped/build/binding.target.mk
deleted file mode 100644
index 6bca2c1f4f6abd..00000000000000
--- a/test/addons-napi/8_passing_wrapped/build/binding.target.mk
+++ /dev/null
@@ -1,179 +0,0 @@
-# This file is generated by gyp; do not edit.
-
-TOOLSET := target
-TARGET := binding
-DEFS_Debug := \
- '-DNODE_GYP_MODULE_NAME=binding' \
- '-DUSING_UV_SHARED=1' \
- '-DUSING_V8_SHARED=1' \
- '-DV8_DEPRECATION_WARNINGS=1' \
- '-D_DARWIN_USE_64_BIT_INODE=1' \
- '-D_LARGEFILE_SOURCE' \
- '-D_FILE_OFFSET_BITS=64' \
- '-DBUILDING_NODE_EXTENSION' \
- '-DDEBUG' \
- '-D_DEBUG' \
- '-DV8_ENABLE_CHECKS'
-
-# Flags passed to all source files.
-CFLAGS_Debug := \
- -O0 \
- -gdwarf-2 \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -Wall \
- -Wendif-labels \
- -W \
- -Wno-unused-parameter
-
-# Flags passed to only C files.
-CFLAGS_C_Debug := \
- -fno-strict-aliasing
-
-# Flags passed to only C++ files.
-CFLAGS_CC_Debug := \
- -std=gnu++0x \
- -stdlib=libc++ \
- -fno-rtti \
- -fno-exceptions \
- -fno-threadsafe-statics \
- -fno-strict-aliasing
-
-# Flags passed to only ObjC files.
-CFLAGS_OBJC_Debug :=
-
-# Flags passed to only ObjC++ files.
-CFLAGS_OBJCC_Debug :=
-
-INCS_Debug := \
- -I/Users/trott/io.js/include/node \
- -I/Users/trott/io.js/src \
- -I/Users/trott/io.js/deps/uv/include \
- -I/Users/trott/io.js/deps/v8/include
-
-DEFS_Release := \
- '-DNODE_GYP_MODULE_NAME=binding' \
- '-DUSING_UV_SHARED=1' \
- '-DUSING_V8_SHARED=1' \
- '-DV8_DEPRECATION_WARNINGS=1' \
- '-D_DARWIN_USE_64_BIT_INODE=1' \
- '-D_LARGEFILE_SOURCE' \
- '-D_FILE_OFFSET_BITS=64' \
- '-DBUILDING_NODE_EXTENSION'
-
-# Flags passed to all source files.
-CFLAGS_Release := \
- -Os \
- -gdwarf-2 \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -Wall \
- -Wendif-labels \
- -W \
- -Wno-unused-parameter
-
-# Flags passed to only C files.
-CFLAGS_C_Release := \
- -fno-strict-aliasing
-
-# Flags passed to only C++ files.
-CFLAGS_CC_Release := \
- -std=gnu++0x \
- -stdlib=libc++ \
- -fno-rtti \
- -fno-exceptions \
- -fno-threadsafe-statics \
- -fno-strict-aliasing
-
-# Flags passed to only ObjC files.
-CFLAGS_OBJC_Release :=
-
-# Flags passed to only ObjC++ files.
-CFLAGS_OBJCC_Release :=
-
-INCS_Release := \
- -I/Users/trott/io.js/include/node \
- -I/Users/trott/io.js/src \
- -I/Users/trott/io.js/deps/uv/include \
- -I/Users/trott/io.js/deps/v8/include
-
-OBJS := \
- $(obj).target/$(TARGET)/binding.o \
- $(obj).target/$(TARGET)/myobject.o
-
-# Add to the list of files we specially track dependencies for.
-all_deps += $(OBJS)
-
-# CFLAGS et al overrides must be target-local.
-# See "Target-specific Variable Values" in the GNU Make manual.
-$(OBJS): TOOLSET := $(TOOLSET)
-$(OBJS): GYP_CFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE))
-$(OBJS): GYP_CXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE))
-$(OBJS): GYP_OBJCFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) $(CFLAGS_OBJC_$(BUILDTYPE))
-$(OBJS): GYP_OBJCXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) $(CFLAGS_OBJCC_$(BUILDTYPE))
-
-# Suffix rules, putting all outputs into $(obj).
-
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-
-# Try building from generated source, too.
-
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-
-# End of this set of suffix rules
-### Rules for final target.
-LDFLAGS_Debug := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -L$(builddir) \
- -stdlib=libc++
-
-LIBTOOLFLAGS_Debug := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first
-
-LDFLAGS_Release := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -L$(builddir) \
- -stdlib=libc++
-
-LIBTOOLFLAGS_Release := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first
-
-LIBS :=
-
-$(builddir)/binding.node: GYP_LDFLAGS := $(LDFLAGS_$(BUILDTYPE))
-$(builddir)/binding.node: LIBS := $(LIBS)
-$(builddir)/binding.node: GYP_LIBTOOLFLAGS := $(LIBTOOLFLAGS_$(BUILDTYPE))
-$(builddir)/binding.node: TOOLSET := $(TOOLSET)
-$(builddir)/binding.node: $(OBJS) FORCE_DO_CMD
- $(call do_cmd,solink_module)
-
-all_deps += $(builddir)/binding.node
-# Add target alias
-.PHONY: binding
-binding: $(builddir)/binding.node
-
-# Short alias for building this executable.
-.PHONY: binding.node
-binding.node: $(builddir)/binding.node
-
-# Add executable to "all" target.
-.PHONY: all
-all: $(builddir)/binding.node
-
diff --git a/test/addons-napi/8_passing_wrapped/build/config.gypi b/test/addons-napi/8_passing_wrapped/build/config.gypi
deleted file mode 100644
index 6a35cfccbb772b..00000000000000
--- a/test/addons-napi/8_passing_wrapped/build/config.gypi
+++ /dev/null
@@ -1,70 +0,0 @@
-# Do not edit. File was generated by node-gyp's "configure" step
-{
- "target_defaults": {
- "cflags": [],
- "default_configuration": "Release",
- "defines": [],
- "include_dirs": [],
- "libraries": []
- },
- "variables": {
- "asan": 0,
- "coverage": "false",
- "debug_devtools": "node",
- "debug_http2": "false",
- "debug_nghttp2": "false",
- "force_dynamic_crt": 0,
- "host_arch": "x64",
- "icu_data_file": "icudt59l.dat",
- "icu_data_in": "../../deps/icu-small/source/data/in/icudt59l.dat",
- "icu_endianness": "l",
- "icu_gyp_path": "tools/icu/icu-generic.gyp",
- "icu_locales": "en,root",
- "icu_path": "deps/icu-small",
- "icu_small": "true",
- "icu_ver_major": "59",
- "llvm_version": 0,
- "node_byteorder": "little",
- "node_enable_d8": "false",
- "node_enable_v8_vtunejit": "false",
- "node_install_npm": "true",
- "node_module_version": 58,
- "node_no_browser_globals": "false",
- "node_prefix": "/usr/local",
- "node_release_urlbase": "",
- "node_shared": "false",
- "node_shared_cares": "false",
- "node_shared_http_parser": "false",
- "node_shared_libuv": "false",
- "node_shared_openssl": "false",
- "node_shared_zlib": "false",
- "node_tag": "",
- "node_use_bundled_v8": "true",
- "node_use_dtrace": "true",
- "node_use_etw": "false",
- "node_use_lttng": "false",
- "node_use_openssl": "true",
- "node_use_perfctr": "false",
- "node_use_v8_platform": "true",
- "node_without_node_options": "false",
- "openssl_fips": "",
- "openssl_no_asm": 0,
- "shlib_suffix": "58.dylib",
- "target_arch": "x64",
- "uv_parent_path": "/deps/uv/",
- "uv_use_dtrace": "true",
- "v8_enable_gdbjit": 0,
- "v8_enable_i18n_support": 1,
- "v8_enable_inspector": 1,
- "v8_no_strict_aliasing": 1,
- "v8_optimized_debug": 0,
- "v8_promise_internal_field_count": 1,
- "v8_random_seed": 0,
- "v8_trace_maps": 0,
- "v8_use_snapshot": "true",
- "want_separate_host_toolset": 0,
- "xcode_version": "8.0",
- "nodedir": "/Users/trott/io.js",
- "standalone_static_library": 1
- }
-}
diff --git a/test/addons-napi/8_passing_wrapped/build/gyp-mac-tool b/test/addons-napi/8_passing_wrapped/build/gyp-mac-tool
deleted file mode 100755
index 8ef02b0493a003..00000000000000
--- a/test/addons-napi/8_passing_wrapped/build/gyp-mac-tool
+++ /dev/null
@@ -1,611 +0,0 @@
-#!/usr/bin/env python
-# Generated by gyp. Do not edit.
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Utility functions to perform Xcode-style build steps.
-
-These functions are executed via gyp-mac-tool when using the Makefile generator.
-"""
-
-import fcntl
-import fnmatch
-import glob
-import json
-import os
-import plistlib
-import re
-import shutil
-import string
-import subprocess
-import sys
-import tempfile
-
-
-def main(args):
- executor = MacTool()
- exit_code = executor.Dispatch(args)
- if exit_code is not None:
- sys.exit(exit_code)
-
-
-class MacTool(object):
- """This class performs all the Mac tooling steps. The methods can either be
- executed directly, or dispatched from an argument list."""
-
- def Dispatch(self, args):
- """Dispatches a string command to a method."""
- if len(args) < 1:
- raise Exception("Not enough arguments")
-
- method = "Exec%s" % self._CommandifyName(args[0])
- return getattr(self, method)(*args[1:])
-
- def _CommandifyName(self, name_string):
- """Transforms a tool name like copy-info-plist to CopyInfoPlist"""
- return name_string.title().replace('-', '')
-
- def ExecCopyBundleResource(self, source, dest, convert_to_binary):
- """Copies a resource file to the bundle/Resources directory, performing any
- necessary compilation on each resource."""
- extension = os.path.splitext(source)[1].lower()
- if os.path.isdir(source):
- # Copy tree.
- # TODO(thakis): This copies file attributes like mtime, while the
- # single-file branch below doesn't. This should probably be changed to
- # be consistent with the single-file branch.
- if os.path.exists(dest):
- shutil.rmtree(dest)
- shutil.copytree(source, dest)
- elif extension == '.xib':
- return self._CopyXIBFile(source, dest)
- elif extension == '.storyboard':
- return self._CopyXIBFile(source, dest)
- elif extension == '.strings':
- self._CopyStringsFile(source, dest, convert_to_binary)
- else:
- shutil.copy(source, dest)
-
- def _CopyXIBFile(self, source, dest):
- """Compiles a XIB file with ibtool into a binary plist in the bundle."""
-
- # ibtool sometimes crashes with relative paths. See crbug.com/314728.
- base = os.path.dirname(os.path.realpath(__file__))
- if os.path.relpath(source):
- source = os.path.join(base, source)
- if os.path.relpath(dest):
- dest = os.path.join(base, dest)
-
- args = ['xcrun', 'ibtool', '--errors', '--warnings', '--notices',
- '--output-format', 'human-readable-text', '--compile', dest, source]
- ibtool_section_re = re.compile(r'/\*.*\*/')
- ibtool_re = re.compile(r'.*note:.*is clipping its content')
- ibtoolout = subprocess.Popen(args, stdout=subprocess.PIPE)
- current_section_header = None
- for line in ibtoolout.stdout:
- if ibtool_section_re.match(line):
- current_section_header = line
- elif not ibtool_re.match(line):
- if current_section_header:
- sys.stdout.write(current_section_header)
- current_section_header = None
- sys.stdout.write(line)
- return ibtoolout.returncode
-
- def _ConvertToBinary(self, dest):
- subprocess.check_call([
- 'xcrun', 'plutil', '-convert', 'binary1', '-o', dest, dest])
-
- def _CopyStringsFile(self, source, dest, convert_to_binary):
- """Copies a .strings file using iconv to reconvert the input into UTF-16."""
- input_code = self._DetectInputEncoding(source) or "UTF-8"
-
- # Xcode's CpyCopyStringsFile / builtin-copyStrings seems to call
- # CFPropertyListCreateFromXMLData() behind the scenes; at least it prints
- # CFPropertyListCreateFromXMLData(): Old-style plist parser: missing
- # semicolon in dictionary.
- # on invalid files. Do the same kind of validation.
- import CoreFoundation
- s = open(source, 'rb').read()
- d = CoreFoundation.CFDataCreate(None, s, len(s))
- _, error = CoreFoundation.CFPropertyListCreateFromXMLData(None, d, 0, None)
- if error:
- return
-
- fp = open(dest, 'wb')
- fp.write(s.decode(input_code).encode('UTF-16'))
- fp.close()
-
- if convert_to_binary == 'True':
- self._ConvertToBinary(dest)
-
- def _DetectInputEncoding(self, file_name):
- """Reads the first few bytes from file_name and tries to guess the text
- encoding. Returns None as a guess if it can't detect it."""
- fp = open(file_name, 'rb')
- try:
- header = fp.read(3)
- except e:
- fp.close()
- return None
- fp.close()
- if header.startswith("\xFE\xFF"):
- return "UTF-16"
- elif header.startswith("\xFF\xFE"):
- return "UTF-16"
- elif header.startswith("\xEF\xBB\xBF"):
- return "UTF-8"
- else:
- return None
-
- def ExecCopyInfoPlist(self, source, dest, convert_to_binary, *keys):
- """Copies the |source| Info.plist to the destination directory |dest|."""
- # Read the source Info.plist into memory.
- fd = open(source, 'r')
- lines = fd.read()
- fd.close()
-
- # Insert synthesized key/value pairs (e.g. BuildMachineOSBuild).
- plist = plistlib.readPlistFromString(lines)
- if keys:
- plist = dict(plist.items() + json.loads(keys[0]).items())
- lines = plistlib.writePlistToString(plist)
-
- # Go through all the environment variables and replace them as variables in
- # the file.
- IDENT_RE = re.compile(r'[/\s]')
- for key in os.environ:
- if key.startswith('_'):
- continue
- evar = '${%s}' % key
- evalue = os.environ[key]
- lines = string.replace(lines, evar, evalue)
-
- # Xcode supports various suffices on environment variables, which are
- # all undocumented. :rfc1034identifier is used in the standard project
- # template these days, and :identifier was used earlier. They are used to
- # convert non-url characters into things that look like valid urls --
- # except that the replacement character for :identifier, '_' isn't valid
- # in a URL either -- oops, hence :rfc1034identifier was born.
- evar = '${%s:identifier}' % key
- evalue = IDENT_RE.sub('_', os.environ[key])
- lines = string.replace(lines, evar, evalue)
-
- evar = '${%s:rfc1034identifier}' % key
- evalue = IDENT_RE.sub('-', os.environ[key])
- lines = string.replace(lines, evar, evalue)
-
- # Remove any keys with values that haven't been replaced.
- lines = lines.split('\n')
- for i in range(len(lines)):
- if lines[i].strip().startswith("${"):
- lines[i] = None
- lines[i - 1] = None
- lines = '\n'.join(filter(lambda x: x is not None, lines))
-
- # Write out the file with variables replaced.
- fd = open(dest, 'w')
- fd.write(lines)
- fd.close()
-
- # Now write out PkgInfo file now that the Info.plist file has been
- # "compiled".
- self._WritePkgInfo(dest)
-
- if convert_to_binary == 'True':
- self._ConvertToBinary(dest)
-
- def _WritePkgInfo(self, info_plist):
- """This writes the PkgInfo file from the data stored in Info.plist."""
- plist = plistlib.readPlist(info_plist)
- if not plist:
- return
-
- # Only create PkgInfo for executable types.
- package_type = plist['CFBundlePackageType']
- if package_type != 'APPL':
- return
-
- # The format of PkgInfo is eight characters, representing the bundle type
- # and bundle signature, each four characters. If that is missing, four
- # '?' characters are used instead.
- signature_code = plist.get('CFBundleSignature', '????')
- if len(signature_code) != 4: # Wrong length resets everything, too.
- signature_code = '?' * 4
-
- dest = os.path.join(os.path.dirname(info_plist), 'PkgInfo')
- fp = open(dest, 'w')
- fp.write('%s%s' % (package_type, signature_code))
- fp.close()
-
- def ExecFlock(self, lockfile, *cmd_list):
- """Emulates the most basic behavior of Linux's flock(1)."""
- # Rely on exception handling to report errors.
- fd = os.open(lockfile, os.O_RDONLY|os.O_NOCTTY|os.O_CREAT, 0o666)
- fcntl.flock(fd, fcntl.LOCK_EX)
- return subprocess.call(cmd_list)
-
- def ExecFilterLibtool(self, *cmd_list):
- """Calls libtool and filters out '/path/to/libtool: file: foo.o has no
- symbols'."""
- libtool_re = re.compile(r'^.*libtool: file: .* has no symbols$')
- libtool_re5 = re.compile(
- r'^.*libtool: warning for library: ' +
- r'.* the table of contents is empty ' +
- r'\(no object file members in the library define global symbols\)$')
- env = os.environ.copy()
- # Ref:
- # http://www.opensource.apple.com/source/cctools/cctools-809/misc/libtool.c
- # The problem with this flag is that it resets the file mtime on the file to
- # epoch=0, e.g. 1970-1-1 or 1969-12-31 depending on timezone.
- env['ZERO_AR_DATE'] = '1'
- libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env)
- _, err = libtoolout.communicate()
- for line in err.splitlines():
- if not libtool_re.match(line) and not libtool_re5.match(line):
- print >>sys.stderr, line
- # Unconditionally touch the output .a file on the command line if present
- # and the command succeeded. A bit hacky.
- if not libtoolout.returncode:
- for i in range(len(cmd_list) - 1):
- if cmd_list[i] == "-o" and cmd_list[i+1].endswith('.a'):
- os.utime(cmd_list[i+1], None)
- break
- return libtoolout.returncode
-
- def ExecPackageFramework(self, framework, version):
- """Takes a path to Something.framework and the Current version of that and
- sets up all the symlinks."""
- # Find the name of the binary based on the part before the ".framework".
- binary = os.path.basename(framework).split('.')[0]
-
- CURRENT = 'Current'
- RESOURCES = 'Resources'
- VERSIONS = 'Versions'
-
- if not os.path.exists(os.path.join(framework, VERSIONS, version, binary)):
- # Binary-less frameworks don't seem to contain symlinks (see e.g.
- # chromium's out/Debug/org.chromium.Chromium.manifest/ bundle).
- return
-
- # Move into the framework directory to set the symlinks correctly.
- pwd = os.getcwd()
- os.chdir(framework)
-
- # Set up the Current version.
- self._Relink(version, os.path.join(VERSIONS, CURRENT))
-
- # Set up the root symlinks.
- self._Relink(os.path.join(VERSIONS, CURRENT, binary), binary)
- self._Relink(os.path.join(VERSIONS, CURRENT, RESOURCES), RESOURCES)
-
- # Back to where we were before!
- os.chdir(pwd)
-
- def _Relink(self, dest, link):
- """Creates a symlink to |dest| named |link|. If |link| already exists,
- it is overwritten."""
- if os.path.lexists(link):
- os.remove(link)
- os.symlink(dest, link)
-
- def ExecCompileXcassets(self, keys, *inputs):
- """Compiles multiple .xcassets files into a single .car file.
-
- This invokes 'actool' to compile all the inputs .xcassets files. The
- |keys| arguments is a json-encoded dictionary of extra arguments to
- pass to 'actool' when the asset catalogs contains an application icon
- or a launch image.
-
- Note that 'actool' does not create the Assets.car file if the asset
- catalogs does not contains imageset.
- """
- command_line = [
- 'xcrun', 'actool', '--output-format', 'human-readable-text',
- '--compress-pngs', '--notices', '--warnings', '--errors',
- ]
- is_iphone_target = 'IPHONEOS_DEPLOYMENT_TARGET' in os.environ
- if is_iphone_target:
- platform = os.environ['CONFIGURATION'].split('-')[-1]
- if platform not in ('iphoneos', 'iphonesimulator'):
- platform = 'iphonesimulator'
- command_line.extend([
- '--platform', platform, '--target-device', 'iphone',
- '--target-device', 'ipad', '--minimum-deployment-target',
- os.environ['IPHONEOS_DEPLOYMENT_TARGET'], '--compile',
- os.path.abspath(os.environ['CONTENTS_FOLDER_PATH']),
- ])
- else:
- command_line.extend([
- '--platform', 'macosx', '--target-device', 'mac',
- '--minimum-deployment-target', os.environ['MACOSX_DEPLOYMENT_TARGET'],
- '--compile',
- os.path.abspath(os.environ['UNLOCALIZED_RESOURCES_FOLDER_PATH']),
- ])
- if keys:
- keys = json.loads(keys)
- for key, value in keys.iteritems():
- arg_name = '--' + key
- if isinstance(value, bool):
- if value:
- command_line.append(arg_name)
- elif isinstance(value, list):
- for v in value:
- command_line.append(arg_name)
- command_line.append(str(v))
- else:
- command_line.append(arg_name)
- command_line.append(str(value))
- # Note: actool crashes if inputs path are relative, so use os.path.abspath
- # to get absolute path name for inputs.
- command_line.extend(map(os.path.abspath, inputs))
- subprocess.check_call(command_line)
-
- def ExecMergeInfoPlist(self, output, *inputs):
- """Merge multiple .plist files into a single .plist file."""
- merged_plist = {}
- for path in inputs:
- plist = self._LoadPlistMaybeBinary(path)
- self._MergePlist(merged_plist, plist)
- plistlib.writePlist(merged_plist, output)
-
- def ExecCodeSignBundle(self, key, resource_rules, entitlements, provisioning):
- """Code sign a bundle.
-
- This function tries to code sign an iOS bundle, following the same
- algorithm as Xcode:
- 1. copy ResourceRules.plist from the user or the SDK into the bundle,
- 2. pick the provisioning profile that best match the bundle identifier,
- and copy it into the bundle as embedded.mobileprovision,
- 3. copy Entitlements.plist from user or SDK next to the bundle,
- 4. code sign the bundle.
- """
- resource_rules_path = self._InstallResourceRules(resource_rules)
- substitutions, overrides = self._InstallProvisioningProfile(
- provisioning, self._GetCFBundleIdentifier())
- entitlements_path = self._InstallEntitlements(
- entitlements, substitutions, overrides)
- subprocess.check_call([
- 'codesign', '--force', '--sign', key, '--resource-rules',
- resource_rules_path, '--entitlements', entitlements_path,
- os.path.join(
- os.environ['TARGET_BUILD_DIR'],
- os.environ['FULL_PRODUCT_NAME'])])
-
- def _InstallResourceRules(self, resource_rules):
- """Installs ResourceRules.plist from user or SDK into the bundle.
-
- Args:
- resource_rules: string, optional, path to the ResourceRules.plist file
- to use, default to "${SDKROOT}/ResourceRules.plist"
-
- Returns:
- Path to the copy of ResourceRules.plist into the bundle.
- """
- source_path = resource_rules
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['CONTENTS_FOLDER_PATH'],
- 'ResourceRules.plist')
- if not source_path:
- source_path = os.path.join(
- os.environ['SDKROOT'], 'ResourceRules.plist')
- shutil.copy2(source_path, target_path)
- return target_path
-
- def _InstallProvisioningProfile(self, profile, bundle_identifier):
- """Installs embedded.mobileprovision into the bundle.
-
- Args:
- profile: string, optional, short name of the .mobileprovision file
- to use, if empty or the file is missing, the best file installed
- will be used
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
-
- Returns:
- A tuple containing two dictionary: variables substitutions and values
- to overrides when generating the entitlements file.
- """
- source_path, provisioning_data, team_id = self._FindProvisioningProfile(
- profile, bundle_identifier)
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['CONTENTS_FOLDER_PATH'],
- 'embedded.mobileprovision')
- shutil.copy2(source_path, target_path)
- substitutions = self._GetSubstitutions(bundle_identifier, team_id + '.')
- return substitutions, provisioning_data['Entitlements']
-
- def _FindProvisioningProfile(self, profile, bundle_identifier):
- """Finds the .mobileprovision file to use for signing the bundle.
-
- Checks all the installed provisioning profiles (or if the user specified
- the PROVISIONING_PROFILE variable, only consult it) and select the most
- specific that correspond to the bundle identifier.
-
- Args:
- profile: string, optional, short name of the .mobileprovision file
- to use, if empty or the file is missing, the best file installed
- will be used
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
-
- Returns:
- A tuple of the path to the selected provisioning profile, the data of
- the embedded plist in the provisioning profile and the team identifier
- to use for code signing.
-
- Raises:
- SystemExit: if no .mobileprovision can be used to sign the bundle.
- """
- profiles_dir = os.path.join(
- os.environ['HOME'], 'Library', 'MobileDevice', 'Provisioning Profiles')
- if not os.path.isdir(profiles_dir):
- print >>sys.stderr, (
- 'cannot find mobile provisioning for %s' % bundle_identifier)
- sys.exit(1)
- provisioning_profiles = None
- if profile:
- profile_path = os.path.join(profiles_dir, profile + '.mobileprovision')
- if os.path.exists(profile_path):
- provisioning_profiles = [profile_path]
- if not provisioning_profiles:
- provisioning_profiles = glob.glob(
- os.path.join(profiles_dir, '*.mobileprovision'))
- valid_provisioning_profiles = {}
- for profile_path in provisioning_profiles:
- profile_data = self._LoadProvisioningProfile(profile_path)
- app_id_pattern = profile_data.get(
- 'Entitlements', {}).get('application-identifier', '')
- for team_identifier in profile_data.get('TeamIdentifier', []):
- app_id = '%s.%s' % (team_identifier, bundle_identifier)
- if fnmatch.fnmatch(app_id, app_id_pattern):
- valid_provisioning_profiles[app_id_pattern] = (
- profile_path, profile_data, team_identifier)
- if not valid_provisioning_profiles:
- print >>sys.stderr, (
- 'cannot find mobile provisioning for %s' % bundle_identifier)
- sys.exit(1)
- # If the user has multiple provisioning profiles installed that can be
- # used for ${bundle_identifier}, pick the most specific one (ie. the
- # provisioning profile whose pattern is the longest).
- selected_key = max(valid_provisioning_profiles, key=lambda v: len(v))
- return valid_provisioning_profiles[selected_key]
-
- def _LoadProvisioningProfile(self, profile_path):
- """Extracts the plist embedded in a provisioning profile.
-
- Args:
- profile_path: string, path to the .mobileprovision file
-
- Returns:
- Content of the plist embedded in the provisioning profile as a dictionary.
- """
- with tempfile.NamedTemporaryFile() as temp:
- subprocess.check_call([
- 'security', 'cms', '-D', '-i', profile_path, '-o', temp.name])
- return self._LoadPlistMaybeBinary(temp.name)
-
- def _MergePlist(self, merged_plist, plist):
- """Merge |plist| into |merged_plist|."""
- for key, value in plist.iteritems():
- if isinstance(value, dict):
- merged_value = merged_plist.get(key, {})
- if isinstance(merged_value, dict):
- self._MergePlist(merged_value, value)
- merged_plist[key] = merged_value
- else:
- merged_plist[key] = value
- else:
- merged_plist[key] = value
-
- def _LoadPlistMaybeBinary(self, plist_path):
- """Loads into a memory a plist possibly encoded in binary format.
-
- This is a wrapper around plistlib.readPlist that tries to convert the
- plist to the XML format if it can't be parsed (assuming that it is in
- the binary format).
-
- Args:
- plist_path: string, path to a plist file, in XML or binary format
-
- Returns:
- Content of the plist as a dictionary.
- """
- try:
- # First, try to read the file using plistlib that only supports XML,
- # and if an exception is raised, convert a temporary copy to XML and
- # load that copy.
- return plistlib.readPlist(plist_path)
- except:
- pass
- with tempfile.NamedTemporaryFile() as temp:
- shutil.copy2(plist_path, temp.name)
- subprocess.check_call(['plutil', '-convert', 'xml1', temp.name])
- return plistlib.readPlist(temp.name)
-
- def _GetSubstitutions(self, bundle_identifier, app_identifier_prefix):
- """Constructs a dictionary of variable substitutions for Entitlements.plist.
-
- Args:
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
- app_identifier_prefix: string, value for AppIdentifierPrefix
-
- Returns:
- Dictionary of substitutions to apply when generating Entitlements.plist.
- """
- return {
- 'CFBundleIdentifier': bundle_identifier,
- 'AppIdentifierPrefix': app_identifier_prefix,
- }
-
- def _GetCFBundleIdentifier(self):
- """Extracts CFBundleIdentifier value from Info.plist in the bundle.
-
- Returns:
- Value of CFBundleIdentifier in the Info.plist located in the bundle.
- """
- info_plist_path = os.path.join(
- os.environ['TARGET_BUILD_DIR'],
- os.environ['INFOPLIST_PATH'])
- info_plist_data = self._LoadPlistMaybeBinary(info_plist_path)
- return info_plist_data['CFBundleIdentifier']
-
- def _InstallEntitlements(self, entitlements, substitutions, overrides):
- """Generates and install the ${BundleName}.xcent entitlements file.
-
- Expands variables "$(variable)" pattern in the source entitlements file,
- add extra entitlements defined in the .mobileprovision file and the copy
- the generated plist to "${BundlePath}.xcent".
-
- Args:
- entitlements: string, optional, path to the Entitlements.plist template
- to use, defaults to "${SDKROOT}/Entitlements.plist"
- substitutions: dictionary, variable substitutions
- overrides: dictionary, values to add to the entitlements
-
- Returns:
- Path to the generated entitlements file.
- """
- source_path = entitlements
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['PRODUCT_NAME'] + '.xcent')
- if not source_path:
- source_path = os.path.join(
- os.environ['SDKROOT'],
- 'Entitlements.plist')
- shutil.copy2(source_path, target_path)
- data = self._LoadPlistMaybeBinary(target_path)
- data = self._ExpandVariables(data, substitutions)
- if overrides:
- for key in overrides:
- if key not in data:
- data[key] = overrides[key]
- plistlib.writePlist(data, target_path)
- return target_path
-
- def _ExpandVariables(self, data, substitutions):
- """Expands variables "$(variable)" in data.
-
- Args:
- data: object, can be either string, list or dictionary
- substitutions: dictionary, variable substitutions to perform
-
- Returns:
- Copy of data where each references to "$(variable)" has been replaced
- by the corresponding value found in substitutions, or left intact if
- the key was not found.
- """
- if isinstance(data, str):
- for key, value in substitutions.iteritems():
- data = data.replace('$(%s)' % key, value)
- return data
- if isinstance(data, list):
- return [self._ExpandVariables(v, substitutions) for v in data]
- if isinstance(data, dict):
- return {k: self._ExpandVariables(data[k], substitutions) for k in data}
- return data
-
-if __name__ == '__main__':
- sys.exit(main(sys.argv[1:]))
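The _LoadPlistMaybeBinary helper in the file removed above exists because the Python 2 plistlib only parses XML, so binary plists are first converted with plutil via a temporary copy. On Python 3.4 and later the standard library reads both formats directly, so the same behaviour reduces to a few lines (a sketch under that assumption, not code from this tree):

    import plistlib

    def load_plist(path):
        # plistlib.load() in Python 3.4+ auto-detects XML and binary plists,
        # so no plutil round-trip through a temporary file is needed.
        with open(path, 'rb') as fp:
            return plistlib.load(fp)
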
diff --git a/test/addons-napi/test_array/build/Makefile b/test/addons-napi/test_array/build/Makefile
deleted file mode 100644
index 0293bded3c6164..00000000000000
--- a/test/addons-napi/test_array/build/Makefile
+++ /dev/null
@@ -1,342 +0,0 @@
-# We borrow heavily from the kernel build setup, though we are simpler since
-# we don't have Kconfig tweaking settings on us.
-
-# The implicit make rules have it looking for RCS files, among other things.
-# We instead explicitly write all the rules we care about.
-# It's even quicker (saves ~200ms) to pass -r on the command line.
-MAKEFLAGS=-r
-
-# The source directory tree.
-srcdir := ..
-abs_srcdir := $(abspath $(srcdir))
-
-# The name of the builddir.
-builddir_name ?= .
-
-# The V=1 flag on command line makes us verbosely print command lines.
-ifdef V
- quiet=
-else
- quiet=quiet_
-endif
-
-# Specify BUILDTYPE=Release on the command line for a release build.
-BUILDTYPE ?= Release
-
-# Directory all our build output goes into.
-# Note that this must be two directories beneath src/ for unit tests to pass,
-# as they reach into the src/ directory for data with relative paths.
-builddir ?= $(builddir_name)/$(BUILDTYPE)
-abs_builddir := $(abspath $(builddir))
-depsdir := $(builddir)/.deps
-
-# Object output directory.
-obj := $(builddir)/obj
-abs_obj := $(abspath $(obj))
-
-# We build up a list of every single one of the targets so we can slurp in the
-# generated dependency rule Makefiles in one pass.
-all_deps :=
-
-
-
-CC.target ?= $(CC)
-CFLAGS.target ?= $(CPPFLAGS) $(CFLAGS)
-CXX.target ?= $(CXX)
-CXXFLAGS.target ?= $(CPPFLAGS) $(CXXFLAGS)
-LINK.target ?= $(LINK)
-LDFLAGS.target ?= $(LDFLAGS)
-AR.target ?= $(AR)
-
-# C++ apps need to be linked with g++.
-LINK ?= $(CXX.target)
-
-# TODO(evan): move all cross-compilation logic to gyp-time so we don't need
-# to replicate this environment fallback in make as well.
-CC.host ?= gcc
-CFLAGS.host ?= $(CPPFLAGS_host) $(CFLAGS_host)
-CXX.host ?= g++
-CXXFLAGS.host ?= $(CPPFLAGS_host) $(CXXFLAGS_host)
-LINK.host ?= $(CXX.host)
-LDFLAGS.host ?=
-AR.host ?= ar
-
-# Define a dir function that can handle spaces.
-# http://www.gnu.org/software/make/manual/make.html#Syntax-of-Functions
-# "leading spaces cannot appear in the text of the first argument as written.
-# These characters can be put into the argument value by variable substitution."
-empty :=
-space := $(empty) $(empty)
-
-# http://stackoverflow.com/questions/1189781/using-make-dir-or-notdir-on-a-path-with-spaces
-replace_spaces = $(subst $(space),?,$1)
-unreplace_spaces = $(subst ?,$(space),$1)
-dirx = $(call unreplace_spaces,$(dir $(call replace_spaces,$1)))
-
-# Flags to make gcc output dependency info. Note that you need to be
-# careful here to use the flags that ccache and distcc can understand.
-# We write to a dep file on the side first and then rename at the end
-# so we can't end up with a broken dep file.
-depfile = $(depsdir)/$(call replace_spaces,$@).d
-DEPFLAGS = -MMD -MF $(depfile).raw
-
-# We have to fixup the deps output in a few ways.
-# (1) the file output should mention the proper .o file.
-# ccache or distcc lose the path to the target, so we convert a rule of
-# the form:
-# foobar.o: DEP1 DEP2
-# into
-# path/to/foobar.o: DEP1 DEP2
-# (2) we want missing files not to cause us to fail to build.
-# We want to rewrite
-# foobar.o: DEP1 DEP2 \
-# DEP3
-# to
-# DEP1:
-# DEP2:
-# DEP3:
-# so if the files are missing, they're just considered phony rules.
-# We have to do some pretty insane escaping to get those backslashes
-# and dollar signs past make, the shell, and sed at the same time.
-# Doesn't work with spaces, but that's fine: .d files have spaces in
-# their names replaced with other characters.
-define fixup_dep
-# The depfile may not exist if the input file didn't have any #includes.
-touch $(depfile).raw
-# Fixup path as in (1).
-sed -e "s|^$(notdir $@)|$@|" $(depfile).raw >> $(depfile)
-# Add extra rules as in (2).
-# We remove slashes and replace spaces with new lines;
-# remove blank lines;
-# delete the first line and append a colon to the remaining lines.
-sed -e 's|\\||' -e 'y| |\n|' $(depfile).raw |\
- grep -v '^$$' |\
- sed -e 1d -e 's|$$|:|' \
- >> $(depfile)
-rm $(depfile).raw
-endef
-
-# Command definitions:
-# - cmd_foo is the actual command to run;
-# - quiet_cmd_foo is the brief-output summary of the command.
-
-quiet_cmd_cc = CC($(TOOLSET)) $@
-cmd_cc = $(CC.$(TOOLSET)) $(GYP_CFLAGS) $(DEPFLAGS) $(CFLAGS.$(TOOLSET)) -c -o $@ $<
-
-quiet_cmd_cxx = CXX($(TOOLSET)) $@
-cmd_cxx = $(CXX.$(TOOLSET)) $(GYP_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-
-quiet_cmd_objc = CXX($(TOOLSET)) $@
-cmd_objc = $(CC.$(TOOLSET)) $(GYP_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-quiet_cmd_objcxx = CXX($(TOOLSET)) $@
-cmd_objcxx = $(CXX.$(TOOLSET)) $(GYP_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-# Commands for precompiled header files.
-quiet_cmd_pch_c = CXX($(TOOLSET)) $@
-cmd_pch_c = $(CC.$(TOOLSET)) $(GYP_PCH_CFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-quiet_cmd_pch_cc = CXX($(TOOLSET)) $@
-cmd_pch_cc = $(CC.$(TOOLSET)) $(GYP_PCH_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-quiet_cmd_pch_m = CXX($(TOOLSET)) $@
-cmd_pch_m = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $<
-quiet_cmd_pch_mm = CXX($(TOOLSET)) $@
-cmd_pch_mm = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-# gyp-mac-tool is written next to the root Makefile by gyp.
-# Use $(4) for the command, since $(2) and $(3) are used as flags by do_cmd
-# already.
-quiet_cmd_mac_tool = MACTOOL $(4) $<
-cmd_mac_tool = ./gyp-mac-tool $(4) $< "$@"
-
-quiet_cmd_mac_package_framework = PACKAGE FRAMEWORK $@
-cmd_mac_package_framework = ./gyp-mac-tool package-framework "$@" $(4)
-
-quiet_cmd_infoplist = INFOPLIST $@
-cmd_infoplist = $(CC.$(TOOLSET)) -E -P -Wno-trigraphs -x c $(INFOPLIST_DEFINES) "$<" -o "$@"
-
-quiet_cmd_touch = TOUCH $@
-cmd_touch = touch $@
-
-quiet_cmd_copy = COPY $@
-# send stderr to /dev/null to ignore messages when linking directories.
-cmd_copy = rm -rf "$@" && cp -af "$<" "$@"
-
-quiet_cmd_alink = LIBTOOL-STATIC $@
-cmd_alink = rm -f $@ && ./gyp-mac-tool filter-libtool libtool $(GYP_LIBTOOLFLAGS) -static -o $@ $(filter %.o,$^)
-
-quiet_cmd_link = LINK($(TOOLSET)) $@
-cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS)
-
-quiet_cmd_solink = SOLINK($(TOOLSET)) $@
-cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS)
-
-quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@
-cmd_solink_module = $(LINK.$(TOOLSET)) -bundle $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS)
-
-
-# Define an escape_quotes function to escape single quotes.
-# This allows us to handle quotes properly as long as we always
-# use single quotes and escape_quotes.
-escape_quotes = $(subst ','\'',$(1))
-# This comment is here just to include a ' to unconfuse syntax highlighting.
-# Define an escape_vars function to escape '$' variable syntax.
-# This allows us to read/write command lines with shell variables (e.g.
-# $LD_LIBRARY_PATH), without triggering make substitution.
-escape_vars = $(subst $$,$$$$,$(1))
-# Helper that expands to a shell command to echo a string exactly as it is in
-# make. This uses printf instead of echo because printf's behaviour with respect
-# to escape sequences is more portable than echo's across different shells
-# (e.g., dash, bash).
-exact_echo = printf '%s\n' '$(call escape_quotes,$(1))'
-
-# Helper to compare the command we're about to run against the command
-# we logged the last time we ran the command. Produces an empty
-# string (false) when the commands match.
-# Tricky point: Make has no string-equality test function.
-# The kernel uses the following, but it seems like it would have false
-# positives, where one string reordered its arguments.
-# arg_check = $(strip $(filter-out $(cmd_$(1)), $(cmd_$@)) \
-# $(filter-out $(cmd_$@), $(cmd_$(1))))
-# We instead substitute each for the empty string into the other, and
-# say they're equal if both substitutions produce the empty string.
-# .d files contain ? instead of spaces, take that into account.
-command_changed = $(or $(subst $(cmd_$(1)),,$(cmd_$(call replace_spaces,$@))),\
- $(subst $(cmd_$(call replace_spaces,$@)),,$(cmd_$(1))))
-
-# Helper that is non-empty when a prerequisite changes.
-# Normally make does this implicitly, but we force rules to always run
-# so we can check their command lines.
-# $? -- new prerequisites
-# $| -- order-only dependencies
-prereq_changed = $(filter-out FORCE_DO_CMD,$(filter-out $|,$?))
-
-# Helper that executes all postbuilds until one fails.
-define do_postbuilds
- @E=0;\
- for p in $(POSTBUILDS); do\
- eval $$p;\
- E=$$?;\
- if [ $$E -ne 0 ]; then\
- break;\
- fi;\
- done;\
- if [ $$E -ne 0 ]; then\
- rm -rf "$@";\
- exit $$E;\
- fi
-endef
-
-# do_cmd: run a command via the above cmd_foo names, if necessary.
-# Should always run for a given target to handle command-line changes.
-# Second argument, if non-zero, makes it do asm/C/C++ dependency munging.
-# Third argument, if non-zero, makes it do POSTBUILDS processing.
-# Note: We intentionally do NOT call dirx for depfile, since it contains ? for
-# spaces already and dirx strips the ? characters.
-define do_cmd
-$(if $(or $(command_changed),$(prereq_changed)),
- @$(call exact_echo, $($(quiet)cmd_$(1)))
- @mkdir -p "$(call dirx,$@)" "$(dir $(depfile))"
- $(if $(findstring flock,$(word 2,$(cmd_$1))),
- @$(cmd_$(1))
- @echo " $(quiet_cmd_$(1)): Finished",
- @$(cmd_$(1))
- )
- @$(call exact_echo,$(call escape_vars,cmd_$(call replace_spaces,$@) := $(cmd_$(1)))) > $(depfile)
- @$(if $(2),$(fixup_dep))
- $(if $(and $(3), $(POSTBUILDS)),
- $(call do_postbuilds)
- )
-)
-endef
-
-# Declare the "all" target first so it is the default,
-# even though we don't have the deps yet.
-.PHONY: all
-all:
-
-# make looks for ways to re-generate included makefiles, but in our case, we
-# don't have a direct way. Explicitly telling make that it has nothing to do
-# for them makes it go faster.
-%.d: ;
-
-# Use FORCE_DO_CMD to force a target to run. Should be coupled with
-# do_cmd.
-.PHONY: FORCE_DO_CMD
-FORCE_DO_CMD:
-
-TOOLSET := target
-# Suffix rules, putting all outputs into $(obj).
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cpp FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cxx FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.m FORCE_DO_CMD
- @$(call do_cmd,objc,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.mm FORCE_DO_CMD
- @$(call do_cmd,objcxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.S FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.s FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-# Try building from generated source, too.
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cpp FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cxx FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.m FORCE_DO_CMD
- @$(call do_cmd,objc,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.mm FORCE_DO_CMD
- @$(call do_cmd,objcxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.S FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.s FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-$(obj).$(TOOLSET)/%.o: $(obj)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.cpp FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.cxx FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.m FORCE_DO_CMD
- @$(call do_cmd,objc,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.mm FORCE_DO_CMD
- @$(call do_cmd,objcxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.S FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.s FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-
-ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
- $(findstring $(join ^,$(prefix)),\
- $(join ^,test_array.target.mk)))),)
- include test_array.target.mk
-endif
-
-quiet_cmd_regen_makefile = ACTION Regenerating $@
-cmd_regen_makefile = cd $(srcdir); /Users/trott/io.js/deps/npm/node_modules/node-gyp/gyp/gyp_main.py -fmake --ignore-environment "--toplevel-dir=." -I/Users/trott/io.js/test/addons-napi/test_array/build/config.gypi -I/Users/trott/io.js/deps/npm/node_modules/node-gyp/addon.gypi -I/Users/trott/io.js/common.gypi "--depth=." "-Goutput_dir=." "--generator-output=build" "-Dlibrary=shared_library" "-Dvisibility=default" "-Dnode_root_dir=/Users/trott/io.js" "-Dnode_gyp_dir=/Users/trott/io.js/deps/npm/node_modules/node-gyp" "-Dnode_lib_file=/Users/trott/io.js/$(Configuration)/node.lib" "-Dmodule_root_dir=/Users/trott/io.js/test/addons-napi/test_array" "-Dnode_engine=v8" binding.gyp
-Makefile: $(srcdir)/../../../deps/npm/node_modules/node-gyp/addon.gypi $(srcdir)/build/config.gypi $(srcdir)/binding.gyp $(srcdir)/../../../common.gypi
- $(call do_cmd,regen_makefile)
-
-# "all" is a concatenation of the "all" targets from all the included
-# sub-makefiles. This is just here to clarify.
-all:
-
-# Add in dependency-tracking rules. $(all_deps) is the list of every single
-# target in our tree. Only consider the ones with .d (dependency) info:
-d_files := $(wildcard $(foreach f,$(all_deps),$(depsdir)/$(f).d))
-ifneq ($(d_files),)
- include $(d_files)
-endif
diff --git a/test/addons-napi/test_array/build/binding.Makefile b/test/addons-napi/test_array/build/binding.Makefile
deleted file mode 100644
index c393342cfa470c..00000000000000
--- a/test/addons-napi/test_array/build/binding.Makefile
+++ /dev/null
@@ -1,6 +0,0 @@
-# This file is generated by gyp; do not edit.
-
-export builddir_name ?= ./build/.
-.PHONY: all
-all:
- $(MAKE) test_array
diff --git a/test/addons-napi/test_array/build/config.gypi b/test/addons-napi/test_array/build/config.gypi
deleted file mode 100644
index 6a35cfccbb772b..00000000000000
--- a/test/addons-napi/test_array/build/config.gypi
+++ /dev/null
@@ -1,70 +0,0 @@
-# Do not edit. File was generated by node-gyp's "configure" step
-{
- "target_defaults": {
- "cflags": [],
- "default_configuration": "Release",
- "defines": [],
- "include_dirs": [],
- "libraries": []
- },
- "variables": {
- "asan": 0,
- "coverage": "false",
- "debug_devtools": "node",
- "debug_http2": "false",
- "debug_nghttp2": "false",
- "force_dynamic_crt": 0,
- "host_arch": "x64",
- "icu_data_file": "icudt59l.dat",
- "icu_data_in": "../../deps/icu-small/source/data/in/icudt59l.dat",
- "icu_endianness": "l",
- "icu_gyp_path": "tools/icu/icu-generic.gyp",
- "icu_locales": "en,root",
- "icu_path": "deps/icu-small",
- "icu_small": "true",
- "icu_ver_major": "59",
- "llvm_version": 0,
- "node_byteorder": "little",
- "node_enable_d8": "false",
- "node_enable_v8_vtunejit": "false",
- "node_install_npm": "true",
- "node_module_version": 58,
- "node_no_browser_globals": "false",
- "node_prefix": "/usr/local",
- "node_release_urlbase": "",
- "node_shared": "false",
- "node_shared_cares": "false",
- "node_shared_http_parser": "false",
- "node_shared_libuv": "false",
- "node_shared_openssl": "false",
- "node_shared_zlib": "false",
- "node_tag": "",
- "node_use_bundled_v8": "true",
- "node_use_dtrace": "true",
- "node_use_etw": "false",
- "node_use_lttng": "false",
- "node_use_openssl": "true",
- "node_use_perfctr": "false",
- "node_use_v8_platform": "true",
- "node_without_node_options": "false",
- "openssl_fips": "",
- "openssl_no_asm": 0,
- "shlib_suffix": "58.dylib",
- "target_arch": "x64",
- "uv_parent_path": "/deps/uv/",
- "uv_use_dtrace": "true",
- "v8_enable_gdbjit": 0,
- "v8_enable_i18n_support": 1,
- "v8_enable_inspector": 1,
- "v8_no_strict_aliasing": 1,
- "v8_optimized_debug": 0,
- "v8_promise_internal_field_count": 1,
- "v8_random_seed": 0,
- "v8_trace_maps": 0,
- "v8_use_snapshot": "true",
- "want_separate_host_toolset": 0,
- "xcode_version": "8.0",
- "nodedir": "/Users/trott/io.js",
- "standalone_static_library": 1
- }
-}
diff --git a/test/addons-napi/test_array/build/gyp-mac-tool b/test/addons-napi/test_array/build/gyp-mac-tool
deleted file mode 100755
index 8ef02b0493a003..00000000000000
--- a/test/addons-napi/test_array/build/gyp-mac-tool
+++ /dev/null
@@ -1,611 +0,0 @@
-#!/usr/bin/env python
-# Generated by gyp. Do not edit.
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Utility functions to perform Xcode-style build steps.
-
-These functions are executed via gyp-mac-tool when using the Makefile generator.
-"""
-
-import fcntl
-import fnmatch
-import glob
-import json
-import os
-import plistlib
-import re
-import shutil
-import string
-import subprocess
-import sys
-import tempfile
-
-
-def main(args):
- executor = MacTool()
- exit_code = executor.Dispatch(args)
- if exit_code is not None:
- sys.exit(exit_code)
-
-
-class MacTool(object):
- """This class performs all the Mac tooling steps. The methods can either be
- executed directly, or dispatched from an argument list."""
-
- def Dispatch(self, args):
- """Dispatches a string command to a method."""
- if len(args) < 1:
- raise Exception("Not enough arguments")
-
- method = "Exec%s" % self._CommandifyName(args[0])
- return getattr(self, method)(*args[1:])
-
- def _CommandifyName(self, name_string):
- """Transforms a tool name like copy-info-plist to CopyInfoPlist"""
- return name_string.title().replace('-', '')
-
- def ExecCopyBundleResource(self, source, dest, convert_to_binary):
- """Copies a resource file to the bundle/Resources directory, performing any
- necessary compilation on each resource."""
- extension = os.path.splitext(source)[1].lower()
- if os.path.isdir(source):
- # Copy tree.
- # TODO(thakis): This copies file attributes like mtime, while the
- # single-file branch below doesn't. This should probably be changed to
- # be consistent with the single-file branch.
- if os.path.exists(dest):
- shutil.rmtree(dest)
- shutil.copytree(source, dest)
- elif extension == '.xib':
- return self._CopyXIBFile(source, dest)
- elif extension == '.storyboard':
- return self._CopyXIBFile(source, dest)
- elif extension == '.strings':
- self._CopyStringsFile(source, dest, convert_to_binary)
- else:
- shutil.copy(source, dest)
-
- def _CopyXIBFile(self, source, dest):
- """Compiles a XIB file with ibtool into a binary plist in the bundle."""
-
- # ibtool sometimes crashes with relative paths. See crbug.com/314728.
- base = os.path.dirname(os.path.realpath(__file__))
- if os.path.relpath(source):
- source = os.path.join(base, source)
- if os.path.relpath(dest):
- dest = os.path.join(base, dest)
-
- args = ['xcrun', 'ibtool', '--errors', '--warnings', '--notices',
- '--output-format', 'human-readable-text', '--compile', dest, source]
- ibtool_section_re = re.compile(r'/\*.*\*/')
- ibtool_re = re.compile(r'.*note:.*is clipping its content')
- ibtoolout = subprocess.Popen(args, stdout=subprocess.PIPE)
- current_section_header = None
- for line in ibtoolout.stdout:
- if ibtool_section_re.match(line):
- current_section_header = line
- elif not ibtool_re.match(line):
- if current_section_header:
- sys.stdout.write(current_section_header)
- current_section_header = None
- sys.stdout.write(line)
- return ibtoolout.returncode
-
- def _ConvertToBinary(self, dest):
- subprocess.check_call([
- 'xcrun', 'plutil', '-convert', 'binary1', '-o', dest, dest])
-
- def _CopyStringsFile(self, source, dest, convert_to_binary):
- """Copies a .strings file using iconv to reconvert the input into UTF-16."""
- input_code = self._DetectInputEncoding(source) or "UTF-8"
-
- # Xcode's CpyCopyStringsFile / builtin-copyStrings seems to call
- # CFPropertyListCreateFromXMLData() behind the scenes; at least it prints
- # CFPropertyListCreateFromXMLData(): Old-style plist parser: missing
- # semicolon in dictionary.
- # on invalid files. Do the same kind of validation.
- import CoreFoundation
- s = open(source, 'rb').read()
- d = CoreFoundation.CFDataCreate(None, s, len(s))
- _, error = CoreFoundation.CFPropertyListCreateFromXMLData(None, d, 0, None)
- if error:
- return
-
- fp = open(dest, 'wb')
- fp.write(s.decode(input_code).encode('UTF-16'))
- fp.close()
-
- if convert_to_binary == 'True':
- self._ConvertToBinary(dest)
-
- def _DetectInputEncoding(self, file_name):
- """Reads the first few bytes from file_name and tries to guess the text
- encoding. Returns None as a guess if it can't detect it."""
- fp = open(file_name, 'rb')
- try:
- header = fp.read(3)
- except e:
- fp.close()
- return None
- fp.close()
- if header.startswith("\xFE\xFF"):
- return "UTF-16"
- elif header.startswith("\xFF\xFE"):
- return "UTF-16"
- elif header.startswith("\xEF\xBB\xBF"):
- return "UTF-8"
- else:
- return None
-
- def ExecCopyInfoPlist(self, source, dest, convert_to_binary, *keys):
- """Copies the |source| Info.plist to the destination directory |dest|."""
- # Read the source Info.plist into memory.
- fd = open(source, 'r')
- lines = fd.read()
- fd.close()
-
- # Insert synthesized key/value pairs (e.g. BuildMachineOSBuild).
- plist = plistlib.readPlistFromString(lines)
- if keys:
- plist = dict(plist.items() + json.loads(keys[0]).items())
- lines = plistlib.writePlistToString(plist)
-
- # Go through all the environment variables and replace them as variables in
- # the file.
- IDENT_RE = re.compile(r'[/\s]')
- for key in os.environ:
- if key.startswith('_'):
- continue
- evar = '${%s}' % key
- evalue = os.environ[key]
- lines = string.replace(lines, evar, evalue)
-
- # Xcode supports various suffixes on environment variables, which are
- # all undocumented. :rfc1034identifier is used in the standard project
- # template these days, and :identifier was used earlier. They are used to
- # convert non-url characters into things that look like valid urls --
- # except that the replacement character for :identifier, '_' isn't valid
- # in a URL either -- oops, hence :rfc1034identifier was born.
- evar = '${%s:identifier}' % key
- evalue = IDENT_RE.sub('_', os.environ[key])
- lines = string.replace(lines, evar, evalue)
-
- evar = '${%s:rfc1034identifier}' % key
- evalue = IDENT_RE.sub('-', os.environ[key])
- lines = string.replace(lines, evar, evalue)
-
- # Remove any keys with values that haven't been replaced.
- lines = lines.split('\n')
- for i in range(len(lines)):
- if lines[i].strip().startswith("${"):
- lines[i] = None
- lines[i - 1] = None
- lines = '\n'.join(filter(lambda x: x is not None, lines))
-
- # Write out the file with variables replaced.
- fd = open(dest, 'w')
- fd.write(lines)
- fd.close()
-
- # Now write out PkgInfo file now that the Info.plist file has been
- # "compiled".
- self._WritePkgInfo(dest)
-
- if convert_to_binary == 'True':
- self._ConvertToBinary(dest)
-
- def _WritePkgInfo(self, info_plist):
- """This writes the PkgInfo file from the data stored in Info.plist."""
- plist = plistlib.readPlist(info_plist)
- if not plist:
- return
-
- # Only create PkgInfo for executable types.
- package_type = plist['CFBundlePackageType']
- if package_type != 'APPL':
- return
-
- # The format of PkgInfo is eight characters, representing the bundle type
- # and bundle signature, each four characters. If that is missing, four
- # '?' characters are used instead.
- signature_code = plist.get('CFBundleSignature', '????')
- if len(signature_code) != 4: # Wrong length resets everything, too.
- signature_code = '?' * 4
-
- dest = os.path.join(os.path.dirname(info_plist), 'PkgInfo')
- fp = open(dest, 'w')
- fp.write('%s%s' % (package_type, signature_code))
- fp.close()
-
- def ExecFlock(self, lockfile, *cmd_list):
- """Emulates the most basic behavior of Linux's flock(1)."""
- # Rely on exception handling to report errors.
- fd = os.open(lockfile, os.O_RDONLY|os.O_NOCTTY|os.O_CREAT, 0o666)
- fcntl.flock(fd, fcntl.LOCK_EX)
- return subprocess.call(cmd_list)
-
- def ExecFilterLibtool(self, *cmd_list):
- """Calls libtool and filters out '/path/to/libtool: file: foo.o has no
- symbols'."""
- libtool_re = re.compile(r'^.*libtool: file: .* has no symbols$')
- libtool_re5 = re.compile(
- r'^.*libtool: warning for library: ' +
- r'.* the table of contents is empty ' +
- r'\(no object file members in the library define global symbols\)$')
- env = os.environ.copy()
- # Ref:
- # http://www.opensource.apple.com/source/cctools/cctools-809/misc/libtool.c
- # The problem with this flag is that it resets the file mtime on the file to
- # epoch=0, e.g. 1970-1-1 or 1969-12-31 depending on timezone.
- env['ZERO_AR_DATE'] = '1'
- libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env)
- _, err = libtoolout.communicate()
- for line in err.splitlines():
- if not libtool_re.match(line) and not libtool_re5.match(line):
- print >>sys.stderr, line
- # Unconditionally touch the output .a file on the command line if present
- # and the command succeeded. A bit hacky.
- if not libtoolout.returncode:
- for i in range(len(cmd_list) - 1):
- if cmd_list[i] == "-o" and cmd_list[i+1].endswith('.a'):
- os.utime(cmd_list[i+1], None)
- break
- return libtoolout.returncode
-
- def ExecPackageFramework(self, framework, version):
- """Takes a path to Something.framework and the Current version of that and
- sets up all the symlinks."""
- # Find the name of the binary based on the part before the ".framework".
- binary = os.path.basename(framework).split('.')[0]
-
- CURRENT = 'Current'
- RESOURCES = 'Resources'
- VERSIONS = 'Versions'
-
- if not os.path.exists(os.path.join(framework, VERSIONS, version, binary)):
- # Binary-less frameworks don't seem to contain symlinks (see e.g.
- # chromium's out/Debug/org.chromium.Chromium.manifest/ bundle).
- return
-
- # Move into the framework directory to set the symlinks correctly.
- pwd = os.getcwd()
- os.chdir(framework)
-
- # Set up the Current version.
- self._Relink(version, os.path.join(VERSIONS, CURRENT))
-
- # Set up the root symlinks.
- self._Relink(os.path.join(VERSIONS, CURRENT, binary), binary)
- self._Relink(os.path.join(VERSIONS, CURRENT, RESOURCES), RESOURCES)
-
- # Back to where we were before!
- os.chdir(pwd)
-
- def _Relink(self, dest, link):
- """Creates a symlink to |dest| named |link|. If |link| already exists,
- it is overwritten."""
- if os.path.lexists(link):
- os.remove(link)
- os.symlink(dest, link)
-
- def ExecCompileXcassets(self, keys, *inputs):
- """Compiles multiple .xcassets files into a single .car file.
-
- This invokes 'actool' to compile all the inputs .xcassets files. The
- |keys| argument is a json-encoded dictionary of extra arguments to
- pass to 'actool' when the asset catalog contains an application icon
- or a launch image.
-
- Note that 'actool' does not create the Assets.car file if the asset
- catalog does not contain an imageset.
- """
- command_line = [
- 'xcrun', 'actool', '--output-format', 'human-readable-text',
- '--compress-pngs', '--notices', '--warnings', '--errors',
- ]
- is_iphone_target = 'IPHONEOS_DEPLOYMENT_TARGET' in os.environ
- if is_iphone_target:
- platform = os.environ['CONFIGURATION'].split('-')[-1]
- if platform not in ('iphoneos', 'iphonesimulator'):
- platform = 'iphonesimulator'
- command_line.extend([
- '--platform', platform, '--target-device', 'iphone',
- '--target-device', 'ipad', '--minimum-deployment-target',
- os.environ['IPHONEOS_DEPLOYMENT_TARGET'], '--compile',
- os.path.abspath(os.environ['CONTENTS_FOLDER_PATH']),
- ])
- else:
- command_line.extend([
- '--platform', 'macosx', '--target-device', 'mac',
- '--minimum-deployment-target', os.environ['MACOSX_DEPLOYMENT_TARGET'],
- '--compile',
- os.path.abspath(os.environ['UNLOCALIZED_RESOURCES_FOLDER_PATH']),
- ])
- if keys:
- keys = json.loads(keys)
- for key, value in keys.iteritems():
- arg_name = '--' + key
- if isinstance(value, bool):
- if value:
- command_line.append(arg_name)
- elif isinstance(value, list):
- for v in value:
- command_line.append(arg_name)
- command_line.append(str(v))
- else:
- command_line.append(arg_name)
- command_line.append(str(value))
- # Note: actool crashes if input paths are relative, so use os.path.abspath
- # to get absolute path name for inputs.
- command_line.extend(map(os.path.abspath, inputs))
- subprocess.check_call(command_line)
-
- def ExecMergeInfoPlist(self, output, *inputs):
- """Merge multiple .plist files into a single .plist file."""
- merged_plist = {}
- for path in inputs:
- plist = self._LoadPlistMaybeBinary(path)
- self._MergePlist(merged_plist, plist)
- plistlib.writePlist(merged_plist, output)
-
- def ExecCodeSignBundle(self, key, resource_rules, entitlements, provisioning):
- """Code sign a bundle.
-
- This function tries to code sign an iOS bundle, following the same
- algorithm as Xcode:
- 1. copy ResourceRules.plist from the user or the SDK into the bundle,
- 2. pick the provisioning profile that best matches the bundle identifier,
- and copy it into the bundle as embedded.mobileprovision,
- 3. copy Entitlements.plist from user or SDK next to the bundle,
- 4. code sign the bundle.
- """
- resource_rules_path = self._InstallResourceRules(resource_rules)
- substitutions, overrides = self._InstallProvisioningProfile(
- provisioning, self._GetCFBundleIdentifier())
- entitlements_path = self._InstallEntitlements(
- entitlements, substitutions, overrides)
- subprocess.check_call([
- 'codesign', '--force', '--sign', key, '--resource-rules',
- resource_rules_path, '--entitlements', entitlements_path,
- os.path.join(
- os.environ['TARGET_BUILD_DIR'],
- os.environ['FULL_PRODUCT_NAME'])])
-
- def _InstallResourceRules(self, resource_rules):
- """Installs ResourceRules.plist from user or SDK into the bundle.
-
- Args:
- resource_rules: string, optional, path to the ResourceRules.plist file
- to use, defaults to "${SDKROOT}/ResourceRules.plist"
-
- Returns:
- Path to the copy of ResourceRules.plist into the bundle.
- """
- source_path = resource_rules
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['CONTENTS_FOLDER_PATH'],
- 'ResourceRules.plist')
- if not source_path:
- source_path = os.path.join(
- os.environ['SDKROOT'], 'ResourceRules.plist')
- shutil.copy2(source_path, target_path)
- return target_path
-
- def _InstallProvisioningProfile(self, profile, bundle_identifier):
- """Installs embedded.mobileprovision into the bundle.
-
- Args:
- profile: string, optional, short name of the .mobileprovision file
- to use, if empty or the file is missing, the best file installed
- will be used
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
-
- Returns:
- A tuple containing two dictionaries: variable substitutions and values
- to override when generating the entitlements file.
- """
- source_path, provisioning_data, team_id = self._FindProvisioningProfile(
- profile, bundle_identifier)
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['CONTENTS_FOLDER_PATH'],
- 'embedded.mobileprovision')
- shutil.copy2(source_path, target_path)
- substitutions = self._GetSubstitutions(bundle_identifier, team_id + '.')
- return substitutions, provisioning_data['Entitlements']
-
- def _FindProvisioningProfile(self, profile, bundle_identifier):
- """Finds the .mobileprovision file to use for signing the bundle.
-
- Checks all the installed provisioning profiles (or if the user specified
- the PROVISIONING_PROFILE variable, only consults it) and selects the most
- specific one that corresponds to the bundle identifier.
-
- Args:
- profile: string, optional, short name of the .mobileprovision file
- to use, if empty or the file is missing, the best file installed
- will be used
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
-
- Returns:
- A tuple of the path to the selected provisioning profile, the data of
- the embedded plist in the provisioning profile and the team identifier
- to use for code signing.
-
- Raises:
- SystemExit: if no .mobileprovision can be used to sign the bundle.
- """
- profiles_dir = os.path.join(
- os.environ['HOME'], 'Library', 'MobileDevice', 'Provisioning Profiles')
- if not os.path.isdir(profiles_dir):
- print >>sys.stderr, (
- 'cannot find mobile provisioning for %s' % bundle_identifier)
- sys.exit(1)
- provisioning_profiles = None
- if profile:
- profile_path = os.path.join(profiles_dir, profile + '.mobileprovision')
- if os.path.exists(profile_path):
- provisioning_profiles = [profile_path]
- if not provisioning_profiles:
- provisioning_profiles = glob.glob(
- os.path.join(profiles_dir, '*.mobileprovision'))
- valid_provisioning_profiles = {}
- for profile_path in provisioning_profiles:
- profile_data = self._LoadProvisioningProfile(profile_path)
- app_id_pattern = profile_data.get(
- 'Entitlements', {}).get('application-identifier', '')
- for team_identifier in profile_data.get('TeamIdentifier', []):
- app_id = '%s.%s' % (team_identifier, bundle_identifier)
- if fnmatch.fnmatch(app_id, app_id_pattern):
- valid_provisioning_profiles[app_id_pattern] = (
- profile_path, profile_data, team_identifier)
- if not valid_provisioning_profiles:
- print >>sys.stderr, (
- 'cannot find mobile provisioning for %s' % bundle_identifier)
- sys.exit(1)
- # If the user has multiple provisioning profiles installed that can be
- # used for ${bundle_identifier}, pick the most specific one (ie. the
- # provisioning profile whose pattern is the longest).
- selected_key = max(valid_provisioning_profiles, key=lambda v: len(v))
- return valid_provisioning_profiles[selected_key]
-
- def _LoadProvisioningProfile(self, profile_path):
- """Extracts the plist embedded in a provisioning profile.
-
- Args:
- profile_path: string, path to the .mobileprovision file
-
- Returns:
- Content of the plist embedded in the provisioning profile as a dictionary.
- """
- with tempfile.NamedTemporaryFile() as temp:
- subprocess.check_call([
- 'security', 'cms', '-D', '-i', profile_path, '-o', temp.name])
- return self._LoadPlistMaybeBinary(temp.name)
-
- def _MergePlist(self, merged_plist, plist):
- """Merge |plist| into |merged_plist|."""
- for key, value in plist.iteritems():
- if isinstance(value, dict):
- merged_value = merged_plist.get(key, {})
- if isinstance(merged_value, dict):
- self._MergePlist(merged_value, value)
- merged_plist[key] = merged_value
- else:
- merged_plist[key] = value
- else:
- merged_plist[key] = value
-
- def _LoadPlistMaybeBinary(self, plist_path):
- """Loads into a memory a plist possibly encoded in binary format.
-
- This is a wrapper around plistlib.readPlist that tries to convert the
- plist to the XML format if it can't be parsed (assuming that it is in
- the binary format).
-
- Args:
- plist_path: string, path to a plist file, in XML or binary format
-
- Returns:
- Content of the plist as a dictionary.
- """
- try:
- # First, try to read the file using plistlib that only supports XML,
- # and if an exception is raised, convert a temporary copy to XML and
- # load that copy.
- return plistlib.readPlist(plist_path)
- except:
- pass
- with tempfile.NamedTemporaryFile() as temp:
- shutil.copy2(plist_path, temp.name)
- subprocess.check_call(['plutil', '-convert', 'xml1', temp.name])
- return plistlib.readPlist(temp.name)
-
- def _GetSubstitutions(self, bundle_identifier, app_identifier_prefix):
- """Constructs a dictionary of variable substitutions for Entitlements.plist.
-
- Args:
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
- app_identifier_prefix: string, value for AppIdentifierPrefix
-
- Returns:
- Dictionary of substitutions to apply when generating Entitlements.plist.
- """
- return {
- 'CFBundleIdentifier': bundle_identifier,
- 'AppIdentifierPrefix': app_identifier_prefix,
- }
-
- def _GetCFBundleIdentifier(self):
- """Extracts CFBundleIdentifier value from Info.plist in the bundle.
-
- Returns:
- Value of CFBundleIdentifier in the Info.plist located in the bundle.
- """
- info_plist_path = os.path.join(
- os.environ['TARGET_BUILD_DIR'],
- os.environ['INFOPLIST_PATH'])
- info_plist_data = self._LoadPlistMaybeBinary(info_plist_path)
- return info_plist_data['CFBundleIdentifier']
-
- def _InstallEntitlements(self, entitlements, substitutions, overrides):
- """Generates and install the ${BundleName}.xcent entitlements file.
-
- Expands the "$(variable)" pattern in the source entitlements file,
- adds extra entitlements defined in the .mobileprovision file and copies
- the generated plist to "${BundlePath}.xcent".
-
- Args:
- entitlements: string, optional, path to the Entitlements.plist template
- to use, defaults to "${SDKROOT}/Entitlements.plist"
- substitutions: dictionary, variable substitutions
- overrides: dictionary, values to add to the entitlements
-
- Returns:
- Path to the generated entitlements file.
- """
- source_path = entitlements
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['PRODUCT_NAME'] + '.xcent')
- if not source_path:
- source_path = os.path.join(
- os.environ['SDKROOT'],
- 'Entitlements.plist')
- shutil.copy2(source_path, target_path)
- data = self._LoadPlistMaybeBinary(target_path)
- data = self._ExpandVariables(data, substitutions)
- if overrides:
- for key in overrides:
- if key not in data:
- data[key] = overrides[key]
- plistlib.writePlist(data, target_path)
- return target_path
-
- def _ExpandVariables(self, data, substitutions):
- """Expands variables "$(variable)" in data.
-
- Args:
- data: object, can be either string, list or dictionary
- substitutions: dictionary, variable substitutions to perform
-
- Returns:
- Copy of data where each reference to "$(variable)" has been replaced
- by the corresponding value found in substitutions, or left intact if
- the key was not found.
- """
- if isinstance(data, str):
- for key, value in substitutions.iteritems():
- data = data.replace('$(%s)' % key, value)
- return data
- if isinstance(data, list):
- return [self._ExpandVariables(v, substitutions) for v in data]
- if isinstance(data, dict):
- return {k: self._ExpandVariables(data[k], substitutions) for k in data}
- return data
-
-if __name__ == '__main__':
- sys.exit(main(sys.argv[1:]))
diff --git a/test/addons-napi/test_array/build/test_array.target.mk b/test/addons-napi/test_array/build/test_array.target.mk
deleted file mode 100644
index ada554d767be62..00000000000000
--- a/test/addons-napi/test_array/build/test_array.target.mk
+++ /dev/null
@@ -1,178 +0,0 @@
-# This file is generated by gyp; do not edit.
-
-TOOLSET := target
-TARGET := test_array
-DEFS_Debug := \
- '-DNODE_GYP_MODULE_NAME=test_array' \
- '-DUSING_UV_SHARED=1' \
- '-DUSING_V8_SHARED=1' \
- '-DV8_DEPRECATION_WARNINGS=1' \
- '-D_DARWIN_USE_64_BIT_INODE=1' \
- '-D_LARGEFILE_SOURCE' \
- '-D_FILE_OFFSET_BITS=64' \
- '-DBUILDING_NODE_EXTENSION' \
- '-DDEBUG' \
- '-D_DEBUG' \
- '-DV8_ENABLE_CHECKS'
-
-# Flags passed to all source files.
-CFLAGS_Debug := \
- -O0 \
- -gdwarf-2 \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -Wall \
- -Wendif-labels \
- -W \
- -Wno-unused-parameter
-
-# Flags passed to only C files.
-CFLAGS_C_Debug := \
- -fno-strict-aliasing
-
-# Flags passed to only C++ files.
-CFLAGS_CC_Debug := \
- -std=gnu++0x \
- -stdlib=libc++ \
- -fno-rtti \
- -fno-exceptions \
- -fno-threadsafe-statics \
- -fno-strict-aliasing
-
-# Flags passed to only ObjC files.
-CFLAGS_OBJC_Debug :=
-
-# Flags passed to only ObjC++ files.
-CFLAGS_OBJCC_Debug :=
-
-INCS_Debug := \
- -I/Users/trott/io.js/include/node \
- -I/Users/trott/io.js/src \
- -I/Users/trott/io.js/deps/uv/include \
- -I/Users/trott/io.js/deps/v8/include
-
-DEFS_Release := \
- '-DNODE_GYP_MODULE_NAME=test_array' \
- '-DUSING_UV_SHARED=1' \
- '-DUSING_V8_SHARED=1' \
- '-DV8_DEPRECATION_WARNINGS=1' \
- '-D_DARWIN_USE_64_BIT_INODE=1' \
- '-D_LARGEFILE_SOURCE' \
- '-D_FILE_OFFSET_BITS=64' \
- '-DBUILDING_NODE_EXTENSION'
-
-# Flags passed to all source files.
-CFLAGS_Release := \
- -Os \
- -gdwarf-2 \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -Wall \
- -Wendif-labels \
- -W \
- -Wno-unused-parameter
-
-# Flags passed to only C files.
-CFLAGS_C_Release := \
- -fno-strict-aliasing
-
-# Flags passed to only C++ files.
-CFLAGS_CC_Release := \
- -std=gnu++0x \
- -stdlib=libc++ \
- -fno-rtti \
- -fno-exceptions \
- -fno-threadsafe-statics \
- -fno-strict-aliasing
-
-# Flags passed to only ObjC files.
-CFLAGS_OBJC_Release :=
-
-# Flags passed to only ObjC++ files.
-CFLAGS_OBJCC_Release :=
-
-INCS_Release := \
- -I/Users/trott/io.js/include/node \
- -I/Users/trott/io.js/src \
- -I/Users/trott/io.js/deps/uv/include \
- -I/Users/trott/io.js/deps/v8/include
-
-OBJS := \
- $(obj).target/$(TARGET)/test_array.o
-
-# Add to the list of files we specially track dependencies for.
-all_deps += $(OBJS)
-
-# CFLAGS et al overrides must be target-local.
-# See "Target-specific Variable Values" in the GNU Make manual.
-$(OBJS): TOOLSET := $(TOOLSET)
-$(OBJS): GYP_CFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE))
-$(OBJS): GYP_CXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE))
-$(OBJS): GYP_OBJCFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) $(CFLAGS_OBJC_$(BUILDTYPE))
-$(OBJS): GYP_OBJCXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) $(CFLAGS_OBJCC_$(BUILDTYPE))
-
-# Suffix rules, putting all outputs into $(obj).
-
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-# Try building from generated source, too.
-
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-# End of this set of suffix rules
-### Rules for final target.
-LDFLAGS_Debug := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -L$(builddir) \
- -stdlib=libc++
-
-LIBTOOLFLAGS_Debug := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first
-
-LDFLAGS_Release := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -L$(builddir) \
- -stdlib=libc++
-
-LIBTOOLFLAGS_Release := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first
-
-LIBS :=
-
-$(builddir)/test_array.node: GYP_LDFLAGS := $(LDFLAGS_$(BUILDTYPE))
-$(builddir)/test_array.node: LIBS := $(LIBS)
-$(builddir)/test_array.node: GYP_LIBTOOLFLAGS := $(LIBTOOLFLAGS_$(BUILDTYPE))
-$(builddir)/test_array.node: TOOLSET := $(TOOLSET)
-$(builddir)/test_array.node: $(OBJS) FORCE_DO_CMD
- $(call do_cmd,solink_module)
-
-all_deps += $(builddir)/test_array.node
-# Add target alias
-.PHONY: test_array
-test_array: $(builddir)/test_array.node
-
-# Short alias for building this executable.
-.PHONY: test_array.node
-test_array.node: $(builddir)/test_array.node
-
-# Add executable to "all" target.
-.PHONY: all
-all: $(builddir)/test_array.node
-
diff --git a/test/addons-napi/test_async/build/Makefile b/test/addons-napi/test_async/build/Makefile
deleted file mode 100644
index 9f28d1a37e76fa..00000000000000
--- a/test/addons-napi/test_async/build/Makefile
+++ /dev/null
@@ -1,342 +0,0 @@
-# We borrow heavily from the kernel build setup, though we are simpler since
-# we don't have Kconfig tweaking settings on us.
-
-# The implicit make rules have it looking for RCS files, among other things.
-# We instead explicitly write all the rules we care about.
-# It's even quicker (saves ~200ms) to pass -r on the command line.
-MAKEFLAGS=-r
-
-# The source directory tree.
-srcdir := ..
-abs_srcdir := $(abspath $(srcdir))
-
-# The name of the builddir.
-builddir_name ?= .
-
-# The V=1 flag on command line makes us verbosely print command lines.
-ifdef V
- quiet=
-else
- quiet=quiet_
-endif
-
-# Specify BUILDTYPE=Release on the command line for a release build.
-BUILDTYPE ?= Release
-
-# Directory all our build output goes into.
-# Note that this must be two directories beneath src/ for unit tests to pass,
-# as they reach into the src/ directory for data with relative paths.
-builddir ?= $(builddir_name)/$(BUILDTYPE)
-abs_builddir := $(abspath $(builddir))
-depsdir := $(builddir)/.deps
-
-# Object output directory.
-obj := $(builddir)/obj
-abs_obj := $(abspath $(obj))
-
-# We build up a list of every single one of the targets so we can slurp in the
-# generated dependency rule Makefiles in one pass.
-all_deps :=
-
-
-
-CC.target ?= $(CC)
-CFLAGS.target ?= $(CPPFLAGS) $(CFLAGS)
-CXX.target ?= $(CXX)
-CXXFLAGS.target ?= $(CPPFLAGS) $(CXXFLAGS)
-LINK.target ?= $(LINK)
-LDFLAGS.target ?= $(LDFLAGS)
-AR.target ?= $(AR)
-
-# C++ apps need to be linked with g++.
-LINK ?= $(CXX.target)
-
-# TODO(evan): move all cross-compilation logic to gyp-time so we don't need
-# to replicate this environment fallback in make as well.
-CC.host ?= gcc
-CFLAGS.host ?= $(CPPFLAGS_host) $(CFLAGS_host)
-CXX.host ?= g++
-CXXFLAGS.host ?= $(CPPFLAGS_host) $(CXXFLAGS_host)
-LINK.host ?= $(CXX.host)
-LDFLAGS.host ?=
-AR.host ?= ar
-
-# Define a dir function that can handle spaces.
-# http://www.gnu.org/software/make/manual/make.html#Syntax-of-Functions
-# "leading spaces cannot appear in the text of the first argument as written.
-# These characters can be put into the argument value by variable substitution."
-empty :=
-space := $(empty) $(empty)
-
-# http://stackoverflow.com/questions/1189781/using-make-dir-or-notdir-on-a-path-with-spaces
-replace_spaces = $(subst $(space),?,$1)
-unreplace_spaces = $(subst ?,$(space),$1)
-dirx = $(call unreplace_spaces,$(dir $(call replace_spaces,$1)))
-
-# Flags to make gcc output dependency info. Note that you need to be
-# careful here to use the flags that ccache and distcc can understand.
-# We write to a dep file on the side first and then rename at the end
-# so we can't end up with a broken dep file.
-depfile = $(depsdir)/$(call replace_spaces,$@).d
-DEPFLAGS = -MMD -MF $(depfile).raw
-
-# We have to fixup the deps output in a few ways.
-# (1) the file output should mention the proper .o file.
-# ccache or distcc lose the path to the target, so we convert a rule of
-# the form:
-# foobar.o: DEP1 DEP2
-# into
-# path/to/foobar.o: DEP1 DEP2
-# (2) we want missing files not to cause us to fail to build.
-# We want to rewrite
-# foobar.o: DEP1 DEP2 \
-# DEP3
-# to
-# DEP1:
-# DEP2:
-# DEP3:
-# so if the files are missing, they're just considered phony rules.
-# We have to do some pretty insane escaping to get those backslashes
-# and dollar signs past make, the shell, and sed at the same time.
-# Doesn't work with spaces, but that's fine: .d files have spaces in
-# their names replaced with other characters.
-define fixup_dep
-# The depfile may not exist if the input file didn't have any #includes.
-touch $(depfile).raw
-# Fixup path as in (1).
-sed -e "s|^$(notdir $@)|$@|" $(depfile).raw >> $(depfile)
-# Add extra rules as in (2).
-# We remove slashes and replace spaces with new lines;
-# remove blank lines;
-# delete the first line and append a colon to the remaining lines.
-sed -e 's|\\||' -e 'y| |\n|' $(depfile).raw |\
- grep -v '^$$' |\
- sed -e 1d -e 's|$$|:|' \
- >> $(depfile)
-rm $(depfile).raw
-endef
-
-# Command definitions:
-# - cmd_foo is the actual command to run;
-# - quiet_cmd_foo is the brief-output summary of the command.
-
-quiet_cmd_cc = CC($(TOOLSET)) $@
-cmd_cc = $(CC.$(TOOLSET)) $(GYP_CFLAGS) $(DEPFLAGS) $(CFLAGS.$(TOOLSET)) -c -o $@ $<
-
-quiet_cmd_cxx = CXX($(TOOLSET)) $@
-cmd_cxx = $(CXX.$(TOOLSET)) $(GYP_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-
-quiet_cmd_objc = CXX($(TOOLSET)) $@
-cmd_objc = $(CC.$(TOOLSET)) $(GYP_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-quiet_cmd_objcxx = CXX($(TOOLSET)) $@
-cmd_objcxx = $(CXX.$(TOOLSET)) $(GYP_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-# Commands for precompiled header files.
-quiet_cmd_pch_c = CXX($(TOOLSET)) $@
-cmd_pch_c = $(CC.$(TOOLSET)) $(GYP_PCH_CFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-quiet_cmd_pch_cc = CXX($(TOOLSET)) $@
-cmd_pch_cc = $(CC.$(TOOLSET)) $(GYP_PCH_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-quiet_cmd_pch_m = CXX($(TOOLSET)) $@
-cmd_pch_m = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $<
-quiet_cmd_pch_mm = CXX($(TOOLSET)) $@
-cmd_pch_mm = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-# gyp-mac-tool is written next to the root Makefile by gyp.
-# Use $(4) for the command, since $(2) and $(3) are used as flags by do_cmd
-# already.
-quiet_cmd_mac_tool = MACTOOL $(4) $<
-cmd_mac_tool = ./gyp-mac-tool $(4) $< "$@"
-
-quiet_cmd_mac_package_framework = PACKAGE FRAMEWORK $@
-cmd_mac_package_framework = ./gyp-mac-tool package-framework "$@" $(4)
-
-quiet_cmd_infoplist = INFOPLIST $@
-cmd_infoplist = $(CC.$(TOOLSET)) -E -P -Wno-trigraphs -x c $(INFOPLIST_DEFINES) "$<" -o "$@"
-
-quiet_cmd_touch = TOUCH $@
-cmd_touch = touch $@
-
-quiet_cmd_copy = COPY $@
-# send stderr to /dev/null to ignore messages when linking directories.
-cmd_copy = rm -rf "$@" && cp -af "$<" "$@"
-
-quiet_cmd_alink = LIBTOOL-STATIC $@
-cmd_alink = rm -f $@ && ./gyp-mac-tool filter-libtool libtool $(GYP_LIBTOOLFLAGS) -static -o $@ $(filter %.o,$^)
-
-quiet_cmd_link = LINK($(TOOLSET)) $@
-cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS)
-
-quiet_cmd_solink = SOLINK($(TOOLSET)) $@
-cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS)
-
-quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@
-cmd_solink_module = $(LINK.$(TOOLSET)) -bundle $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS)
-
-
-# Define an escape_quotes function to escape single quotes.
-# This allows us to handle quotes properly as long as we always
-# use single quotes and escape_quotes.
-escape_quotes = $(subst ','\'',$(1))
-# This comment is here just to include a ' to unconfuse syntax highlighting.
-# Define an escape_vars function to escape '$' variable syntax.
-# This allows us to read/write command lines with shell variables (e.g.
-# $LD_LIBRARY_PATH), without triggering make substitution.
-escape_vars = $(subst $$,$$$$,$(1))
-# Helper that expands to a shell command to echo a string exactly as it is in
-# make. This uses printf instead of echo because printf's behaviour with respect
-# to escape sequences is more portable than echo's across different shells
-# (e.g., dash, bash).
-exact_echo = printf '%s\n' '$(call escape_quotes,$(1))'
-
-# Helper to compare the command we're about to run against the command
-# we logged the last time we ran the command. Produces an empty
-# string (false) when the commands match.
-# Tricky point: Make has no string-equality test function.
-# The kernel uses the following, but it seems like it would have false
-# positives, where one string reordered its arguments.
-# arg_check = $(strip $(filter-out $(cmd_$(1)), $(cmd_$@)) \
-# $(filter-out $(cmd_$@), $(cmd_$(1))))
-# We instead substitute each for the empty string into the other, and
-# say they're equal if both substitutions produce the empty string.
-# .d files contain ? instead of spaces, take that into account.
-command_changed = $(or $(subst $(cmd_$(1)),,$(cmd_$(call replace_spaces,$@))),\
- $(subst $(cmd_$(call replace_spaces,$@)),,$(cmd_$(1))))
-
-# Helper that is non-empty when a prerequisite changes.
-# Normally make does this implicitly, but we force rules to always run
-# so we can check their command lines.
-# $? -- new prerequisites
-# $| -- order-only dependencies
-prereq_changed = $(filter-out FORCE_DO_CMD,$(filter-out $|,$?))
-
-# Helper that executes all postbuilds until one fails.
-define do_postbuilds
- @E=0;\
- for p in $(POSTBUILDS); do\
- eval $$p;\
- E=$$?;\
- if [ $$E -ne 0 ]; then\
- break;\
- fi;\
- done;\
- if [ $$E -ne 0 ]; then\
- rm -rf "$@";\
- exit $$E;\
- fi
-endef
-
-# do_cmd: run a command via the above cmd_foo names, if necessary.
-# Should always run for a given target to handle command-line changes.
-# Second argument, if non-zero, makes it do asm/C/C++ dependency munging.
-# Third argument, if non-zero, makes it do POSTBUILDS processing.
-# Note: We intentionally do NOT call dirx for depfile, since it contains ? for
-# spaces already and dirx strips the ? characters.
-define do_cmd
-$(if $(or $(command_changed),$(prereq_changed)),
- @$(call exact_echo, $($(quiet)cmd_$(1)))
- @mkdir -p "$(call dirx,$@)" "$(dir $(depfile))"
- $(if $(findstring flock,$(word 2,$(cmd_$1))),
- @$(cmd_$(1))
- @echo " $(quiet_cmd_$(1)): Finished",
- @$(cmd_$(1))
- )
- @$(call exact_echo,$(call escape_vars,cmd_$(call replace_spaces,$@) := $(cmd_$(1)))) > $(depfile)
- @$(if $(2),$(fixup_dep))
- $(if $(and $(3), $(POSTBUILDS)),
- $(call do_postbuilds)
- )
-)
-endef
-
-# Declare the "all" target first so it is the default,
-# even though we don't have the deps yet.
-.PHONY: all
-all:
-
-# make looks for ways to re-generate included makefiles, but in our case, we
-# don't have a direct way. Explicitly telling make that it has nothing to do
-# for them makes it go faster.
-%.d: ;
-
-# Use FORCE_DO_CMD to force a target to run. Should be coupled with
-# do_cmd.
-.PHONY: FORCE_DO_CMD
-FORCE_DO_CMD:
-
-TOOLSET := target
-# Suffix rules, putting all outputs into $(obj).
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cpp FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cxx FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.m FORCE_DO_CMD
- @$(call do_cmd,objc,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.mm FORCE_DO_CMD
- @$(call do_cmd,objcxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.S FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.s FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-# Try building from generated source, too.
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cpp FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cxx FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.m FORCE_DO_CMD
- @$(call do_cmd,objc,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.mm FORCE_DO_CMD
- @$(call do_cmd,objcxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.S FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.s FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-$(obj).$(TOOLSET)/%.o: $(obj)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.cpp FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.cxx FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.m FORCE_DO_CMD
- @$(call do_cmd,objc,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.mm FORCE_DO_CMD
- @$(call do_cmd,objcxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.S FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.s FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-
-ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
- $(findstring $(join ^,$(prefix)),\
- $(join ^,test_async.target.mk)))),)
- include test_async.target.mk
-endif
-
-quiet_cmd_regen_makefile = ACTION Regenerating $@
-cmd_regen_makefile = cd $(srcdir); /Users/trott/io.js/deps/npm/node_modules/node-gyp/gyp/gyp_main.py -fmake --ignore-environment "--toplevel-dir=." -I/Users/trott/io.js/test/addons-napi/test_async/build/config.gypi -I/Users/trott/io.js/deps/npm/node_modules/node-gyp/addon.gypi -I/Users/trott/io.js/common.gypi "--depth=." "-Goutput_dir=." "--generator-output=build" "-Dlibrary=shared_library" "-Dvisibility=default" "-Dnode_root_dir=/Users/trott/io.js" "-Dnode_gyp_dir=/Users/trott/io.js/deps/npm/node_modules/node-gyp" "-Dnode_lib_file=/Users/trott/io.js/$(Configuration)/node.lib" "-Dmodule_root_dir=/Users/trott/io.js/test/addons-napi/test_async" "-Dnode_engine=v8" binding.gyp
-Makefile: $(srcdir)/../../../deps/npm/node_modules/node-gyp/addon.gypi $(srcdir)/build/config.gypi $(srcdir)/binding.gyp $(srcdir)/../../../common.gypi
- $(call do_cmd,regen_makefile)
-
-# "all" is a concatenation of the "all" targets from all the included
-# sub-makefiles. This is just here to clarify.
-all:
-
-# Add in dependency-tracking rules. $(all_deps) is the list of every single
-# target in our tree. Only consider the ones with .d (dependency) info:
-d_files := $(wildcard $(foreach f,$(all_deps),$(depsdir)/$(f).d))
-ifneq ($(d_files),)
- include $(d_files)
-endif
diff --git a/test/addons-napi/test_async/build/binding.Makefile b/test/addons-napi/test_async/build/binding.Makefile
deleted file mode 100644
index b3712e0a3b057a..00000000000000
--- a/test/addons-napi/test_async/build/binding.Makefile
+++ /dev/null
@@ -1,6 +0,0 @@
-# This file is generated by gyp; do not edit.
-
-export builddir_name ?= ./build/.
-.PHONY: all
-all:
- $(MAKE) test_async
diff --git a/test/addons-napi/test_async/build/config.gypi b/test/addons-napi/test_async/build/config.gypi
deleted file mode 100644
index 6a35cfccbb772b..00000000000000
--- a/test/addons-napi/test_async/build/config.gypi
+++ /dev/null
@@ -1,70 +0,0 @@
-# Do not edit. File was generated by node-gyp's "configure" step
-{
- "target_defaults": {
- "cflags": [],
- "default_configuration": "Release",
- "defines": [],
- "include_dirs": [],
- "libraries": []
- },
- "variables": {
- "asan": 0,
- "coverage": "false",
- "debug_devtools": "node",
- "debug_http2": "false",
- "debug_nghttp2": "false",
- "force_dynamic_crt": 0,
- "host_arch": "x64",
- "icu_data_file": "icudt59l.dat",
- "icu_data_in": "../../deps/icu-small/source/data/in/icudt59l.dat",
- "icu_endianness": "l",
- "icu_gyp_path": "tools/icu/icu-generic.gyp",
- "icu_locales": "en,root",
- "icu_path": "deps/icu-small",
- "icu_small": "true",
- "icu_ver_major": "59",
- "llvm_version": 0,
- "node_byteorder": "little",
- "node_enable_d8": "false",
- "node_enable_v8_vtunejit": "false",
- "node_install_npm": "true",
- "node_module_version": 58,
- "node_no_browser_globals": "false",
- "node_prefix": "/usr/local",
- "node_release_urlbase": "",
- "node_shared": "false",
- "node_shared_cares": "false",
- "node_shared_http_parser": "false",
- "node_shared_libuv": "false",
- "node_shared_openssl": "false",
- "node_shared_zlib": "false",
- "node_tag": "",
- "node_use_bundled_v8": "true",
- "node_use_dtrace": "true",
- "node_use_etw": "false",
- "node_use_lttng": "false",
- "node_use_openssl": "true",
- "node_use_perfctr": "false",
- "node_use_v8_platform": "true",
- "node_without_node_options": "false",
- "openssl_fips": "",
- "openssl_no_asm": 0,
- "shlib_suffix": "58.dylib",
- "target_arch": "x64",
- "uv_parent_path": "/deps/uv/",
- "uv_use_dtrace": "true",
- "v8_enable_gdbjit": 0,
- "v8_enable_i18n_support": 1,
- "v8_enable_inspector": 1,
- "v8_no_strict_aliasing": 1,
- "v8_optimized_debug": 0,
- "v8_promise_internal_field_count": 1,
- "v8_random_seed": 0,
- "v8_trace_maps": 0,
- "v8_use_snapshot": "true",
- "want_separate_host_toolset": 0,
- "xcode_version": "8.0",
- "nodedir": "/Users/trott/io.js",
- "standalone_static_library": 1
- }
-}
diff --git a/test/addons-napi/test_async/build/gyp-mac-tool b/test/addons-napi/test_async/build/gyp-mac-tool
deleted file mode 100755
index 8ef02b0493a003..00000000000000
--- a/test/addons-napi/test_async/build/gyp-mac-tool
+++ /dev/null
@@ -1,611 +0,0 @@
-#!/usr/bin/env python
-# Generated by gyp. Do not edit.
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Utility functions to perform Xcode-style build steps.
-
-These functions are executed via gyp-mac-tool when using the Makefile generator.
-"""
-
-import fcntl
-import fnmatch
-import glob
-import json
-import os
-import plistlib
-import re
-import shutil
-import string
-import subprocess
-import sys
-import tempfile
-
-
-def main(args):
- executor = MacTool()
- exit_code = executor.Dispatch(args)
- if exit_code is not None:
- sys.exit(exit_code)
-
-
-class MacTool(object):
- """This class performs all the Mac tooling steps. The methods can either be
- executed directly, or dispatched from an argument list."""
-
- def Dispatch(self, args):
- """Dispatches a string command to a method."""
- if len(args) < 1:
- raise Exception("Not enough arguments")
-
- method = "Exec%s" % self._CommandifyName(args[0])
- return getattr(self, method)(*args[1:])
-
- def _CommandifyName(self, name_string):
- """Transforms a tool name like copy-info-plist to CopyInfoPlist"""
- return name_string.title().replace('-', '')
-
- def ExecCopyBundleResource(self, source, dest, convert_to_binary):
- """Copies a resource file to the bundle/Resources directory, performing any
- necessary compilation on each resource."""
- extension = os.path.splitext(source)[1].lower()
- if os.path.isdir(source):
- # Copy tree.
- # TODO(thakis): This copies file attributes like mtime, while the
- # single-file branch below doesn't. This should probably be changed to
- # be consistent with the single-file branch.
- if os.path.exists(dest):
- shutil.rmtree(dest)
- shutil.copytree(source, dest)
- elif extension == '.xib':
- return self._CopyXIBFile(source, dest)
- elif extension == '.storyboard':
- return self._CopyXIBFile(source, dest)
- elif extension == '.strings':
- self._CopyStringsFile(source, dest, convert_to_binary)
- else:
- shutil.copy(source, dest)
-
- def _CopyXIBFile(self, source, dest):
- """Compiles a XIB file with ibtool into a binary plist in the bundle."""
-
- # ibtool sometimes crashes with relative paths. See crbug.com/314728.
- base = os.path.dirname(os.path.realpath(__file__))
- if os.path.relpath(source):
- source = os.path.join(base, source)
- if os.path.relpath(dest):
- dest = os.path.join(base, dest)
-
- args = ['xcrun', 'ibtool', '--errors', '--warnings', '--notices',
- '--output-format', 'human-readable-text', '--compile', dest, source]
- ibtool_section_re = re.compile(r'/\*.*\*/')
- ibtool_re = re.compile(r'.*note:.*is clipping its content')
- ibtoolout = subprocess.Popen(args, stdout=subprocess.PIPE)
- current_section_header = None
- for line in ibtoolout.stdout:
- if ibtool_section_re.match(line):
- current_section_header = line
- elif not ibtool_re.match(line):
- if current_section_header:
- sys.stdout.write(current_section_header)
- current_section_header = None
- sys.stdout.write(line)
- return ibtoolout.returncode
-
- def _ConvertToBinary(self, dest):
- subprocess.check_call([
- 'xcrun', 'plutil', '-convert', 'binary1', '-o', dest, dest])
-
- def _CopyStringsFile(self, source, dest, convert_to_binary):
- """Copies a .strings file using iconv to reconvert the input into UTF-16."""
- input_code = self._DetectInputEncoding(source) or "UTF-8"
-
- # Xcode's CpyCopyStringsFile / builtin-copyStrings seems to call
- # CFPropertyListCreateFromXMLData() behind the scenes; at least it prints
- # CFPropertyListCreateFromXMLData(): Old-style plist parser: missing
- # semicolon in dictionary.
- # on invalid files. Do the same kind of validation.
- import CoreFoundation
- s = open(source, 'rb').read()
- d = CoreFoundation.CFDataCreate(None, s, len(s))
- _, error = CoreFoundation.CFPropertyListCreateFromXMLData(None, d, 0, None)
- if error:
- return
-
- fp = open(dest, 'wb')
- fp.write(s.decode(input_code).encode('UTF-16'))
- fp.close()
-
- if convert_to_binary == 'True':
- self._ConvertToBinary(dest)
-
- def _DetectInputEncoding(self, file_name):
- """Reads the first few bytes from file_name and tries to guess the text
- encoding. Returns None as a guess if it can't detect it."""
- fp = open(file_name, 'rb')
- try:
- header = fp.read(3)
- except e:
- fp.close()
- return None
- fp.close()
- if header.startswith("\xFE\xFF"):
- return "UTF-16"
- elif header.startswith("\xFF\xFE"):
- return "UTF-16"
- elif header.startswith("\xEF\xBB\xBF"):
- return "UTF-8"
- else:
- return None
-
- def ExecCopyInfoPlist(self, source, dest, convert_to_binary, *keys):
- """Copies the |source| Info.plist to the destination directory |dest|."""
- # Read the source Info.plist into memory.
- fd = open(source, 'r')
- lines = fd.read()
- fd.close()
-
- # Insert synthesized key/value pairs (e.g. BuildMachineOSBuild).
- plist = plistlib.readPlistFromString(lines)
- if keys:
- plist = dict(plist.items() + json.loads(keys[0]).items())
- lines = plistlib.writePlistToString(plist)
-
- # Go through all the environment variables and replace them as variables in
- # the file.
- IDENT_RE = re.compile(r'[/\s]')
- for key in os.environ:
- if key.startswith('_'):
- continue
- evar = '${%s}' % key
- evalue = os.environ[key]
- lines = string.replace(lines, evar, evalue)
-
- # Xcode supports various suffices on environment variables, which are
- # all undocumented. :rfc1034identifier is used in the standard project
- # template these days, and :identifier was used earlier. They are used to
- # convert non-url characters into things that look like valid urls --
- # except that the replacement character for :identifier, '_' isn't valid
- # in a URL either -- oops, hence :rfc1034identifier was born.
- evar = '${%s:identifier}' % key
- evalue = IDENT_RE.sub('_', os.environ[key])
- lines = string.replace(lines, evar, evalue)
-
- evar = '${%s:rfc1034identifier}' % key
- evalue = IDENT_RE.sub('-', os.environ[key])
- lines = string.replace(lines, evar, evalue)
-
- # Remove any keys with values that haven't been replaced.
- lines = lines.split('\n')
- for i in range(len(lines)):
- if lines[i].strip().startswith("${"):
- lines[i] = None
- lines[i - 1] = None
- lines = '\n'.join(filter(lambda x: x is not None, lines))
-
- # Write out the file with variables replaced.
- fd = open(dest, 'w')
- fd.write(lines)
- fd.close()
-
- # Now write out PkgInfo file now that the Info.plist file has been
- # "compiled".
- self._WritePkgInfo(dest)
-
- if convert_to_binary == 'True':
- self._ConvertToBinary(dest)
-
- def _WritePkgInfo(self, info_plist):
- """This writes the PkgInfo file from the data stored in Info.plist."""
- plist = plistlib.readPlist(info_plist)
- if not plist:
- return
-
- # Only create PkgInfo for executable types.
- package_type = plist['CFBundlePackageType']
- if package_type != 'APPL':
- return
-
- # The format of PkgInfo is eight characters, representing the bundle type
- # and bundle signature, each four characters. If that is missing, four
- # '?' characters are used instead.
- signature_code = plist.get('CFBundleSignature', '????')
- if len(signature_code) != 4: # Wrong length resets everything, too.
- signature_code = '?' * 4
-
- dest = os.path.join(os.path.dirname(info_plist), 'PkgInfo')
- fp = open(dest, 'w')
- fp.write('%s%s' % (package_type, signature_code))
- fp.close()
-
- def ExecFlock(self, lockfile, *cmd_list):
- """Emulates the most basic behavior of Linux's flock(1)."""
- # Rely on exception handling to report errors.
- fd = os.open(lockfile, os.O_RDONLY|os.O_NOCTTY|os.O_CREAT, 0o666)
- fcntl.flock(fd, fcntl.LOCK_EX)
- return subprocess.call(cmd_list)
-
- def ExecFilterLibtool(self, *cmd_list):
- """Calls libtool and filters out '/path/to/libtool: file: foo.o has no
- symbols'."""
- libtool_re = re.compile(r'^.*libtool: file: .* has no symbols$')
- libtool_re5 = re.compile(
- r'^.*libtool: warning for library: ' +
- r'.* the table of contents is empty ' +
- r'\(no object file members in the library define global symbols\)$')
- env = os.environ.copy()
- # Ref:
- # http://www.opensource.apple.com/source/cctools/cctools-809/misc/libtool.c
- # The problem with this flag is that it resets the file mtime on the file to
- # epoch=0, e.g. 1970-1-1 or 1969-12-31 depending on timezone.
- env['ZERO_AR_DATE'] = '1'
- libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env)
- _, err = libtoolout.communicate()
- for line in err.splitlines():
- if not libtool_re.match(line) and not libtool_re5.match(line):
- print >>sys.stderr, line
- # Unconditionally touch the output .a file on the command line if present
- # and the command succeeded. A bit hacky.
- if not libtoolout.returncode:
- for i in range(len(cmd_list) - 1):
- if cmd_list[i] == "-o" and cmd_list[i+1].endswith('.a'):
- os.utime(cmd_list[i+1], None)
- break
- return libtoolout.returncode
-
- def ExecPackageFramework(self, framework, version):
- """Takes a path to Something.framework and the Current version of that and
- sets up all the symlinks."""
- # Find the name of the binary based on the part before the ".framework".
- binary = os.path.basename(framework).split('.')[0]
-
- CURRENT = 'Current'
- RESOURCES = 'Resources'
- VERSIONS = 'Versions'
-
- if not os.path.exists(os.path.join(framework, VERSIONS, version, binary)):
- # Binary-less frameworks don't seem to contain symlinks (see e.g.
- # chromium's out/Debug/org.chromium.Chromium.manifest/ bundle).
- return
-
- # Move into the framework directory to set the symlinks correctly.
- pwd = os.getcwd()
- os.chdir(framework)
-
- # Set up the Current version.
- self._Relink(version, os.path.join(VERSIONS, CURRENT))
-
- # Set up the root symlinks.
- self._Relink(os.path.join(VERSIONS, CURRENT, binary), binary)
- self._Relink(os.path.join(VERSIONS, CURRENT, RESOURCES), RESOURCES)
-
- # Back to where we were before!
- os.chdir(pwd)
-
- def _Relink(self, dest, link):
- """Creates a symlink to |dest| named |link|. If |link| already exists,
- it is overwritten."""
- if os.path.lexists(link):
- os.remove(link)
- os.symlink(dest, link)
-
- def ExecCompileXcassets(self, keys, *inputs):
- """Compiles multiple .xcassets files into a single .car file.
-
- This invokes 'actool' to compile all the inputs .xcassets files. The
- |keys| arguments is a json-encoded dictionary of extra arguments to
- pass to 'actool' when the asset catalogs contains an application icon
- or a launch image.
-
- Note that 'actool' does not create the Assets.car file if the asset
- catalogs does not contains imageset.
- """
- command_line = [
- 'xcrun', 'actool', '--output-format', 'human-readable-text',
- '--compress-pngs', '--notices', '--warnings', '--errors',
- ]
- is_iphone_target = 'IPHONEOS_DEPLOYMENT_TARGET' in os.environ
- if is_iphone_target:
- platform = os.environ['CONFIGURATION'].split('-')[-1]
- if platform not in ('iphoneos', 'iphonesimulator'):
- platform = 'iphonesimulator'
- command_line.extend([
- '--platform', platform, '--target-device', 'iphone',
- '--target-device', 'ipad', '--minimum-deployment-target',
- os.environ['IPHONEOS_DEPLOYMENT_TARGET'], '--compile',
- os.path.abspath(os.environ['CONTENTS_FOLDER_PATH']),
- ])
- else:
- command_line.extend([
- '--platform', 'macosx', '--target-device', 'mac',
- '--minimum-deployment-target', os.environ['MACOSX_DEPLOYMENT_TARGET'],
- '--compile',
- os.path.abspath(os.environ['UNLOCALIZED_RESOURCES_FOLDER_PATH']),
- ])
- if keys:
- keys = json.loads(keys)
- for key, value in keys.iteritems():
- arg_name = '--' + key
- if isinstance(value, bool):
- if value:
- command_line.append(arg_name)
- elif isinstance(value, list):
- for v in value:
- command_line.append(arg_name)
- command_line.append(str(v))
- else:
- command_line.append(arg_name)
- command_line.append(str(value))
- # Note: actool crashes if inputs path are relative, so use os.path.abspath
- # to get absolute path name for inputs.
- command_line.extend(map(os.path.abspath, inputs))
- subprocess.check_call(command_line)
-
- def ExecMergeInfoPlist(self, output, *inputs):
- """Merge multiple .plist files into a single .plist file."""
- merged_plist = {}
- for path in inputs:
- plist = self._LoadPlistMaybeBinary(path)
- self._MergePlist(merged_plist, plist)
- plistlib.writePlist(merged_plist, output)
-
- def ExecCodeSignBundle(self, key, resource_rules, entitlements, provisioning):
- """Code sign a bundle.
-
- This function tries to code sign an iOS bundle, following the same
- algorithm as Xcode:
- 1. copy ResourceRules.plist from the user or the SDK into the bundle,
- 2. pick the provisioning profile that best match the bundle identifier,
- and copy it into the bundle as embedded.mobileprovision,
- 3. copy Entitlements.plist from user or SDK next to the bundle,
- 4. code sign the bundle.
- """
- resource_rules_path = self._InstallResourceRules(resource_rules)
- substitutions, overrides = self._InstallProvisioningProfile(
- provisioning, self._GetCFBundleIdentifier())
- entitlements_path = self._InstallEntitlements(
- entitlements, substitutions, overrides)
- subprocess.check_call([
- 'codesign', '--force', '--sign', key, '--resource-rules',
- resource_rules_path, '--entitlements', entitlements_path,
- os.path.join(
- os.environ['TARGET_BUILD_DIR'],
- os.environ['FULL_PRODUCT_NAME'])])
-
- def _InstallResourceRules(self, resource_rules):
- """Installs ResourceRules.plist from user or SDK into the bundle.
-
- Args:
- resource_rules: string, optional, path to the ResourceRules.plist file
- to use, default to "${SDKROOT}/ResourceRules.plist"
-
- Returns:
- Path to the copy of ResourceRules.plist into the bundle.
- """
- source_path = resource_rules
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['CONTENTS_FOLDER_PATH'],
- 'ResourceRules.plist')
- if not source_path:
- source_path = os.path.join(
- os.environ['SDKROOT'], 'ResourceRules.plist')
- shutil.copy2(source_path, target_path)
- return target_path
-
- def _InstallProvisioningProfile(self, profile, bundle_identifier):
- """Installs embedded.mobileprovision into the bundle.
-
- Args:
- profile: string, optional, short name of the .mobileprovision file
- to use, if empty or the file is missing, the best file installed
- will be used
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
-
- Returns:
- A tuple containing two dictionary: variables substitutions and values
- to overrides when generating the entitlements file.
- """
- source_path, provisioning_data, team_id = self._FindProvisioningProfile(
- profile, bundle_identifier)
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['CONTENTS_FOLDER_PATH'],
- 'embedded.mobileprovision')
- shutil.copy2(source_path, target_path)
- substitutions = self._GetSubstitutions(bundle_identifier, team_id + '.')
- return substitutions, provisioning_data['Entitlements']
-
- def _FindProvisioningProfile(self, profile, bundle_identifier):
- """Finds the .mobileprovision file to use for signing the bundle.
-
- Checks all the installed provisioning profiles (or if the user specified
- the PROVISIONING_PROFILE variable, only consult it) and select the most
- specific that correspond to the bundle identifier.
-
- Args:
- profile: string, optional, short name of the .mobileprovision file
- to use, if empty or the file is missing, the best file installed
- will be used
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
-
- Returns:
- A tuple of the path to the selected provisioning profile, the data of
- the embedded plist in the provisioning profile and the team identifier
- to use for code signing.
-
- Raises:
- SystemExit: if no .mobileprovision can be used to sign the bundle.
- """
- profiles_dir = os.path.join(
- os.environ['HOME'], 'Library', 'MobileDevice', 'Provisioning Profiles')
- if not os.path.isdir(profiles_dir):
- print >>sys.stderr, (
- 'cannot find mobile provisioning for %s' % bundle_identifier)
- sys.exit(1)
- provisioning_profiles = None
- if profile:
- profile_path = os.path.join(profiles_dir, profile + '.mobileprovision')
- if os.path.exists(profile_path):
- provisioning_profiles = [profile_path]
- if not provisioning_profiles:
- provisioning_profiles = glob.glob(
- os.path.join(profiles_dir, '*.mobileprovision'))
- valid_provisioning_profiles = {}
- for profile_path in provisioning_profiles:
- profile_data = self._LoadProvisioningProfile(profile_path)
- app_id_pattern = profile_data.get(
- 'Entitlements', {}).get('application-identifier', '')
- for team_identifier in profile_data.get('TeamIdentifier', []):
- app_id = '%s.%s' % (team_identifier, bundle_identifier)
- if fnmatch.fnmatch(app_id, app_id_pattern):
- valid_provisioning_profiles[app_id_pattern] = (
- profile_path, profile_data, team_identifier)
- if not valid_provisioning_profiles:
- print >>sys.stderr, (
- 'cannot find mobile provisioning for %s' % bundle_identifier)
- sys.exit(1)
- # If the user has multiple provisioning profiles installed that can be
- # used for ${bundle_identifier}, pick the most specific one (ie. the
- # provisioning profile whose pattern is the longest).
- selected_key = max(valid_provisioning_profiles, key=lambda v: len(v))
- return valid_provisioning_profiles[selected_key]
-
- def _LoadProvisioningProfile(self, profile_path):
- """Extracts the plist embedded in a provisioning profile.
-
- Args:
- profile_path: string, path to the .mobileprovision file
-
- Returns:
- Content of the plist embedded in the provisioning profile as a dictionary.
- """
- with tempfile.NamedTemporaryFile() as temp:
- subprocess.check_call([
- 'security', 'cms', '-D', '-i', profile_path, '-o', temp.name])
- return self._LoadPlistMaybeBinary(temp.name)
-
- def _MergePlist(self, merged_plist, plist):
- """Merge |plist| into |merged_plist|."""
- for key, value in plist.iteritems():
- if isinstance(value, dict):
- merged_value = merged_plist.get(key, {})
- if isinstance(merged_value, dict):
- self._MergePlist(merged_value, value)
- merged_plist[key] = merged_value
- else:
- merged_plist[key] = value
- else:
- merged_plist[key] = value
-
- def _LoadPlistMaybeBinary(self, plist_path):
- """Loads into a memory a plist possibly encoded in binary format.
-
- This is a wrapper around plistlib.readPlist that tries to convert the
- plist to the XML format if it can't be parsed (assuming that it is in
- the binary format).
-
- Args:
- plist_path: string, path to a plist file, in XML or binary format
-
- Returns:
- Content of the plist as a dictionary.
- """
- try:
- # First, try to read the file using plistlib that only supports XML,
- # and if an exception is raised, convert a temporary copy to XML and
- # load that copy.
- return plistlib.readPlist(plist_path)
- except:
- pass
- with tempfile.NamedTemporaryFile() as temp:
- shutil.copy2(plist_path, temp.name)
- subprocess.check_call(['plutil', '-convert', 'xml1', temp.name])
- return plistlib.readPlist(temp.name)
-
- def _GetSubstitutions(self, bundle_identifier, app_identifier_prefix):
- """Constructs a dictionary of variable substitutions for Entitlements.plist.
-
- Args:
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
- app_identifier_prefix: string, value for AppIdentifierPrefix
-
- Returns:
- Dictionary of substitutions to apply when generating Entitlements.plist.
- """
- return {
- 'CFBundleIdentifier': bundle_identifier,
- 'AppIdentifierPrefix': app_identifier_prefix,
- }
-
- def _GetCFBundleIdentifier(self):
- """Extracts CFBundleIdentifier value from Info.plist in the bundle.
-
- Returns:
- Value of CFBundleIdentifier in the Info.plist located in the bundle.
- """
- info_plist_path = os.path.join(
- os.environ['TARGET_BUILD_DIR'],
- os.environ['INFOPLIST_PATH'])
- info_plist_data = self._LoadPlistMaybeBinary(info_plist_path)
- return info_plist_data['CFBundleIdentifier']
-
- def _InstallEntitlements(self, entitlements, substitutions, overrides):
- """Generates and install the ${BundleName}.xcent entitlements file.
-
- Expands variables "$(variable)" pattern in the source entitlements file,
- add extra entitlements defined in the .mobileprovision file and the copy
- the generated plist to "${BundlePath}.xcent".
-
- Args:
- entitlements: string, optional, path to the Entitlements.plist template
- to use, defaults to "${SDKROOT}/Entitlements.plist"
- substitutions: dictionary, variable substitutions
- overrides: dictionary, values to add to the entitlements
-
- Returns:
- Path to the generated entitlements file.
- """
- source_path = entitlements
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['PRODUCT_NAME'] + '.xcent')
- if not source_path:
- source_path = os.path.join(
- os.environ['SDKROOT'],
- 'Entitlements.plist')
- shutil.copy2(source_path, target_path)
- data = self._LoadPlistMaybeBinary(target_path)
- data = self._ExpandVariables(data, substitutions)
- if overrides:
- for key in overrides:
- if key not in data:
- data[key] = overrides[key]
- plistlib.writePlist(data, target_path)
- return target_path
-
- def _ExpandVariables(self, data, substitutions):
- """Expands variables "$(variable)" in data.
-
- Args:
- data: object, can be either string, list or dictionary
- substitutions: dictionary, variable substitutions to perform
-
- Returns:
- Copy of data where each references to "$(variable)" has been replaced
- by the corresponding value found in substitutions, or left intact if
- the key was not found.
- """
- if isinstance(data, str):
- for key, value in substitutions.iteritems():
- data = data.replace('$(%s)' % key, value)
- return data
- if isinstance(data, list):
- return [self._ExpandVariables(v, substitutions) for v in data]
- if isinstance(data, dict):
- return {k: self._ExpandVariables(data[k], substitutions) for k in data}
- return data
-
-if __name__ == '__main__':
- sys.exit(main(sys.argv[1:]))
diff --git a/test/addons-napi/test_async/build/test_async.target.mk b/test/addons-napi/test_async/build/test_async.target.mk
deleted file mode 100644
index 86fcb28e45668d..00000000000000
--- a/test/addons-napi/test_async/build/test_async.target.mk
+++ /dev/null
@@ -1,178 +0,0 @@
-# This file is generated by gyp; do not edit.
-
-TOOLSET := target
-TARGET := test_async
-DEFS_Debug := \
- '-DNODE_GYP_MODULE_NAME=test_async' \
- '-DUSING_UV_SHARED=1' \
- '-DUSING_V8_SHARED=1' \
- '-DV8_DEPRECATION_WARNINGS=1' \
- '-D_DARWIN_USE_64_BIT_INODE=1' \
- '-D_LARGEFILE_SOURCE' \
- '-D_FILE_OFFSET_BITS=64' \
- '-DBUILDING_NODE_EXTENSION' \
- '-DDEBUG' \
- '-D_DEBUG' \
- '-DV8_ENABLE_CHECKS'
-
-# Flags passed to all source files.
-CFLAGS_Debug := \
- -O0 \
- -gdwarf-2 \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -Wall \
- -Wendif-labels \
- -W \
- -Wno-unused-parameter
-
-# Flags passed to only C files.
-CFLAGS_C_Debug := \
- -fno-strict-aliasing
-
-# Flags passed to only C++ files.
-CFLAGS_CC_Debug := \
- -std=gnu++0x \
- -stdlib=libc++ \
- -fno-rtti \
- -fno-exceptions \
- -fno-threadsafe-statics \
- -fno-strict-aliasing
-
-# Flags passed to only ObjC files.
-CFLAGS_OBJC_Debug :=
-
-# Flags passed to only ObjC++ files.
-CFLAGS_OBJCC_Debug :=
-
-INCS_Debug := \
- -I/Users/trott/io.js/include/node \
- -I/Users/trott/io.js/src \
- -I/Users/trott/io.js/deps/uv/include \
- -I/Users/trott/io.js/deps/v8/include
-
-DEFS_Release := \
- '-DNODE_GYP_MODULE_NAME=test_async' \
- '-DUSING_UV_SHARED=1' \
- '-DUSING_V8_SHARED=1' \
- '-DV8_DEPRECATION_WARNINGS=1' \
- '-D_DARWIN_USE_64_BIT_INODE=1' \
- '-D_LARGEFILE_SOURCE' \
- '-D_FILE_OFFSET_BITS=64' \
- '-DBUILDING_NODE_EXTENSION'
-
-# Flags passed to all source files.
-CFLAGS_Release := \
- -Os \
- -gdwarf-2 \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -Wall \
- -Wendif-labels \
- -W \
- -Wno-unused-parameter
-
-# Flags passed to only C files.
-CFLAGS_C_Release := \
- -fno-strict-aliasing
-
-# Flags passed to only C++ files.
-CFLAGS_CC_Release := \
- -std=gnu++0x \
- -stdlib=libc++ \
- -fno-rtti \
- -fno-exceptions \
- -fno-threadsafe-statics \
- -fno-strict-aliasing
-
-# Flags passed to only ObjC files.
-CFLAGS_OBJC_Release :=
-
-# Flags passed to only ObjC++ files.
-CFLAGS_OBJCC_Release :=
-
-INCS_Release := \
- -I/Users/trott/io.js/include/node \
- -I/Users/trott/io.js/src \
- -I/Users/trott/io.js/deps/uv/include \
- -I/Users/trott/io.js/deps/v8/include
-
-OBJS := \
- $(obj).target/$(TARGET)/test_async.o
-
-# Add to the list of files we specially track dependencies for.
-all_deps += $(OBJS)
-
-# CFLAGS et al overrides must be target-local.
-# See "Target-specific Variable Values" in the GNU Make manual.
-$(OBJS): TOOLSET := $(TOOLSET)
-$(OBJS): GYP_CFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE))
-$(OBJS): GYP_CXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE))
-$(OBJS): GYP_OBJCFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) $(CFLAGS_OBJC_$(BUILDTYPE))
-$(OBJS): GYP_OBJCXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) $(CFLAGS_OBJCC_$(BUILDTYPE))
-
-# Suffix rules, putting all outputs into $(obj).
-
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-
-# Try building from generated source, too.
-
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-
-# End of this set of suffix rules
-### Rules for final target.
-LDFLAGS_Debug := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -L$(builddir) \
- -stdlib=libc++
-
-LIBTOOLFLAGS_Debug := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first
-
-LDFLAGS_Release := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -L$(builddir) \
- -stdlib=libc++
-
-LIBTOOLFLAGS_Release := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first
-
-LIBS :=
-
-$(builddir)/test_async.node: GYP_LDFLAGS := $(LDFLAGS_$(BUILDTYPE))
-$(builddir)/test_async.node: LIBS := $(LIBS)
-$(builddir)/test_async.node: GYP_LIBTOOLFLAGS := $(LIBTOOLFLAGS_$(BUILDTYPE))
-$(builddir)/test_async.node: TOOLSET := $(TOOLSET)
-$(builddir)/test_async.node: $(OBJS) FORCE_DO_CMD
- $(call do_cmd,solink_module)
-
-all_deps += $(builddir)/test_async.node
-# Add target alias
-.PHONY: test_async
-test_async: $(builddir)/test_async.node
-
-# Short alias for building this executable.
-.PHONY: test_async.node
-test_async.node: $(builddir)/test_async.node
-
-# Add executable to "all" target.
-.PHONY: all
-all: $(builddir)/test_async.node
-
diff --git a/test/addons-napi/test_buffer/build/Makefile b/test/addons-napi/test_buffer/build/Makefile
deleted file mode 100644
index 3ccdeb364558af..00000000000000
--- a/test/addons-napi/test_buffer/build/Makefile
+++ /dev/null
@@ -1,342 +0,0 @@
-# We borrow heavily from the kernel build setup, though we are simpler since
-# we don't have Kconfig tweaking settings on us.
-
-# The implicit make rules have it looking for RCS files, among other things.
-# We instead explicitly write all the rules we care about.
-# It's even quicker (saves ~200ms) to pass -r on the command line.
-MAKEFLAGS=-r
-
-# The source directory tree.
-srcdir := ..
-abs_srcdir := $(abspath $(srcdir))
-
-# The name of the builddir.
-builddir_name ?= .
-
-# The V=1 flag on command line makes us verbosely print command lines.
-ifdef V
- quiet=
-else
- quiet=quiet_
-endif
-
-# Specify BUILDTYPE=Release on the command line for a release build.
-BUILDTYPE ?= Release
-
-# Directory all our build output goes into.
-# Note that this must be two directories beneath src/ for unit tests to pass,
-# as they reach into the src/ directory for data with relative paths.
-builddir ?= $(builddir_name)/$(BUILDTYPE)
-abs_builddir := $(abspath $(builddir))
-depsdir := $(builddir)/.deps
-
-# Object output directory.
-obj := $(builddir)/obj
-abs_obj := $(abspath $(obj))
-
-# We build up a list of every single one of the targets so we can slurp in the
-# generated dependency rule Makefiles in one pass.
-all_deps :=
-
-
-
-CC.target ?= $(CC)
-CFLAGS.target ?= $(CPPFLAGS) $(CFLAGS)
-CXX.target ?= $(CXX)
-CXXFLAGS.target ?= $(CPPFLAGS) $(CXXFLAGS)
-LINK.target ?= $(LINK)
-LDFLAGS.target ?= $(LDFLAGS)
-AR.target ?= $(AR)
-
-# C++ apps need to be linked with g++.
-LINK ?= $(CXX.target)
-
-# TODO(evan): move all cross-compilation logic to gyp-time so we don't need
-# to replicate this environment fallback in make as well.
-CC.host ?= gcc
-CFLAGS.host ?= $(CPPFLAGS_host) $(CFLAGS_host)
-CXX.host ?= g++
-CXXFLAGS.host ?= $(CPPFLAGS_host) $(CXXFLAGS_host)
-LINK.host ?= $(CXX.host)
-LDFLAGS.host ?=
-AR.host ?= ar
-
-# Define a dir function that can handle spaces.
-# http://www.gnu.org/software/make/manual/make.html#Syntax-of-Functions
-# "leading spaces cannot appear in the text of the first argument as written.
-# These characters can be put into the argument value by variable substitution."
-empty :=
-space := $(empty) $(empty)
-
-# http://stackoverflow.com/questions/1189781/using-make-dir-or-notdir-on-a-path-with-spaces
-replace_spaces = $(subst $(space),?,$1)
-unreplace_spaces = $(subst ?,$(space),$1)
-dirx = $(call unreplace_spaces,$(dir $(call replace_spaces,$1)))
-
-# Flags to make gcc output dependency info. Note that you need to be
-# careful here to use the flags that ccache and distcc can understand.
-# We write to a dep file on the side first and then rename at the end
-# so we can't end up with a broken dep file.
-depfile = $(depsdir)/$(call replace_spaces,$@).d
-DEPFLAGS = -MMD -MF $(depfile).raw
-
-# We have to fixup the deps output in a few ways.
-# (1) the file output should mention the proper .o file.
-# ccache or distcc lose the path to the target, so we convert a rule of
-# the form:
-# foobar.o: DEP1 DEP2
-# into
-# path/to/foobar.o: DEP1 DEP2
-# (2) we want missing files not to cause us to fail to build.
-# We want to rewrite
-# foobar.o: DEP1 DEP2 \
-# DEP3
-# to
-# DEP1:
-# DEP2:
-# DEP3:
-# so if the files are missing, they're just considered phony rules.
-# We have to do some pretty insane escaping to get those backslashes
-# and dollar signs past make, the shell, and sed at the same time.
-# Doesn't work with spaces, but that's fine: .d files have spaces in
-# their names replaced with other characters.
-define fixup_dep
-# The depfile may not exist if the input file didn't have any #includes.
-touch $(depfile).raw
-# Fixup path as in (1).
-sed -e "s|^$(notdir $@)|$@|" $(depfile).raw >> $(depfile)
-# Add extra rules as in (2).
-# We remove slashes and replace spaces with new lines;
-# remove blank lines;
-# delete the first line and append a colon to the remaining lines.
-sed -e 's|\\||' -e 'y| |\n|' $(depfile).raw |\
- grep -v '^$$' |\
- sed -e 1d -e 's|$$|:|' \
- >> $(depfile)
-rm $(depfile).raw
-endef
-
-# Command definitions:
-# - cmd_foo is the actual command to run;
-# - quiet_cmd_foo is the brief-output summary of the command.
-
-quiet_cmd_cc = CC($(TOOLSET)) $@
-cmd_cc = $(CC.$(TOOLSET)) $(GYP_CFLAGS) $(DEPFLAGS) $(CFLAGS.$(TOOLSET)) -c -o $@ $<
-
-quiet_cmd_cxx = CXX($(TOOLSET)) $@
-cmd_cxx = $(CXX.$(TOOLSET)) $(GYP_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-
-quiet_cmd_objc = CXX($(TOOLSET)) $@
-cmd_objc = $(CC.$(TOOLSET)) $(GYP_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-quiet_cmd_objcxx = CXX($(TOOLSET)) $@
-cmd_objcxx = $(CXX.$(TOOLSET)) $(GYP_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-# Commands for precompiled header files.
-quiet_cmd_pch_c = CXX($(TOOLSET)) $@
-cmd_pch_c = $(CC.$(TOOLSET)) $(GYP_PCH_CFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-quiet_cmd_pch_cc = CXX($(TOOLSET)) $@
-cmd_pch_cc = $(CC.$(TOOLSET)) $(GYP_PCH_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-quiet_cmd_pch_m = CXX($(TOOLSET)) $@
-cmd_pch_m = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $<
-quiet_cmd_pch_mm = CXX($(TOOLSET)) $@
-cmd_pch_mm = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-# gyp-mac-tool is written next to the root Makefile by gyp.
-# Use $(4) for the command, since $(2) and $(3) are used as flag by do_cmd
-# already.
-quiet_cmd_mac_tool = MACTOOL $(4) $<
-cmd_mac_tool = ./gyp-mac-tool $(4) $< "$@"
-
-quiet_cmd_mac_package_framework = PACKAGE FRAMEWORK $@
-cmd_mac_package_framework = ./gyp-mac-tool package-framework "$@" $(4)
-
-quiet_cmd_infoplist = INFOPLIST $@
-cmd_infoplist = $(CC.$(TOOLSET)) -E -P -Wno-trigraphs -x c $(INFOPLIST_DEFINES) "$<" -o "$@"
-
-quiet_cmd_touch = TOUCH $@
-cmd_touch = touch $@
-
-quiet_cmd_copy = COPY $@
-# send stderr to /dev/null to ignore messages when linking directories.
-cmd_copy = rm -rf "$@" && cp -af "$<" "$@"
-
-quiet_cmd_alink = LIBTOOL-STATIC $@
-cmd_alink = rm -f $@ && ./gyp-mac-tool filter-libtool libtool $(GYP_LIBTOOLFLAGS) -static -o $@ $(filter %.o,$^)
-
-quiet_cmd_link = LINK($(TOOLSET)) $@
-cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS)
-
-quiet_cmd_solink = SOLINK($(TOOLSET)) $@
-cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS)
-
-quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@
-cmd_solink_module = $(LINK.$(TOOLSET)) -bundle $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS)
-
-
-# Define an escape_quotes function to escape single quotes.
-# This allows us to handle quotes properly as long as we always use
-# use single quotes and escape_quotes.
-escape_quotes = $(subst ','\'',$(1))
-# This comment is here just to include a ' to unconfuse syntax highlighting.
-# Define an escape_vars function to escape '$' variable syntax.
-# This allows us to read/write command lines with shell variables (e.g.
-# $LD_LIBRARY_PATH), without triggering make substitution.
-escape_vars = $(subst $$,$$$$,$(1))
-# Helper that expands to a shell command to echo a string exactly as it is in
-# make. This uses printf instead of echo because printf's behaviour with respect
-# to escape sequences is more portable than echo's across different shells
-# (e.g., dash, bash).
-exact_echo = printf '%s\n' '$(call escape_quotes,$(1))'
-
-# Helper to compare the command we're about to run against the command
-# we logged the last time we ran the command. Produces an empty
-# string (false) when the commands match.
-# Tricky point: Make has no string-equality test function.
-# The kernel uses the following, but it seems like it would have false
-# positives, where one string reordered its arguments.
-# arg_check = $(strip $(filter-out $(cmd_$(1)), $(cmd_$@)) \
-# $(filter-out $(cmd_$@), $(cmd_$(1))))
-# We instead substitute each for the empty string into the other, and
-# say they're equal if both substitutions produce the empty string.
-# .d files contain ? instead of spaces, take that into account.
-command_changed = $(or $(subst $(cmd_$(1)),,$(cmd_$(call replace_spaces,$@))),\
- $(subst $(cmd_$(call replace_spaces,$@)),,$(cmd_$(1))))
-
-# Helper that is non-empty when a prerequisite changes.
-# Normally make does this implicitly, but we force rules to always run
-# so we can check their command lines.
-# $? -- new prerequisites
-# $| -- order-only dependencies
-prereq_changed = $(filter-out FORCE_DO_CMD,$(filter-out $|,$?))
-
-# Helper that executes all postbuilds until one fails.
-define do_postbuilds
- @E=0;\
- for p in $(POSTBUILDS); do\
- eval $$p;\
- E=$$?;\
- if [ $$E -ne 0 ]; then\
- break;\
- fi;\
- done;\
- if [ $$E -ne 0 ]; then\
- rm -rf "$@";\
- exit $$E;\
- fi
-endef
-
-# do_cmd: run a command via the above cmd_foo names, if necessary.
-# Should always run for a given target to handle command-line changes.
-# Second argument, if non-zero, makes it do asm/C/C++ dependency munging.
-# Third argument, if non-zero, makes it do POSTBUILDS processing.
-# Note: We intentionally do NOT call dirx for depfile, since it contains ? for
-# spaces already and dirx strips the ? characters.
-define do_cmd
-$(if $(or $(command_changed),$(prereq_changed)),
- @$(call exact_echo, $($(quiet)cmd_$(1)))
- @mkdir -p "$(call dirx,$@)" "$(dir $(depfile))"
- $(if $(findstring flock,$(word 2,$(cmd_$1))),
- @$(cmd_$(1))
- @echo " $(quiet_cmd_$(1)): Finished",
- @$(cmd_$(1))
- )
- @$(call exact_echo,$(call escape_vars,cmd_$(call replace_spaces,$@) := $(cmd_$(1)))) > $(depfile)
- @$(if $(2),$(fixup_dep))
- $(if $(and $(3), $(POSTBUILDS)),
- $(call do_postbuilds)
- )
-)
-endef
-
-# Declare the "all" target first so it is the default,
-# even though we don't have the deps yet.
-.PHONY: all
-all:
-
-# make looks for ways to re-generate included makefiles, but in our case, we
-# don't have a direct way. Explicitly telling make that it has nothing to do
-# for them makes it go faster.
-%.d: ;
-
-# Use FORCE_DO_CMD to force a target to run. Should be coupled with
-# do_cmd.
-.PHONY: FORCE_DO_CMD
-FORCE_DO_CMD:
-
-TOOLSET := target
-# Suffix rules, putting all outputs into $(obj).
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cpp FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cxx FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.m FORCE_DO_CMD
- @$(call do_cmd,objc,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.mm FORCE_DO_CMD
- @$(call do_cmd,objcxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.S FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.s FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-# Try building from generated source, too.
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cpp FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cxx FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.m FORCE_DO_CMD
- @$(call do_cmd,objc,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.mm FORCE_DO_CMD
- @$(call do_cmd,objcxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.S FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.s FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-$(obj).$(TOOLSET)/%.o: $(obj)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.cpp FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.cxx FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.m FORCE_DO_CMD
- @$(call do_cmd,objc,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.mm FORCE_DO_CMD
- @$(call do_cmd,objcxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.S FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.s FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-
-ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
- $(findstring $(join ^,$(prefix)),\
- $(join ^,test_buffer.target.mk)))),)
- include test_buffer.target.mk
-endif
-
-quiet_cmd_regen_makefile = ACTION Regenerating $@
-cmd_regen_makefile = cd $(srcdir); /Users/trott/io.js/deps/npm/node_modules/node-gyp/gyp/gyp_main.py -fmake --ignore-environment "--toplevel-dir=." -I/Users/trott/io.js/test/addons-napi/test_buffer/build/config.gypi -I/Users/trott/io.js/deps/npm/node_modules/node-gyp/addon.gypi -I/Users/trott/io.js/common.gypi "--depth=." "-Goutput_dir=." "--generator-output=build" "-Dlibrary=shared_library" "-Dvisibility=default" "-Dnode_root_dir=/Users/trott/io.js" "-Dnode_gyp_dir=/Users/trott/io.js/deps/npm/node_modules/node-gyp" "-Dnode_lib_file=/Users/trott/io.js/$(Configuration)/node.lib" "-Dmodule_root_dir=/Users/trott/io.js/test/addons-napi/test_buffer" "-Dnode_engine=v8" binding.gyp
-Makefile: $(srcdir)/../../../deps/npm/node_modules/node-gyp/addon.gypi $(srcdir)/build/config.gypi $(srcdir)/binding.gyp $(srcdir)/../../../common.gypi
- $(call do_cmd,regen_makefile)
-
-# "all" is a concatenation of the "all" targets from all the included
-# sub-makefiles. This is just here to clarify.
-all:
-
-# Add in dependency-tracking rules. $(all_deps) is the list of every single
-# target in our tree. Only consider the ones with .d (dependency) info:
-d_files := $(wildcard $(foreach f,$(all_deps),$(depsdir)/$(f).d))
-ifneq ($(d_files),)
- include $(d_files)
-endif
diff --git a/test/addons-napi/test_buffer/build/binding.Makefile b/test/addons-napi/test_buffer/build/binding.Makefile
deleted file mode 100644
index 1f1489b1507bc6..00000000000000
--- a/test/addons-napi/test_buffer/build/binding.Makefile
+++ /dev/null
@@ -1,6 +0,0 @@
-# This file is generated by gyp; do not edit.
-
-export builddir_name ?= ./build/.
-.PHONY: all
-all:
- $(MAKE) test_buffer
diff --git a/test/addons-napi/test_buffer/build/config.gypi b/test/addons-napi/test_buffer/build/config.gypi
deleted file mode 100644
index 6a35cfccbb772b..00000000000000
--- a/test/addons-napi/test_buffer/build/config.gypi
+++ /dev/null
@@ -1,70 +0,0 @@
-# Do not edit. File was generated by node-gyp's "configure" step
-{
- "target_defaults": {
- "cflags": [],
- "default_configuration": "Release",
- "defines": [],
- "include_dirs": [],
- "libraries": []
- },
- "variables": {
- "asan": 0,
- "coverage": "false",
- "debug_devtools": "node",
- "debug_http2": "false",
- "debug_nghttp2": "false",
- "force_dynamic_crt": 0,
- "host_arch": "x64",
- "icu_data_file": "icudt59l.dat",
- "icu_data_in": "../../deps/icu-small/source/data/in/icudt59l.dat",
- "icu_endianness": "l",
- "icu_gyp_path": "tools/icu/icu-generic.gyp",
- "icu_locales": "en,root",
- "icu_path": "deps/icu-small",
- "icu_small": "true",
- "icu_ver_major": "59",
- "llvm_version": 0,
- "node_byteorder": "little",
- "node_enable_d8": "false",
- "node_enable_v8_vtunejit": "false",
- "node_install_npm": "true",
- "node_module_version": 58,
- "node_no_browser_globals": "false",
- "node_prefix": "/usr/local",
- "node_release_urlbase": "",
- "node_shared": "false",
- "node_shared_cares": "false",
- "node_shared_http_parser": "false",
- "node_shared_libuv": "false",
- "node_shared_openssl": "false",
- "node_shared_zlib": "false",
- "node_tag": "",
- "node_use_bundled_v8": "true",
- "node_use_dtrace": "true",
- "node_use_etw": "false",
- "node_use_lttng": "false",
- "node_use_openssl": "true",
- "node_use_perfctr": "false",
- "node_use_v8_platform": "true",
- "node_without_node_options": "false",
- "openssl_fips": "",
- "openssl_no_asm": 0,
- "shlib_suffix": "58.dylib",
- "target_arch": "x64",
- "uv_parent_path": "/deps/uv/",
- "uv_use_dtrace": "true",
- "v8_enable_gdbjit": 0,
- "v8_enable_i18n_support": 1,
- "v8_enable_inspector": 1,
- "v8_no_strict_aliasing": 1,
- "v8_optimized_debug": 0,
- "v8_promise_internal_field_count": 1,
- "v8_random_seed": 0,
- "v8_trace_maps": 0,
- "v8_use_snapshot": "true",
- "want_separate_host_toolset": 0,
- "xcode_version": "8.0",
- "nodedir": "/Users/trott/io.js",
- "standalone_static_library": 1
- }
-}
diff --git a/test/addons-napi/test_buffer/build/gyp-mac-tool b/test/addons-napi/test_buffer/build/gyp-mac-tool
deleted file mode 100755
index 8ef02b0493a003..00000000000000
--- a/test/addons-napi/test_buffer/build/gyp-mac-tool
+++ /dev/null
@@ -1,611 +0,0 @@
-#!/usr/bin/env python
-# Generated by gyp. Do not edit.
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Utility functions to perform Xcode-style build steps.
-
-These functions are executed via gyp-mac-tool when using the Makefile generator.
-"""
-
-import fcntl
-import fnmatch
-import glob
-import json
-import os
-import plistlib
-import re
-import shutil
-import string
-import subprocess
-import sys
-import tempfile
-
-
-def main(args):
- executor = MacTool()
- exit_code = executor.Dispatch(args)
- if exit_code is not None:
- sys.exit(exit_code)
-
-
-class MacTool(object):
- """This class performs all the Mac tooling steps. The methods can either be
- executed directly, or dispatched from an argument list."""
-
- def Dispatch(self, args):
- """Dispatches a string command to a method."""
- if len(args) < 1:
- raise Exception("Not enough arguments")
-
- method = "Exec%s" % self._CommandifyName(args[0])
- return getattr(self, method)(*args[1:])
-
- def _CommandifyName(self, name_string):
- """Transforms a tool name like copy-info-plist to CopyInfoPlist"""
- return name_string.title().replace('-', '')
-
- def ExecCopyBundleResource(self, source, dest, convert_to_binary):
- """Copies a resource file to the bundle/Resources directory, performing any
- necessary compilation on each resource."""
- extension = os.path.splitext(source)[1].lower()
- if os.path.isdir(source):
- # Copy tree.
- # TODO(thakis): This copies file attributes like mtime, while the
- # single-file branch below doesn't. This should probably be changed to
- # be consistent with the single-file branch.
- if os.path.exists(dest):
- shutil.rmtree(dest)
- shutil.copytree(source, dest)
- elif extension == '.xib':
- return self._CopyXIBFile(source, dest)
- elif extension == '.storyboard':
- return self._CopyXIBFile(source, dest)
- elif extension == '.strings':
- self._CopyStringsFile(source, dest, convert_to_binary)
- else:
- shutil.copy(source, dest)
-
- def _CopyXIBFile(self, source, dest):
- """Compiles a XIB file with ibtool into a binary plist in the bundle."""
-
- # ibtool sometimes crashes with relative paths. See crbug.com/314728.
- base = os.path.dirname(os.path.realpath(__file__))
- if os.path.relpath(source):
- source = os.path.join(base, source)
- if os.path.relpath(dest):
- dest = os.path.join(base, dest)
-
- args = ['xcrun', 'ibtool', '--errors', '--warnings', '--notices',
- '--output-format', 'human-readable-text', '--compile', dest, source]
- ibtool_section_re = re.compile(r'/\*.*\*/')
- ibtool_re = re.compile(r'.*note:.*is clipping its content')
- ibtoolout = subprocess.Popen(args, stdout=subprocess.PIPE)
- current_section_header = None
- for line in ibtoolout.stdout:
- if ibtool_section_re.match(line):
- current_section_header = line
- elif not ibtool_re.match(line):
- if current_section_header:
- sys.stdout.write(current_section_header)
- current_section_header = None
- sys.stdout.write(line)
- return ibtoolout.returncode
-
- def _ConvertToBinary(self, dest):
- subprocess.check_call([
- 'xcrun', 'plutil', '-convert', 'binary1', '-o', dest, dest])
-
- def _CopyStringsFile(self, source, dest, convert_to_binary):
- """Copies a .strings file using iconv to reconvert the input into UTF-16."""
- input_code = self._DetectInputEncoding(source) or "UTF-8"
-
- # Xcode's CpyCopyStringsFile / builtin-copyStrings seems to call
- # CFPropertyListCreateFromXMLData() behind the scenes; at least it prints
- # CFPropertyListCreateFromXMLData(): Old-style plist parser: missing
- # semicolon in dictionary.
- # on invalid files. Do the same kind of validation.
- import CoreFoundation
- s = open(source, 'rb').read()
- d = CoreFoundation.CFDataCreate(None, s, len(s))
- _, error = CoreFoundation.CFPropertyListCreateFromXMLData(None, d, 0, None)
- if error:
- return
-
- fp = open(dest, 'wb')
- fp.write(s.decode(input_code).encode('UTF-16'))
- fp.close()
-
- if convert_to_binary == 'True':
- self._ConvertToBinary(dest)
-
- def _DetectInputEncoding(self, file_name):
- """Reads the first few bytes from file_name and tries to guess the text
- encoding. Returns None as a guess if it can't detect it."""
- fp = open(file_name, 'rb')
- try:
- header = fp.read(3)
- except e:
- fp.close()
- return None
- fp.close()
- if header.startswith("\xFE\xFF"):
- return "UTF-16"
- elif header.startswith("\xFF\xFE"):
- return "UTF-16"
- elif header.startswith("\xEF\xBB\xBF"):
- return "UTF-8"
- else:
- return None
-
- def ExecCopyInfoPlist(self, source, dest, convert_to_binary, *keys):
- """Copies the |source| Info.plist to the destination directory |dest|."""
- # Read the source Info.plist into memory.
- fd = open(source, 'r')
- lines = fd.read()
- fd.close()
-
- # Insert synthesized key/value pairs (e.g. BuildMachineOSBuild).
- plist = plistlib.readPlistFromString(lines)
- if keys:
- plist = dict(plist.items() + json.loads(keys[0]).items())
- lines = plistlib.writePlistToString(plist)
-
- # Go through all the environment variables and replace them as variables in
- # the file.
- IDENT_RE = re.compile(r'[/\s]')
- for key in os.environ:
- if key.startswith('_'):
- continue
- evar = '${%s}' % key
- evalue = os.environ[key]
- lines = string.replace(lines, evar, evalue)
-
- # Xcode supports various suffices on environment variables, which are
- # all undocumented. :rfc1034identifier is used in the standard project
- # template these days, and :identifier was used earlier. They are used to
- # convert non-url characters into things that look like valid urls --
- # except that the replacement character for :identifier, '_' isn't valid
- # in a URL either -- oops, hence :rfc1034identifier was born.
- evar = '${%s:identifier}' % key
- evalue = IDENT_RE.sub('_', os.environ[key])
- lines = string.replace(lines, evar, evalue)
-
- evar = '${%s:rfc1034identifier}' % key
- evalue = IDENT_RE.sub('-', os.environ[key])
- lines = string.replace(lines, evar, evalue)
-
- # Remove any keys with values that haven't been replaced.
- lines = lines.split('\n')
- for i in range(len(lines)):
- if lines[i].strip().startswith("${"):
- lines[i] = None
- lines[i - 1] = None
- lines = '\n'.join(filter(lambda x: x is not None, lines))
-
- # Write out the file with variables replaced.
- fd = open(dest, 'w')
- fd.write(lines)
- fd.close()
-
- # Now write out PkgInfo file now that the Info.plist file has been
- # "compiled".
- self._WritePkgInfo(dest)
-
- if convert_to_binary == 'True':
- self._ConvertToBinary(dest)
-
- def _WritePkgInfo(self, info_plist):
- """This writes the PkgInfo file from the data stored in Info.plist."""
- plist = plistlib.readPlist(info_plist)
- if not plist:
- return
-
- # Only create PkgInfo for executable types.
- package_type = plist['CFBundlePackageType']
- if package_type != 'APPL':
- return
-
- # The format of PkgInfo is eight characters, representing the bundle type
- # and bundle signature, each four characters. If that is missing, four
- # '?' characters are used instead.
- signature_code = plist.get('CFBundleSignature', '????')
- if len(signature_code) != 4: # Wrong length resets everything, too.
- signature_code = '?' * 4
-
- dest = os.path.join(os.path.dirname(info_plist), 'PkgInfo')
- fp = open(dest, 'w')
- fp.write('%s%s' % (package_type, signature_code))
- fp.close()
-
- def ExecFlock(self, lockfile, *cmd_list):
- """Emulates the most basic behavior of Linux's flock(1)."""
- # Rely on exception handling to report errors.
- fd = os.open(lockfile, os.O_RDONLY|os.O_NOCTTY|os.O_CREAT, 0o666)
- fcntl.flock(fd, fcntl.LOCK_EX)
- return subprocess.call(cmd_list)
-
- def ExecFilterLibtool(self, *cmd_list):
- """Calls libtool and filters out '/path/to/libtool: file: foo.o has no
- symbols'."""
- libtool_re = re.compile(r'^.*libtool: file: .* has no symbols$')
- libtool_re5 = re.compile(
- r'^.*libtool: warning for library: ' +
- r'.* the table of contents is empty ' +
- r'\(no object file members in the library define global symbols\)$')
- env = os.environ.copy()
- # Ref:
- # http://www.opensource.apple.com/source/cctools/cctools-809/misc/libtool.c
- # The problem with this flag is that it resets the file mtime on the file to
- # epoch=0, e.g. 1970-1-1 or 1969-12-31 depending on timezone.
- env['ZERO_AR_DATE'] = '1'
- libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env)
- _, err = libtoolout.communicate()
- for line in err.splitlines():
- if not libtool_re.match(line) and not libtool_re5.match(line):
- print >>sys.stderr, line
- # Unconditionally touch the output .a file on the command line if present
- # and the command succeeded. A bit hacky.
- if not libtoolout.returncode:
- for i in range(len(cmd_list) - 1):
- if cmd_list[i] == "-o" and cmd_list[i+1].endswith('.a'):
- os.utime(cmd_list[i+1], None)
- break
- return libtoolout.returncode
-
- def ExecPackageFramework(self, framework, version):
- """Takes a path to Something.framework and the Current version of that and
- sets up all the symlinks."""
- # Find the name of the binary based on the part before the ".framework".
- binary = os.path.basename(framework).split('.')[0]
-
- CURRENT = 'Current'
- RESOURCES = 'Resources'
- VERSIONS = 'Versions'
-
- if not os.path.exists(os.path.join(framework, VERSIONS, version, binary)):
- # Binary-less frameworks don't seem to contain symlinks (see e.g.
- # chromium's out/Debug/org.chromium.Chromium.manifest/ bundle).
- return
-
- # Move into the framework directory to set the symlinks correctly.
- pwd = os.getcwd()
- os.chdir(framework)
-
- # Set up the Current version.
- self._Relink(version, os.path.join(VERSIONS, CURRENT))
-
- # Set up the root symlinks.
- self._Relink(os.path.join(VERSIONS, CURRENT, binary), binary)
- self._Relink(os.path.join(VERSIONS, CURRENT, RESOURCES), RESOURCES)
-
- # Back to where we were before!
- os.chdir(pwd)
-
- def _Relink(self, dest, link):
- """Creates a symlink to |dest| named |link|. If |link| already exists,
- it is overwritten."""
- if os.path.lexists(link):
- os.remove(link)
- os.symlink(dest, link)
-
- def ExecCompileXcassets(self, keys, *inputs):
- """Compiles multiple .xcassets files into a single .car file.
-
- This invokes 'actool' to compile all the inputs .xcassets files. The
- |keys| arguments is a json-encoded dictionary of extra arguments to
- pass to 'actool' when the asset catalogs contains an application icon
- or a launch image.
-
- Note that 'actool' does not create the Assets.car file if the asset
- catalogs does not contains imageset.
- """
- command_line = [
- 'xcrun', 'actool', '--output-format', 'human-readable-text',
- '--compress-pngs', '--notices', '--warnings', '--errors',
- ]
- is_iphone_target = 'IPHONEOS_DEPLOYMENT_TARGET' in os.environ
- if is_iphone_target:
- platform = os.environ['CONFIGURATION'].split('-')[-1]
- if platform not in ('iphoneos', 'iphonesimulator'):
- platform = 'iphonesimulator'
- command_line.extend([
- '--platform', platform, '--target-device', 'iphone',
- '--target-device', 'ipad', '--minimum-deployment-target',
- os.environ['IPHONEOS_DEPLOYMENT_TARGET'], '--compile',
- os.path.abspath(os.environ['CONTENTS_FOLDER_PATH']),
- ])
- else:
- command_line.extend([
- '--platform', 'macosx', '--target-device', 'mac',
- '--minimum-deployment-target', os.environ['MACOSX_DEPLOYMENT_TARGET'],
- '--compile',
- os.path.abspath(os.environ['UNLOCALIZED_RESOURCES_FOLDER_PATH']),
- ])
- if keys:
- keys = json.loads(keys)
- for key, value in keys.iteritems():
- arg_name = '--' + key
- if isinstance(value, bool):
- if value:
- command_line.append(arg_name)
- elif isinstance(value, list):
- for v in value:
- command_line.append(arg_name)
- command_line.append(str(v))
- else:
- command_line.append(arg_name)
- command_line.append(str(value))
- # Note: actool crashes if input paths are relative, so use os.path.abspath
- # to get absolute path names for inputs.
- command_line.extend(map(os.path.abspath, inputs))
- subprocess.check_call(command_line)
-
- def ExecMergeInfoPlist(self, output, *inputs):
- """Merge multiple .plist files into a single .plist file."""
- merged_plist = {}
- for path in inputs:
- plist = self._LoadPlistMaybeBinary(path)
- self._MergePlist(merged_plist, plist)
- plistlib.writePlist(merged_plist, output)
-
- def ExecCodeSignBundle(self, key, resource_rules, entitlements, provisioning):
- """Code sign a bundle.
-
- This function tries to code sign an iOS bundle, following the same
- algorithm as Xcode:
- 1. copy ResourceRules.plist from the user or the SDK into the bundle,
- 2. pick the provisioning profile that best matches the bundle identifier,
- and copy it into the bundle as embedded.mobileprovision,
- 3. copy Entitlements.plist from user or SDK next to the bundle,
- 4. code sign the bundle.
- """
- resource_rules_path = self._InstallResourceRules(resource_rules)
- substitutions, overrides = self._InstallProvisioningProfile(
- provisioning, self._GetCFBundleIdentifier())
- entitlements_path = self._InstallEntitlements(
- entitlements, substitutions, overrides)
- subprocess.check_call([
- 'codesign', '--force', '--sign', key, '--resource-rules',
- resource_rules_path, '--entitlements', entitlements_path,
- os.path.join(
- os.environ['TARGET_BUILD_DIR'],
- os.environ['FULL_PRODUCT_NAME'])])
-
- def _InstallResourceRules(self, resource_rules):
- """Installs ResourceRules.plist from user or SDK into the bundle.
-
- Args:
- resource_rules: string, optional, path to the ResourceRules.plist file
- to use, default to "${SDKROOT}/ResourceRules.plist"
-
- Returns:
- Path to the copy of ResourceRules.plist into the bundle.
- """
- source_path = resource_rules
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['CONTENTS_FOLDER_PATH'],
- 'ResourceRules.plist')
- if not source_path:
- source_path = os.path.join(
- os.environ['SDKROOT'], 'ResourceRules.plist')
- shutil.copy2(source_path, target_path)
- return target_path
-
- def _InstallProvisioningProfile(self, profile, bundle_identifier):
- """Installs embedded.mobileprovision into the bundle.
-
- Args:
- profile: string, optional, short name of the .mobileprovision file
- to use, if empty or the file is missing, the best file installed
- will be used
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
-
- Returns:
- A tuple containing two dictionaries: variable substitutions and values
- to override when generating the entitlements file.
- """
- source_path, provisioning_data, team_id = self._FindProvisioningProfile(
- profile, bundle_identifier)
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['CONTENTS_FOLDER_PATH'],
- 'embedded.mobileprovision')
- shutil.copy2(source_path, target_path)
- substitutions = self._GetSubstitutions(bundle_identifier, team_id + '.')
- return substitutions, provisioning_data['Entitlements']
-
- def _FindProvisioningProfile(self, profile, bundle_identifier):
- """Finds the .mobileprovision file to use for signing the bundle.
-
- Checks all the installed provisioning profiles (or, if the user specified
- the PROVISIONING_PROFILE variable, only consults it) and selects the most
- specific one that corresponds to the bundle identifier.
-
- Args:
- profile: string, optional, short name of the .mobileprovision file
- to use, if empty or the file is missing, the best file installed
- will be used
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
-
- Returns:
- A tuple of the path to the selected provisioning profile, the data of
- the embedded plist in the provisioning profile and the team identifier
- to use for code signing.
-
- Raises:
- SystemExit: if no .mobileprovision can be used to sign the bundle.
- """
- profiles_dir = os.path.join(
- os.environ['HOME'], 'Library', 'MobileDevice', 'Provisioning Profiles')
- if not os.path.isdir(profiles_dir):
- print >>sys.stderr, (
- 'cannot find mobile provisioning for %s' % bundle_identifier)
- sys.exit(1)
- provisioning_profiles = None
- if profile:
- profile_path = os.path.join(profiles_dir, profile + '.mobileprovision')
- if os.path.exists(profile_path):
- provisioning_profiles = [profile_path]
- if not provisioning_profiles:
- provisioning_profiles = glob.glob(
- os.path.join(profiles_dir, '*.mobileprovision'))
- valid_provisioning_profiles = {}
- for profile_path in provisioning_profiles:
- profile_data = self._LoadProvisioningProfile(profile_path)
- app_id_pattern = profile_data.get(
- 'Entitlements', {}).get('application-identifier', '')
- for team_identifier in profile_data.get('TeamIdentifier', []):
- app_id = '%s.%s' % (team_identifier, bundle_identifier)
- if fnmatch.fnmatch(app_id, app_id_pattern):
- valid_provisioning_profiles[app_id_pattern] = (
- profile_path, profile_data, team_identifier)
- if not valid_provisioning_profiles:
- print >>sys.stderr, (
- 'cannot find mobile provisioning for %s' % bundle_identifier)
- sys.exit(1)
- # If the user has multiple provisioning profiles installed that can be
- # used for ${bundle_identifier}, pick the most specific one (ie. the
- # provisioning profile whose pattern is the longest).
- selected_key = max(valid_provisioning_profiles, key=lambda v: len(v))
- return valid_provisioning_profiles[selected_key]
-
- def _LoadProvisioningProfile(self, profile_path):
- """Extracts the plist embedded in a provisioning profile.
-
- Args:
- profile_path: string, path to the .mobileprovision file
-
- Returns:
- Content of the plist embedded in the provisioning profile as a dictionary.
- """
- with tempfile.NamedTemporaryFile() as temp:
- subprocess.check_call([
- 'security', 'cms', '-D', '-i', profile_path, '-o', temp.name])
- return self._LoadPlistMaybeBinary(temp.name)
-
- def _MergePlist(self, merged_plist, plist):
- """Merge |plist| into |merged_plist|."""
- for key, value in plist.iteritems():
- if isinstance(value, dict):
- merged_value = merged_plist.get(key, {})
- if isinstance(merged_value, dict):
- self._MergePlist(merged_value, value)
- merged_plist[key] = merged_value
- else:
- merged_plist[key] = value
- else:
- merged_plist[key] = value
-
- def _LoadPlistMaybeBinary(self, plist_path):
- """Loads into a memory a plist possibly encoded in binary format.
-
- This is a wrapper around plistlib.readPlist that tries to convert the
- plist to the XML format if it can't be parsed (assuming that it is in
- the binary format).
-
- Args:
- plist_path: string, path to a plist file, in XML or binary format
-
- Returns:
- Content of the plist as a dictionary.
- """
- try:
- # First, try to read the file using plistlib that only supports XML,
- # and if an exception is raised, convert a temporary copy to XML and
- # load that copy.
- return plistlib.readPlist(plist_path)
- except:
- pass
- with tempfile.NamedTemporaryFile() as temp:
- shutil.copy2(plist_path, temp.name)
- subprocess.check_call(['plutil', '-convert', 'xml1', temp.name])
- return plistlib.readPlist(temp.name)
-
- def _GetSubstitutions(self, bundle_identifier, app_identifier_prefix):
- """Constructs a dictionary of variable substitutions for Entitlements.plist.
-
- Args:
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
- app_identifier_prefix: string, value for AppIdentifierPrefix
-
- Returns:
- Dictionary of substitutions to apply when generating Entitlements.plist.
- """
- return {
- 'CFBundleIdentifier': bundle_identifier,
- 'AppIdentifierPrefix': app_identifier_prefix,
- }
-
- def _GetCFBundleIdentifier(self):
- """Extracts CFBundleIdentifier value from Info.plist in the bundle.
-
- Returns:
- Value of CFBundleIdentifier in the Info.plist located in the bundle.
- """
- info_plist_path = os.path.join(
- os.environ['TARGET_BUILD_DIR'],
- os.environ['INFOPLIST_PATH'])
- info_plist_data = self._LoadPlistMaybeBinary(info_plist_path)
- return info_plist_data['CFBundleIdentifier']
-
- def _InstallEntitlements(self, entitlements, substitutions, overrides):
- """Generates and install the ${BundleName}.xcent entitlements file.
-
- Expands variables "$(variable)" pattern in the source entitlements file,
- add extra entitlements defined in the .mobileprovision file and the copy
- the generated plist to "${BundlePath}.xcent".
-
- Args:
- entitlements: string, optional, path to the Entitlements.plist template
- to use, defaults to "${SDKROOT}/Entitlements.plist"
- substitutions: dictionary, variable substitutions
- overrides: dictionary, values to add to the entitlements
-
- Returns:
- Path to the generated entitlements file.
- """
- source_path = entitlements
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['PRODUCT_NAME'] + '.xcent')
- if not source_path:
- source_path = os.path.join(
- os.environ['SDKROOT'],
- 'Entitlements.plist')
- shutil.copy2(source_path, target_path)
- data = self._LoadPlistMaybeBinary(target_path)
- data = self._ExpandVariables(data, substitutions)
- if overrides:
- for key in overrides:
- if key not in data:
- data[key] = overrides[key]
- plistlib.writePlist(data, target_path)
- return target_path
-
- def _ExpandVariables(self, data, substitutions):
- """Expands variables "$(variable)" in data.
-
- Args:
- data: object, can be either string, list or dictionary
- substitutions: dictionary, variable substitutions to perform
-
- Returns:
- Copy of data where each reference to "$(variable)" has been replaced
- by the corresponding value found in substitutions, or left intact if
- the key was not found.
- """
- if isinstance(data, str):
- for key, value in substitutions.iteritems():
- data = data.replace('$(%s)' % key, value)
- return data
- if isinstance(data, list):
- return [self._ExpandVariables(v, substitutions) for v in data]
- if isinstance(data, dict):
- return {k: self._ExpandVariables(data[k], substitutions) for k in data}
- return data
-
-if __name__ == '__main__':
- sys.exit(main(sys.argv[1:]))
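
The _ExpandVariables helper in the gyp-mac-tool script above recursively walks strings, lists and dictionaries, replacing every "$(variable)" reference with its value from the substitutions dictionary built by _GetSubstitutions. A minimal runnable sketch of that behaviour follows; the bundle identifier and prefix values are illustrative, not taken from any real project.

    def expand(data, substitutions):
        # Strings: replace each "$(key)" occurrence with its substitution value.
        if isinstance(data, str):
            for key, value in substitutions.items():
                data = data.replace('$(%s)' % key, value)
            return data
        # Lists and dictionaries: recurse into every element.
        if isinstance(data, list):
            return [expand(v, substitutions) for v in data]
        if isinstance(data, dict):
            return {k: expand(v, substitutions) for k, v in data.items()}
        # Anything else (numbers, booleans) is returned unchanged.
        return data

    entitlements = {'application-identifier': '$(AppIdentifierPrefix)$(CFBundleIdentifier)'}
    subs = {'AppIdentifierPrefix': 'ABCDE12345.', 'CFBundleIdentifier': 'org.example.app'}
    print(expand(entitlements, subs))
    # {'application-identifier': 'ABCDE12345.org.example.app'}
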
diff --git a/test/addons-napi/test_buffer/build/test_buffer.target.mk b/test/addons-napi/test_buffer/build/test_buffer.target.mk
deleted file mode 100644
index 789051b14480fc..00000000000000
--- a/test/addons-napi/test_buffer/build/test_buffer.target.mk
+++ /dev/null
@@ -1,178 +0,0 @@
-# This file is generated by gyp; do not edit.
-
-TOOLSET := target
-TARGET := test_buffer
-DEFS_Debug := \
- '-DNODE_GYP_MODULE_NAME=test_buffer' \
- '-DUSING_UV_SHARED=1' \
- '-DUSING_V8_SHARED=1' \
- '-DV8_DEPRECATION_WARNINGS=1' \
- '-D_DARWIN_USE_64_BIT_INODE=1' \
- '-D_LARGEFILE_SOURCE' \
- '-D_FILE_OFFSET_BITS=64' \
- '-DBUILDING_NODE_EXTENSION' \
- '-DDEBUG' \
- '-D_DEBUG' \
- '-DV8_ENABLE_CHECKS'
-
-# Flags passed to all source files.
-CFLAGS_Debug := \
- -O0 \
- -gdwarf-2 \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -Wall \
- -Wendif-labels \
- -W \
- -Wno-unused-parameter
-
-# Flags passed to only C files.
-CFLAGS_C_Debug := \
- -fno-strict-aliasing
-
-# Flags passed to only C++ files.
-CFLAGS_CC_Debug := \
- -std=gnu++0x \
- -stdlib=libc++ \
- -fno-rtti \
- -fno-exceptions \
- -fno-threadsafe-statics \
- -fno-strict-aliasing
-
-# Flags passed to only ObjC files.
-CFLAGS_OBJC_Debug :=
-
-# Flags passed to only ObjC++ files.
-CFLAGS_OBJCC_Debug :=
-
-INCS_Debug := \
- -I/Users/trott/io.js/include/node \
- -I/Users/trott/io.js/src \
- -I/Users/trott/io.js/deps/uv/include \
- -I/Users/trott/io.js/deps/v8/include
-
-DEFS_Release := \
- '-DNODE_GYP_MODULE_NAME=test_buffer' \
- '-DUSING_UV_SHARED=1' \
- '-DUSING_V8_SHARED=1' \
- '-DV8_DEPRECATION_WARNINGS=1' \
- '-D_DARWIN_USE_64_BIT_INODE=1' \
- '-D_LARGEFILE_SOURCE' \
- '-D_FILE_OFFSET_BITS=64' \
- '-DBUILDING_NODE_EXTENSION'
-
-# Flags passed to all source files.
-CFLAGS_Release := \
- -Os \
- -gdwarf-2 \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -Wall \
- -Wendif-labels \
- -W \
- -Wno-unused-parameter
-
-# Flags passed to only C files.
-CFLAGS_C_Release := \
- -fno-strict-aliasing
-
-# Flags passed to only C++ files.
-CFLAGS_CC_Release := \
- -std=gnu++0x \
- -stdlib=libc++ \
- -fno-rtti \
- -fno-exceptions \
- -fno-threadsafe-statics \
- -fno-strict-aliasing
-
-# Flags passed to only ObjC files.
-CFLAGS_OBJC_Release :=
-
-# Flags passed to only ObjC++ files.
-CFLAGS_OBJCC_Release :=
-
-INCS_Release := \
- -I/Users/trott/io.js/include/node \
- -I/Users/trott/io.js/src \
- -I/Users/trott/io.js/deps/uv/include \
- -I/Users/trott/io.js/deps/v8/include
-
-OBJS := \
- $(obj).target/$(TARGET)/test_buffer.o
-
-# Add to the list of files we specially track dependencies for.
-all_deps += $(OBJS)
-
-# CFLAGS et al overrides must be target-local.
-# See "Target-specific Variable Values" in the GNU Make manual.
-$(OBJS): TOOLSET := $(TOOLSET)
-$(OBJS): GYP_CFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE))
-$(OBJS): GYP_CXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE))
-$(OBJS): GYP_OBJCFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) $(CFLAGS_OBJC_$(BUILDTYPE))
-$(OBJS): GYP_OBJCXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) $(CFLAGS_OBJCC_$(BUILDTYPE))
-
-# Suffix rules, putting all outputs into $(obj).
-
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-# Try building from generated source, too.
-
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-# End of this set of suffix rules
-### Rules for final target.
-LDFLAGS_Debug := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -L$(builddir) \
- -stdlib=libc++
-
-LIBTOOLFLAGS_Debug := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first
-
-LDFLAGS_Release := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -L$(builddir) \
- -stdlib=libc++
-
-LIBTOOLFLAGS_Release := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first
-
-LIBS :=
-
-$(builddir)/test_buffer.node: GYP_LDFLAGS := $(LDFLAGS_$(BUILDTYPE))
-$(builddir)/test_buffer.node: LIBS := $(LIBS)
-$(builddir)/test_buffer.node: GYP_LIBTOOLFLAGS := $(LIBTOOLFLAGS_$(BUILDTYPE))
-$(builddir)/test_buffer.node: TOOLSET := $(TOOLSET)
-$(builddir)/test_buffer.node: $(OBJS) FORCE_DO_CMD
- $(call do_cmd,solink_module)
-
-all_deps += $(builddir)/test_buffer.node
-# Add target alias
-.PHONY: test_buffer
-test_buffer: $(builddir)/test_buffer.node
-
-# Short alias for building this executable.
-.PHONY: test_buffer.node
-test_buffer.node: $(builddir)/test_buffer.node
-
-# Add executable to "all" target.
-.PHONY: all
-all: $(builddir)/test_buffer.node
-
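
The generated test_buffer.target.mk above keys every flag set off $(BUILDTYPE): the DEFS_*, INCS_* and CFLAGS_* variables are selected by configuration name and concatenated into GYP_CFLAGS for each object file. A rough Python sketch of that selection; the flag lists and include paths are abbreviated placeholders, not the full set from the makefile.

    DEFS = {
        'Debug':   ['-DNODE_GYP_MODULE_NAME=test_buffer', '-DDEBUG', '-D_DEBUG'],
        'Release': ['-DNODE_GYP_MODULE_NAME=test_buffer'],
    }
    CFLAGS = {
        'Debug':   ['-O0', '-gdwarf-2', '-arch', 'x86_64'],
        'Release': ['-Os', '-gdwarf-2', '-arch', 'x86_64'],
    }
    INCS = ['-I/path/to/node/include', '-I/path/to/v8/include']  # placeholder paths

    def gyp_cflags(buildtype):
        # Mirrors GYP_CFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE))
        return DEFS[buildtype] + INCS + CFLAGS[buildtype]

    print(' '.join(gyp_cflags('Release')))
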
diff --git a/test/addons-napi/test_constructor/build/Makefile b/test/addons-napi/test_constructor/build/Makefile
deleted file mode 100644
index eaa7fc33d320c6..00000000000000
--- a/test/addons-napi/test_constructor/build/Makefile
+++ /dev/null
@@ -1,342 +0,0 @@
-# We borrow heavily from the kernel build setup, though we are simpler since
-# we don't have Kconfig tweaking settings on us.
-
-# The implicit make rules have it looking for RCS files, among other things.
-# We instead explicitly write all the rules we care about.
-# It's even quicker (saves ~200ms) to pass -r on the command line.
-MAKEFLAGS=-r
-
-# The source directory tree.
-srcdir := ..
-abs_srcdir := $(abspath $(srcdir))
-
-# The name of the builddir.
-builddir_name ?= .
-
-# The V=1 flag on command line makes us verbosely print command lines.
-ifdef V
- quiet=
-else
- quiet=quiet_
-endif
-
-# Specify BUILDTYPE=Release on the command line for a release build.
-BUILDTYPE ?= Release
-
-# Directory all our build output goes into.
-# Note that this must be two directories beneath src/ for unit tests to pass,
-# as they reach into the src/ directory for data with relative paths.
-builddir ?= $(builddir_name)/$(BUILDTYPE)
-abs_builddir := $(abspath $(builddir))
-depsdir := $(builddir)/.deps
-
-# Object output directory.
-obj := $(builddir)/obj
-abs_obj := $(abspath $(obj))
-
-# We build up a list of every single one of the targets so we can slurp in the
-# generated dependency rule Makefiles in one pass.
-all_deps :=
-
-
-
-CC.target ?= $(CC)
-CFLAGS.target ?= $(CPPFLAGS) $(CFLAGS)
-CXX.target ?= $(CXX)
-CXXFLAGS.target ?= $(CPPFLAGS) $(CXXFLAGS)
-LINK.target ?= $(LINK)
-LDFLAGS.target ?= $(LDFLAGS)
-AR.target ?= $(AR)
-
-# C++ apps need to be linked with g++.
-LINK ?= $(CXX.target)
-
-# TODO(evan): move all cross-compilation logic to gyp-time so we don't need
-# to replicate this environment fallback in make as well.
-CC.host ?= gcc
-CFLAGS.host ?= $(CPPFLAGS_host) $(CFLAGS_host)
-CXX.host ?= g++
-CXXFLAGS.host ?= $(CPPFLAGS_host) $(CXXFLAGS_host)
-LINK.host ?= $(CXX.host)
-LDFLAGS.host ?=
-AR.host ?= ar
-
-# Define a dir function that can handle spaces.
-# http://www.gnu.org/software/make/manual/make.html#Syntax-of-Functions
-# "leading spaces cannot appear in the text of the first argument as written.
-# These characters can be put into the argument value by variable substitution."
-empty :=
-space := $(empty) $(empty)
-
-# http://stackoverflow.com/questions/1189781/using-make-dir-or-notdir-on-a-path-with-spaces
-replace_spaces = $(subst $(space),?,$1)
-unreplace_spaces = $(subst ?,$(space),$1)
-dirx = $(call unreplace_spaces,$(dir $(call replace_spaces,$1)))
-
-# Flags to make gcc output dependency info. Note that you need to be
-# careful here to use the flags that ccache and distcc can understand.
-# We write to a dep file on the side first and then rename at the end
-# so we can't end up with a broken dep file.
-depfile = $(depsdir)/$(call replace_spaces,$@).d
-DEPFLAGS = -MMD -MF $(depfile).raw
-
-# We have to fixup the deps output in a few ways.
-# (1) the file output should mention the proper .o file.
-# ccache or distcc lose the path to the target, so we convert a rule of
-# the form:
-# foobar.o: DEP1 DEP2
-# into
-# path/to/foobar.o: DEP1 DEP2
-# (2) we want missing files not to cause us to fail to build.
-# We want to rewrite
-# foobar.o: DEP1 DEP2 \
-# DEP3
-# to
-# DEP1:
-# DEP2:
-# DEP3:
-# so if the files are missing, they're just considered phony rules.
-# We have to do some pretty insane escaping to get those backslashes
-# and dollar signs past make, the shell, and sed at the same time.
-# Doesn't work with spaces, but that's fine: .d files have spaces in
-# their names replaced with other characters.
-define fixup_dep
-# The depfile may not exist if the input file didn't have any #includes.
-touch $(depfile).raw
-# Fixup path as in (1).
-sed -e "s|^$(notdir $@)|$@|" $(depfile).raw >> $(depfile)
-# Add extra rules as in (2).
-# We remove slashes and replace spaces with new lines;
-# remove blank lines;
-# delete the first line and append a colon to the remaining lines.
-sed -e 's|\\||' -e 'y| |\n|' $(depfile).raw |\
- grep -v '^$$' |\
- sed -e 1d -e 's|$$|:|' \
- >> $(depfile)
-rm $(depfile).raw
-endef
-
-# Command definitions:
-# - cmd_foo is the actual command to run;
-# - quiet_cmd_foo is the brief-output summary of the command.
-
-quiet_cmd_cc = CC($(TOOLSET)) $@
-cmd_cc = $(CC.$(TOOLSET)) $(GYP_CFLAGS) $(DEPFLAGS) $(CFLAGS.$(TOOLSET)) -c -o $@ $<
-
-quiet_cmd_cxx = CXX($(TOOLSET)) $@
-cmd_cxx = $(CXX.$(TOOLSET)) $(GYP_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-
-quiet_cmd_objc = CXX($(TOOLSET)) $@
-cmd_objc = $(CC.$(TOOLSET)) $(GYP_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-quiet_cmd_objcxx = CXX($(TOOLSET)) $@
-cmd_objcxx = $(CXX.$(TOOLSET)) $(GYP_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-# Commands for precompiled header files.
-quiet_cmd_pch_c = CXX($(TOOLSET)) $@
-cmd_pch_c = $(CC.$(TOOLSET)) $(GYP_PCH_CFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-quiet_cmd_pch_cc = CXX($(TOOLSET)) $@
-cmd_pch_cc = $(CC.$(TOOLSET)) $(GYP_PCH_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-quiet_cmd_pch_m = CXX($(TOOLSET)) $@
-cmd_pch_m = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $<
-quiet_cmd_pch_mm = CXX($(TOOLSET)) $@
-cmd_pch_mm = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-# gyp-mac-tool is written next to the root Makefile by gyp.
-# Use $(4) for the command, since $(2) and $(3) are used as flag by do_cmd
-# already.
-quiet_cmd_mac_tool = MACTOOL $(4) $<
-cmd_mac_tool = ./gyp-mac-tool $(4) $< "$@"
-
-quiet_cmd_mac_package_framework = PACKAGE FRAMEWORK $@
-cmd_mac_package_framework = ./gyp-mac-tool package-framework "$@" $(4)
-
-quiet_cmd_infoplist = INFOPLIST $@
-cmd_infoplist = $(CC.$(TOOLSET)) -E -P -Wno-trigraphs -x c $(INFOPLIST_DEFINES) "$<" -o "$@"
-
-quiet_cmd_touch = TOUCH $@
-cmd_touch = touch $@
-
-quiet_cmd_copy = COPY $@
-# send stderr to /dev/null to ignore messages when linking directories.
-cmd_copy = rm -rf "$@" && cp -af "$<" "$@"
-
-quiet_cmd_alink = LIBTOOL-STATIC $@
-cmd_alink = rm -f $@ && ./gyp-mac-tool filter-libtool libtool $(GYP_LIBTOOLFLAGS) -static -o $@ $(filter %.o,$^)
-
-quiet_cmd_link = LINK($(TOOLSET)) $@
-cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS)
-
-quiet_cmd_solink = SOLINK($(TOOLSET)) $@
-cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS)
-
-quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@
-cmd_solink_module = $(LINK.$(TOOLSET)) -bundle $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS)
-
-
-# Define an escape_quotes function to escape single quotes.
-# This allows us to handle quotes properly as long as we always
-# use single quotes and escape_quotes.
-escape_quotes = $(subst ','\'',$(1))
-# This comment is here just to include a ' to unconfuse syntax highlighting.
-# Define an escape_vars function to escape '$' variable syntax.
-# This allows us to read/write command lines with shell variables (e.g.
-# $LD_LIBRARY_PATH), without triggering make substitution.
-escape_vars = $(subst $$,$$$$,$(1))
-# Helper that expands to a shell command to echo a string exactly as it is in
-# make. This uses printf instead of echo because printf's behaviour with respect
-# to escape sequences is more portable than echo's across different shells
-# (e.g., dash, bash).
-exact_echo = printf '%s\n' '$(call escape_quotes,$(1))'
-
-# Helper to compare the command we're about to run against the command
-# we logged the last time we ran the command. Produces an empty
-# string (false) when the commands match.
-# Tricky point: Make has no string-equality test function.
-# The kernel uses the following, but it seems like it would have false
-# positives, where one string reordered its arguments.
-# arg_check = $(strip $(filter-out $(cmd_$(1)), $(cmd_$@)) \
-# $(filter-out $(cmd_$@), $(cmd_$(1))))
-# We instead substitute each for the empty string into the other, and
-# say they're equal if both substitutions produce the empty string.
-# .d files contain ? instead of spaces, take that into account.
-command_changed = $(or $(subst $(cmd_$(1)),,$(cmd_$(call replace_spaces,$@))),\
- $(subst $(cmd_$(call replace_spaces,$@)),,$(cmd_$(1))))
-
-# Helper that is non-empty when a prerequisite changes.
-# Normally make does this implicitly, but we force rules to always run
-# so we can check their command lines.
-# $? -- new prerequisites
-# $| -- order-only dependencies
-prereq_changed = $(filter-out FORCE_DO_CMD,$(filter-out $|,$?))
-
-# Helper that executes all postbuilds until one fails.
-define do_postbuilds
- @E=0;\
- for p in $(POSTBUILDS); do\
- eval $$p;\
- E=$$?;\
- if [ $$E -ne 0 ]; then\
- break;\
- fi;\
- done;\
- if [ $$E -ne 0 ]; then\
- rm -rf "$@";\
- exit $$E;\
- fi
-endef
-
-# do_cmd: run a command via the above cmd_foo names, if necessary.
-# Should always run for a given target to handle command-line changes.
-# Second argument, if non-zero, makes it do asm/C/C++ dependency munging.
-# Third argument, if non-zero, makes it do POSTBUILDS processing.
-# Note: We intentionally do NOT call dirx for depfile, since it contains ? for
-# spaces already and dirx strips the ? characters.
-define do_cmd
-$(if $(or $(command_changed),$(prereq_changed)),
- @$(call exact_echo, $($(quiet)cmd_$(1)))
- @mkdir -p "$(call dirx,$@)" "$(dir $(depfile))"
- $(if $(findstring flock,$(word 2,$(cmd_$1))),
- @$(cmd_$(1))
- @echo " $(quiet_cmd_$(1)): Finished",
- @$(cmd_$(1))
- )
- @$(call exact_echo,$(call escape_vars,cmd_$(call replace_spaces,$@) := $(cmd_$(1)))) > $(depfile)
- @$(if $(2),$(fixup_dep))
- $(if $(and $(3), $(POSTBUILDS)),
- $(call do_postbuilds)
- )
-)
-endef
-
-# Declare the "all" target first so it is the default,
-# even though we don't have the deps yet.
-.PHONY: all
-all:
-
-# make looks for ways to re-generate included makefiles, but in our case, we
-# don't have a direct way. Explicitly telling make that it has nothing to do
-# for them makes it go faster.
-%.d: ;
-
-# Use FORCE_DO_CMD to force a target to run. Should be coupled with
-# do_cmd.
-.PHONY: FORCE_DO_CMD
-FORCE_DO_CMD:
-
-TOOLSET := target
-# Suffix rules, putting all outputs into $(obj).
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cpp FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cxx FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.m FORCE_DO_CMD
- @$(call do_cmd,objc,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.mm FORCE_DO_CMD
- @$(call do_cmd,objcxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.S FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.s FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-# Try building from generated source, too.
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cpp FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cxx FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.m FORCE_DO_CMD
- @$(call do_cmd,objc,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.mm FORCE_DO_CMD
- @$(call do_cmd,objcxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.S FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.s FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-$(obj).$(TOOLSET)/%.o: $(obj)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.cpp FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.cxx FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.m FORCE_DO_CMD
- @$(call do_cmd,objc,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.mm FORCE_DO_CMD
- @$(call do_cmd,objcxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.S FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.s FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-
-ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
- $(findstring $(join ^,$(prefix)),\
- $(join ^,test_constructor.target.mk)))),)
- include test_constructor.target.mk
-endif
-
-quiet_cmd_regen_makefile = ACTION Regenerating $@
-cmd_regen_makefile = cd $(srcdir); /Users/trott/io.js/deps/npm/node_modules/node-gyp/gyp/gyp_main.py -fmake --ignore-environment "--toplevel-dir=." -I/Users/trott/io.js/test/addons-napi/test_constructor/build/config.gypi -I/Users/trott/io.js/deps/npm/node_modules/node-gyp/addon.gypi -I/Users/trott/io.js/common.gypi "--depth=." "-Goutput_dir=." "--generator-output=build" "-Dlibrary=shared_library" "-Dvisibility=default" "-Dnode_root_dir=/Users/trott/io.js" "-Dnode_gyp_dir=/Users/trott/io.js/deps/npm/node_modules/node-gyp" "-Dnode_lib_file=/Users/trott/io.js/$(Configuration)/node.lib" "-Dmodule_root_dir=/Users/trott/io.js/test/addons-napi/test_constructor" "-Dnode_engine=v8" binding.gyp
-Makefile: $(srcdir)/../../../deps/npm/node_modules/node-gyp/addon.gypi $(srcdir)/build/config.gypi $(srcdir)/binding.gyp $(srcdir)/../../../common.gypi
- $(call do_cmd,regen_makefile)
-
-# "all" is a concatenation of the "all" targets from all the included
-# sub-makefiles. This is just here to clarify.
-all:
-
-# Add in dependency-tracking rules. $(all_deps) is the list of every single
-# target in our tree. Only consider the ones with .d (dependency) info:
-d_files := $(wildcard $(foreach f,$(all_deps),$(depsdir)/$(f).d))
-ifneq ($(d_files),)
- include $(d_files)
-endif
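
The command_changed helper in the Makefile above works around make's lack of a string-equality function: it substitutes each stored command line into the other and treats them as equal only when both substitutions come out empty, which forces a rebuild whenever the command line changes. The same idea expressed in Python for clarity; the command strings are placeholders.

    def commands_equal(a, b):
        # $(subst a,,b) removes every occurrence of a from b; if substituting each
        # string into the other leaves nothing behind, the two strings are identical.
        return a.replace(b, '') == '' and b.replace(a, '') == ''

    old_cmd = 'cc -O0 -c -o foo.o foo.c'
    new_cmd = 'cc -Os -c -o foo.o foo.c'
    print(commands_equal(old_cmd, old_cmd))  # True  -> target is up to date
    print(commands_equal(old_cmd, new_cmd))  # False -> re-run and re-log the command
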
diff --git a/test/addons-napi/test_constructor/build/binding.Makefile b/test/addons-napi/test_constructor/build/binding.Makefile
deleted file mode 100644
index f6f9987fe08426..00000000000000
--- a/test/addons-napi/test_constructor/build/binding.Makefile
+++ /dev/null
@@ -1,6 +0,0 @@
-# This file is generated by gyp; do not edit.
-
-export builddir_name ?= ./build/.
-.PHONY: all
-all:
- $(MAKE) test_constructor
diff --git a/test/addons-napi/test_constructor/build/config.gypi b/test/addons-napi/test_constructor/build/config.gypi
deleted file mode 100644
index 6a35cfccbb772b..00000000000000
--- a/test/addons-napi/test_constructor/build/config.gypi
+++ /dev/null
@@ -1,70 +0,0 @@
-# Do not edit. File was generated by node-gyp's "configure" step
-{
- "target_defaults": {
- "cflags": [],
- "default_configuration": "Release",
- "defines": [],
- "include_dirs": [],
- "libraries": []
- },
- "variables": {
- "asan": 0,
- "coverage": "false",
- "debug_devtools": "node",
- "debug_http2": "false",
- "debug_nghttp2": "false",
- "force_dynamic_crt": 0,
- "host_arch": "x64",
- "icu_data_file": "icudt59l.dat",
- "icu_data_in": "../../deps/icu-small/source/data/in/icudt59l.dat",
- "icu_endianness": "l",
- "icu_gyp_path": "tools/icu/icu-generic.gyp",
- "icu_locales": "en,root",
- "icu_path": "deps/icu-small",
- "icu_small": "true",
- "icu_ver_major": "59",
- "llvm_version": 0,
- "node_byteorder": "little",
- "node_enable_d8": "false",
- "node_enable_v8_vtunejit": "false",
- "node_install_npm": "true",
- "node_module_version": 58,
- "node_no_browser_globals": "false",
- "node_prefix": "/usr/local",
- "node_release_urlbase": "",
- "node_shared": "false",
- "node_shared_cares": "false",
- "node_shared_http_parser": "false",
- "node_shared_libuv": "false",
- "node_shared_openssl": "false",
- "node_shared_zlib": "false",
- "node_tag": "",
- "node_use_bundled_v8": "true",
- "node_use_dtrace": "true",
- "node_use_etw": "false",
- "node_use_lttng": "false",
- "node_use_openssl": "true",
- "node_use_perfctr": "false",
- "node_use_v8_platform": "true",
- "node_without_node_options": "false",
- "openssl_fips": "",
- "openssl_no_asm": 0,
- "shlib_suffix": "58.dylib",
- "target_arch": "x64",
- "uv_parent_path": "/deps/uv/",
- "uv_use_dtrace": "true",
- "v8_enable_gdbjit": 0,
- "v8_enable_i18n_support": 1,
- "v8_enable_inspector": 1,
- "v8_no_strict_aliasing": 1,
- "v8_optimized_debug": 0,
- "v8_promise_internal_field_count": 1,
- "v8_random_seed": 0,
- "v8_trace_maps": 0,
- "v8_use_snapshot": "true",
- "want_separate_host_toolset": 0,
- "xcode_version": "8.0",
- "nodedir": "/Users/trott/io.js",
- "standalone_static_library": 1
- }
-}
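
config.gypi stores the output of node-gyp's configure step as a dictionary literal preceded by a comment line. One way to read such a file back into Python is sketched below; the stripping of full-line '#' comments, the use of ast.literal_eval and the file path are assumptions for illustration, not part of the generated file.

    import ast

    def load_config_gypi(path):
        with open(path) as f:
            # Drop the "# Do not edit ..." comment lines, then parse the rest
            # of the text as a Python dictionary literal.
            text = ''.join(line for line in f if not line.lstrip().startswith('#'))
        return ast.literal_eval(text)

    config = load_config_gypi('build/config.gypi')
    print(config['variables']['target_arch'])  # e.g. 'x64'
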
diff --git a/test/addons-napi/test_constructor/build/gyp-mac-tool b/test/addons-napi/test_constructor/build/gyp-mac-tool
deleted file mode 100755
index 8ef02b0493a003..00000000000000
--- a/test/addons-napi/test_constructor/build/gyp-mac-tool
+++ /dev/null
@@ -1,611 +0,0 @@
-#!/usr/bin/env python
-# Generated by gyp. Do not edit.
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Utility functions to perform Xcode-style build steps.
-
-These functions are executed via gyp-mac-tool when using the Makefile generator.
-"""
-
-import fcntl
-import fnmatch
-import glob
-import json
-import os
-import plistlib
-import re
-import shutil
-import string
-import subprocess
-import sys
-import tempfile
-
-
-def main(args):
- executor = MacTool()
- exit_code = executor.Dispatch(args)
- if exit_code is not None:
- sys.exit(exit_code)
-
-
-class MacTool(object):
- """This class performs all the Mac tooling steps. The methods can either be
- executed directly, or dispatched from an argument list."""
-
- def Dispatch(self, args):
- """Dispatches a string command to a method."""
- if len(args) < 1:
- raise Exception("Not enough arguments")
-
- method = "Exec%s" % self._CommandifyName(args[0])
- return getattr(self, method)(*args[1:])
-
- def _CommandifyName(self, name_string):
- """Transforms a tool name like copy-info-plist to CopyInfoPlist"""
- return name_string.title().replace('-', '')
-
- def ExecCopyBundleResource(self, source, dest, convert_to_binary):
- """Copies a resource file to the bundle/Resources directory, performing any
- necessary compilation on each resource."""
- extension = os.path.splitext(source)[1].lower()
- if os.path.isdir(source):
- # Copy tree.
- # TODO(thakis): This copies file attributes like mtime, while the
- # single-file branch below doesn't. This should probably be changed to
- # be consistent with the single-file branch.
- if os.path.exists(dest):
- shutil.rmtree(dest)
- shutil.copytree(source, dest)
- elif extension == '.xib':
- return self._CopyXIBFile(source, dest)
- elif extension == '.storyboard':
- return self._CopyXIBFile(source, dest)
- elif extension == '.strings':
- self._CopyStringsFile(source, dest, convert_to_binary)
- else:
- shutil.copy(source, dest)
-
- def _CopyXIBFile(self, source, dest):
- """Compiles a XIB file with ibtool into a binary plist in the bundle."""
-
- # ibtool sometimes crashes with relative paths. See crbug.com/314728.
- base = os.path.dirname(os.path.realpath(__file__))
- if os.path.relpath(source):
- source = os.path.join(base, source)
- if os.path.relpath(dest):
- dest = os.path.join(base, dest)
-
- args = ['xcrun', 'ibtool', '--errors', '--warnings', '--notices',
- '--output-format', 'human-readable-text', '--compile', dest, source]
- ibtool_section_re = re.compile(r'/\*.*\*/')
- ibtool_re = re.compile(r'.*note:.*is clipping its content')
- ibtoolout = subprocess.Popen(args, stdout=subprocess.PIPE)
- current_section_header = None
- for line in ibtoolout.stdout:
- if ibtool_section_re.match(line):
- current_section_header = line
- elif not ibtool_re.match(line):
- if current_section_header:
- sys.stdout.write(current_section_header)
- current_section_header = None
- sys.stdout.write(line)
- return ibtoolout.returncode
-
- def _ConvertToBinary(self, dest):
- subprocess.check_call([
- 'xcrun', 'plutil', '-convert', 'binary1', '-o', dest, dest])
-
- def _CopyStringsFile(self, source, dest, convert_to_binary):
- """Copies a .strings file using iconv to reconvert the input into UTF-16."""
- input_code = self._DetectInputEncoding(source) or "UTF-8"
-
- # Xcode's CpyCopyStringsFile / builtin-copyStrings seems to call
- # CFPropertyListCreateFromXMLData() behind the scenes; at least it prints
- # CFPropertyListCreateFromXMLData(): Old-style plist parser: missing
- # semicolon in dictionary.
- # on invalid files. Do the same kind of validation.
- import CoreFoundation
- s = open(source, 'rb').read()
- d = CoreFoundation.CFDataCreate(None, s, len(s))
- _, error = CoreFoundation.CFPropertyListCreateFromXMLData(None, d, 0, None)
- if error:
- return
-
- fp = open(dest, 'wb')
- fp.write(s.decode(input_code).encode('UTF-16'))
- fp.close()
-
- if convert_to_binary == 'True':
- self._ConvertToBinary(dest)
-
- def _DetectInputEncoding(self, file_name):
- """Reads the first few bytes from file_name and tries to guess the text
- encoding. Returns None as a guess if it can't detect it."""
- fp = open(file_name, 'rb')
- try:
- header = fp.read(3)
- except e:
- fp.close()
- return None
- fp.close()
- if header.startswith("\xFE\xFF"):
- return "UTF-16"
- elif header.startswith("\xFF\xFE"):
- return "UTF-16"
- elif header.startswith("\xEF\xBB\xBF"):
- return "UTF-8"
- else:
- return None
-
- def ExecCopyInfoPlist(self, source, dest, convert_to_binary, *keys):
- """Copies the |source| Info.plist to the destination directory |dest|."""
- # Read the source Info.plist into memory.
- fd = open(source, 'r')
- lines = fd.read()
- fd.close()
-
- # Insert synthesized key/value pairs (e.g. BuildMachineOSBuild).
- plist = plistlib.readPlistFromString(lines)
- if keys:
- plist = dict(plist.items() + json.loads(keys[0]).items())
- lines = plistlib.writePlistToString(plist)
-
- # Go through all the environment variables and replace them as variables in
- # the file.
- IDENT_RE = re.compile(r'[/\s]')
- for key in os.environ:
- if key.startswith('_'):
- continue
- evar = '${%s}' % key
- evalue = os.environ[key]
- lines = string.replace(lines, evar, evalue)
-
- # Xcode supports various suffixes on environment variables, which are
- # all undocumented. :rfc1034identifier is used in the standard project
- # template these days, and :identifier was used earlier. They are used to
- # convert non-url characters into things that look like valid urls --
- # except that the replacement character for :identifier, '_' isn't valid
- # in a URL either -- oops, hence :rfc1034identifier was born.
- evar = '${%s:identifier}' % key
- evalue = IDENT_RE.sub('_', os.environ[key])
- lines = string.replace(lines, evar, evalue)
-
- evar = '${%s:rfc1034identifier}' % key
- evalue = IDENT_RE.sub('-', os.environ[key])
- lines = string.replace(lines, evar, evalue)
-
- # Remove any keys with values that haven't been replaced.
- lines = lines.split('\n')
- for i in range(len(lines)):
- if lines[i].strip().startswith("${"):
- lines[i] = None
- lines[i - 1] = None
- lines = '\n'.join(filter(lambda x: x is not None, lines))
-
- # Write out the file with variables replaced.
- fd = open(dest, 'w')
- fd.write(lines)
- fd.close()
-
- # Now write out PkgInfo file now that the Info.plist file has been
- # "compiled".
- self._WritePkgInfo(dest)
-
- if convert_to_binary == 'True':
- self._ConvertToBinary(dest)
-
- def _WritePkgInfo(self, info_plist):
- """This writes the PkgInfo file from the data stored in Info.plist."""
- plist = plistlib.readPlist(info_plist)
- if not plist:
- return
-
- # Only create PkgInfo for executable types.
- package_type = plist['CFBundlePackageType']
- if package_type != 'APPL':
- return
-
- # The format of PkgInfo is eight characters, representing the bundle type
- # and bundle signature, each four characters. If that is missing, four
- # '?' characters are used instead.
- signature_code = plist.get('CFBundleSignature', '????')
- if len(signature_code) != 4: # Wrong length resets everything, too.
- signature_code = '?' * 4
-
- dest = os.path.join(os.path.dirname(info_plist), 'PkgInfo')
- fp = open(dest, 'w')
- fp.write('%s%s' % (package_type, signature_code))
- fp.close()
-
- def ExecFlock(self, lockfile, *cmd_list):
- """Emulates the most basic behavior of Linux's flock(1)."""
- # Rely on exception handling to report errors.
- fd = os.open(lockfile, os.O_RDONLY|os.O_NOCTTY|os.O_CREAT, 0o666)
- fcntl.flock(fd, fcntl.LOCK_EX)
- return subprocess.call(cmd_list)
-
- def ExecFilterLibtool(self, *cmd_list):
- """Calls libtool and filters out '/path/to/libtool: file: foo.o has no
- symbols'."""
- libtool_re = re.compile(r'^.*libtool: file: .* has no symbols$')
- libtool_re5 = re.compile(
- r'^.*libtool: warning for library: ' +
- r'.* the table of contents is empty ' +
- r'\(no object file members in the library define global symbols\)$')
- env = os.environ.copy()
- # Ref:
- # http://www.opensource.apple.com/source/cctools/cctools-809/misc/libtool.c
- # The problem with this flag is that it resets the file mtime on the file to
- # epoch=0, e.g. 1970-1-1 or 1969-12-31 depending on timezone.
- env['ZERO_AR_DATE'] = '1'
- libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env)
- _, err = libtoolout.communicate()
- for line in err.splitlines():
- if not libtool_re.match(line) and not libtool_re5.match(line):
- print >>sys.stderr, line
- # Unconditionally touch the output .a file on the command line if present
- # and the command succeeded. A bit hacky.
- if not libtoolout.returncode:
- for i in range(len(cmd_list) - 1):
- if cmd_list[i] == "-o" and cmd_list[i+1].endswith('.a'):
- os.utime(cmd_list[i+1], None)
- break
- return libtoolout.returncode
-
- def ExecPackageFramework(self, framework, version):
- """Takes a path to Something.framework and the Current version of that and
- sets up all the symlinks."""
- # Find the name of the binary based on the part before the ".framework".
- binary = os.path.basename(framework).split('.')[0]
-
- CURRENT = 'Current'
- RESOURCES = 'Resources'
- VERSIONS = 'Versions'
-
- if not os.path.exists(os.path.join(framework, VERSIONS, version, binary)):
- # Binary-less frameworks don't seem to contain symlinks (see e.g.
- # chromium's out/Debug/org.chromium.Chromium.manifest/ bundle).
- return
-
- # Move into the framework directory to set the symlinks correctly.
- pwd = os.getcwd()
- os.chdir(framework)
-
- # Set up the Current version.
- self._Relink(version, os.path.join(VERSIONS, CURRENT))
-
- # Set up the root symlinks.
- self._Relink(os.path.join(VERSIONS, CURRENT, binary), binary)
- self._Relink(os.path.join(VERSIONS, CURRENT, RESOURCES), RESOURCES)
-
- # Back to where we were before!
- os.chdir(pwd)
-
- def _Relink(self, dest, link):
- """Creates a symlink to |dest| named |link|. If |link| already exists,
- it is overwritten."""
- if os.path.lexists(link):
- os.remove(link)
- os.symlink(dest, link)
-
- def ExecCompileXcassets(self, keys, *inputs):
- """Compiles multiple .xcassets files into a single .car file.
-
- This invokes 'actool' to compile all the input .xcassets files. The
- |keys| argument is a json-encoded dictionary of extra arguments to
- pass to 'actool' when the asset catalogs contain an application icon
- or a launch image.
-
- Note that 'actool' does not create the Assets.car file if the asset
- catalogs do not contain an imageset.
- """
- command_line = [
- 'xcrun', 'actool', '--output-format', 'human-readable-text',
- '--compress-pngs', '--notices', '--warnings', '--errors',
- ]
- is_iphone_target = 'IPHONEOS_DEPLOYMENT_TARGET' in os.environ
- if is_iphone_target:
- platform = os.environ['CONFIGURATION'].split('-')[-1]
- if platform not in ('iphoneos', 'iphonesimulator'):
- platform = 'iphonesimulator'
- command_line.extend([
- '--platform', platform, '--target-device', 'iphone',
- '--target-device', 'ipad', '--minimum-deployment-target',
- os.environ['IPHONEOS_DEPLOYMENT_TARGET'], '--compile',
- os.path.abspath(os.environ['CONTENTS_FOLDER_PATH']),
- ])
- else:
- command_line.extend([
- '--platform', 'macosx', '--target-device', 'mac',
- '--minimum-deployment-target', os.environ['MACOSX_DEPLOYMENT_TARGET'],
- '--compile',
- os.path.abspath(os.environ['UNLOCALIZED_RESOURCES_FOLDER_PATH']),
- ])
- if keys:
- keys = json.loads(keys)
- for key, value in keys.iteritems():
- arg_name = '--' + key
- if isinstance(value, bool):
- if value:
- command_line.append(arg_name)
- elif isinstance(value, list):
- for v in value:
- command_line.append(arg_name)
- command_line.append(str(v))
- else:
- command_line.append(arg_name)
- command_line.append(str(value))
- # Note: actool crashes if input paths are relative, so use os.path.abspath
- # to get absolute path names for inputs.
- command_line.extend(map(os.path.abspath, inputs))
- subprocess.check_call(command_line)
-
- def ExecMergeInfoPlist(self, output, *inputs):
- """Merge multiple .plist files into a single .plist file."""
- merged_plist = {}
- for path in inputs:
- plist = self._LoadPlistMaybeBinary(path)
- self._MergePlist(merged_plist, plist)
- plistlib.writePlist(merged_plist, output)
-
- def ExecCodeSignBundle(self, key, resource_rules, entitlements, provisioning):
- """Code sign a bundle.
-
- This function tries to code sign an iOS bundle, following the same
- algorithm as Xcode:
- 1. copy ResourceRules.plist from the user or the SDK into the bundle,
- 2. pick the provisioning profile that best matches the bundle identifier,
- and copy it into the bundle as embedded.mobileprovision,
- 3. copy Entitlements.plist from user or SDK next to the bundle,
- 4. code sign the bundle.
- """
- resource_rules_path = self._InstallResourceRules(resource_rules)
- substitutions, overrides = self._InstallProvisioningProfile(
- provisioning, self._GetCFBundleIdentifier())
- entitlements_path = self._InstallEntitlements(
- entitlements, substitutions, overrides)
- subprocess.check_call([
- 'codesign', '--force', '--sign', key, '--resource-rules',
- resource_rules_path, '--entitlements', entitlements_path,
- os.path.join(
- os.environ['TARGET_BUILD_DIR'],
- os.environ['FULL_PRODUCT_NAME'])])
-
- def _InstallResourceRules(self, resource_rules):
- """Installs ResourceRules.plist from user or SDK into the bundle.
-
- Args:
- resource_rules: string, optional, path to the ResourceRules.plist file
- to use, default to "${SDKROOT}/ResourceRules.plist"
-
- Returns:
- Path to the copy of ResourceRules.plist into the bundle.
- """
- source_path = resource_rules
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['CONTENTS_FOLDER_PATH'],
- 'ResourceRules.plist')
- if not source_path:
- source_path = os.path.join(
- os.environ['SDKROOT'], 'ResourceRules.plist')
- shutil.copy2(source_path, target_path)
- return target_path
-
- def _InstallProvisioningProfile(self, profile, bundle_identifier):
- """Installs embedded.mobileprovision into the bundle.
-
- Args:
- profile: string, optional, short name of the .mobileprovision file
- to use, if empty or the file is missing, the best file installed
- will be used
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
-
- Returns:
- A tuple containing two dictionaries: variable substitutions and values
- to override when generating the entitlements file.
- """
- source_path, provisioning_data, team_id = self._FindProvisioningProfile(
- profile, bundle_identifier)
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['CONTENTS_FOLDER_PATH'],
- 'embedded.mobileprovision')
- shutil.copy2(source_path, target_path)
- substitutions = self._GetSubstitutions(bundle_identifier, team_id + '.')
- return substitutions, provisioning_data['Entitlements']
-
- def _FindProvisioningProfile(self, profile, bundle_identifier):
- """Finds the .mobileprovision file to use for signing the bundle.
-
- Checks all the installed provisioning profiles (or, if the user specified
- the PROVISIONING_PROFILE variable, only consults it) and selects the most
- specific one that corresponds to the bundle identifier.
-
- Args:
- profile: string, optional, short name of the .mobileprovision file
- to use, if empty or the file is missing, the best file installed
- will be used
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
-
- Returns:
- A tuple of the path to the selected provisioning profile, the data of
- the embedded plist in the provisioning profile and the team identifier
- to use for code signing.
-
- Raises:
- SystemExit: if no .mobileprovision can be used to sign the bundle.
- """
- profiles_dir = os.path.join(
- os.environ['HOME'], 'Library', 'MobileDevice', 'Provisioning Profiles')
- if not os.path.isdir(profiles_dir):
- print >>sys.stderr, (
- 'cannot find mobile provisioning for %s' % bundle_identifier)
- sys.exit(1)
- provisioning_profiles = None
- if profile:
- profile_path = os.path.join(profiles_dir, profile + '.mobileprovision')
- if os.path.exists(profile_path):
- provisioning_profiles = [profile_path]
- if not provisioning_profiles:
- provisioning_profiles = glob.glob(
- os.path.join(profiles_dir, '*.mobileprovision'))
- valid_provisioning_profiles = {}
- for profile_path in provisioning_profiles:
- profile_data = self._LoadProvisioningProfile(profile_path)
- app_id_pattern = profile_data.get(
- 'Entitlements', {}).get('application-identifier', '')
- for team_identifier in profile_data.get('TeamIdentifier', []):
- app_id = '%s.%s' % (team_identifier, bundle_identifier)
- if fnmatch.fnmatch(app_id, app_id_pattern):
- valid_provisioning_profiles[app_id_pattern] = (
- profile_path, profile_data, team_identifier)
- if not valid_provisioning_profiles:
- print >>sys.stderr, (
- 'cannot find mobile provisioning for %s' % bundle_identifier)
- sys.exit(1)
- # If the user has multiple provisioning profiles installed that can be
- # used for ${bundle_identifier}, pick the most specific one (ie. the
- # provisioning profile whose pattern is the longest).
- selected_key = max(valid_provisioning_profiles, key=lambda v: len(v))
- return valid_provisioning_profiles[selected_key]
-
- def _LoadProvisioningProfile(self, profile_path):
- """Extracts the plist embedded in a provisioning profile.
-
- Args:
- profile_path: string, path to the .mobileprovision file
-
- Returns:
- Content of the plist embedded in the provisioning profile as a dictionary.
- """
- with tempfile.NamedTemporaryFile() as temp:
- subprocess.check_call([
- 'security', 'cms', '-D', '-i', profile_path, '-o', temp.name])
- return self._LoadPlistMaybeBinary(temp.name)
-
- def _MergePlist(self, merged_plist, plist):
- """Merge |plist| into |merged_plist|."""
- for key, value in plist.iteritems():
- if isinstance(value, dict):
- merged_value = merged_plist.get(key, {})
- if isinstance(merged_value, dict):
- self._MergePlist(merged_value, value)
- merged_plist[key] = merged_value
- else:
- merged_plist[key] = value
- else:
- merged_plist[key] = value
-
- def _LoadPlistMaybeBinary(self, plist_path):
- """Loads into a memory a plist possibly encoded in binary format.
-
- This is a wrapper around plistlib.readPlist that tries to convert the
- plist to the XML format if it can't be parsed (assuming that it is in
- the binary format).
-
- Args:
- plist_path: string, path to a plist file, in XML or binary format
-
- Returns:
- Content of the plist as a dictionary.
- """
- try:
- # First, try to read the file using plistlib that only supports XML,
- # and if an exception is raised, convert a temporary copy to XML and
- # load that copy.
- return plistlib.readPlist(plist_path)
- except:
- pass
- with tempfile.NamedTemporaryFile() as temp:
- shutil.copy2(plist_path, temp.name)
- subprocess.check_call(['plutil', '-convert', 'xml1', temp.name])
- return plistlib.readPlist(temp.name)
-
- def _GetSubstitutions(self, bundle_identifier, app_identifier_prefix):
- """Constructs a dictionary of variable substitutions for Entitlements.plist.
-
- Args:
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
- app_identifier_prefix: string, value for AppIdentifierPrefix
-
- Returns:
- Dictionary of substitutions to apply when generating Entitlements.plist.
- """
- return {
- 'CFBundleIdentifier': bundle_identifier,
- 'AppIdentifierPrefix': app_identifier_prefix,
- }
-
- def _GetCFBundleIdentifier(self):
- """Extracts CFBundleIdentifier value from Info.plist in the bundle.
-
- Returns:
- Value of CFBundleIdentifier in the Info.plist located in the bundle.
- """
- info_plist_path = os.path.join(
- os.environ['TARGET_BUILD_DIR'],
- os.environ['INFOPLIST_PATH'])
- info_plist_data = self._LoadPlistMaybeBinary(info_plist_path)
- return info_plist_data['CFBundleIdentifier']
-
- def _InstallEntitlements(self, entitlements, substitutions, overrides):
- """Generates and install the ${BundleName}.xcent entitlements file.
-
- Expands variables "$(variable)" pattern in the source entitlements file,
- add extra entitlements defined in the .mobileprovision file and the copy
- the generated plist to "${BundlePath}.xcent".
-
- Args:
- entitlements: string, optional, path to the Entitlements.plist template
- to use, defaults to "${SDKROOT}/Entitlements.plist"
- substitutions: dictionary, variable substitutions
- overrides: dictionary, values to add to the entitlements
-
- Returns:
- Path to the generated entitlements file.
- """
- source_path = entitlements
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['PRODUCT_NAME'] + '.xcent')
- if not source_path:
- source_path = os.path.join(
- os.environ['SDKROOT'],
- 'Entitlements.plist')
- shutil.copy2(source_path, target_path)
- data = self._LoadPlistMaybeBinary(target_path)
- data = self._ExpandVariables(data, substitutions)
- if overrides:
- for key in overrides:
- if key not in data:
- data[key] = overrides[key]
- plistlib.writePlist(data, target_path)
- return target_path
-
- def _ExpandVariables(self, data, substitutions):
- """Expands variables "$(variable)" in data.
-
- Args:
- data: object, can be either string, list or dictionary
- substitutions: dictionary, variable substitutions to perform
-
- Returns:
- Copy of data where each reference to "$(variable)" has been replaced
- by the corresponding value found in substitutions, or left intact if
- the key was not found.
- """
- if isinstance(data, str):
- for key, value in substitutions.iteritems():
- data = data.replace('$(%s)' % key, value)
- return data
- if isinstance(data, list):
- return [self._ExpandVariables(v, substitutions) for v in data]
- if isinstance(data, dict):
- return {k: self._ExpandVariables(data[k], substitutions) for k in data}
- return data
-
-if __name__ == '__main__':
- sys.exit(main(sys.argv[1:]))
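
_FindProvisioningProfile in the script above builds '<team id>.<bundle id>', matches it against each profile's application-identifier pattern with fnmatch, and when several profiles match it keeps the one whose pattern is longest, i.e. most specific. A condensed sketch of that selection rule; the profiles dictionary and identifiers are invented for illustration.

    import fnmatch

    def pick_profile(bundle_identifier, team_identifier, profiles):
        # profiles maps an application-identifier pattern to a profile path.
        app_id = '%s.%s' % (team_identifier, bundle_identifier)
        matching = {pattern: path for pattern, path in profiles.items()
                    if fnmatch.fnmatch(app_id, pattern)}
        if not matching:
            raise SystemExit('cannot find mobile provisioning for %s' % bundle_identifier)
        # The longest matching pattern wins, mirroring the max() call above.
        return matching[max(matching, key=len)]

    profiles = {
        'TEAM123456.*': 'wildcard.mobileprovision',
        'TEAM123456.org.example.app': 'exact.mobileprovision',
    }
    print(pick_profile('org.example.app', 'TEAM123456', profiles))  # exact.mobileprovision
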
diff --git a/test/addons-napi/test_constructor/build/test_constructor.target.mk b/test/addons-napi/test_constructor/build/test_constructor.target.mk
deleted file mode 100644
index 9af9de38c6467c..00000000000000
--- a/test/addons-napi/test_constructor/build/test_constructor.target.mk
+++ /dev/null
@@ -1,178 +0,0 @@
-# This file is generated by gyp; do not edit.
-
-TOOLSET := target
-TARGET := test_constructor
-DEFS_Debug := \
- '-DNODE_GYP_MODULE_NAME=test_constructor' \
- '-DUSING_UV_SHARED=1' \
- '-DUSING_V8_SHARED=1' \
- '-DV8_DEPRECATION_WARNINGS=1' \
- '-D_DARWIN_USE_64_BIT_INODE=1' \
- '-D_LARGEFILE_SOURCE' \
- '-D_FILE_OFFSET_BITS=64' \
- '-DBUILDING_NODE_EXTENSION' \
- '-DDEBUG' \
- '-D_DEBUG' \
- '-DV8_ENABLE_CHECKS'
-
-# Flags passed to all source files.
-CFLAGS_Debug := \
- -O0 \
- -gdwarf-2 \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -Wall \
- -Wendif-labels \
- -W \
- -Wno-unused-parameter
-
-# Flags passed to only C files.
-CFLAGS_C_Debug := \
- -fno-strict-aliasing
-
-# Flags passed to only C++ files.
-CFLAGS_CC_Debug := \
- -std=gnu++0x \
- -stdlib=libc++ \
- -fno-rtti \
- -fno-exceptions \
- -fno-threadsafe-statics \
- -fno-strict-aliasing
-
-# Flags passed to only ObjC files.
-CFLAGS_OBJC_Debug :=
-
-# Flags passed to only ObjC++ files.
-CFLAGS_OBJCC_Debug :=
-
-INCS_Debug := \
- -I/Users/trott/io.js/include/node \
- -I/Users/trott/io.js/src \
- -I/Users/trott/io.js/deps/uv/include \
- -I/Users/trott/io.js/deps/v8/include
-
-DEFS_Release := \
- '-DNODE_GYP_MODULE_NAME=test_constructor' \
- '-DUSING_UV_SHARED=1' \
- '-DUSING_V8_SHARED=1' \
- '-DV8_DEPRECATION_WARNINGS=1' \
- '-D_DARWIN_USE_64_BIT_INODE=1' \
- '-D_LARGEFILE_SOURCE' \
- '-D_FILE_OFFSET_BITS=64' \
- '-DBUILDING_NODE_EXTENSION'
-
-# Flags passed to all source files.
-CFLAGS_Release := \
- -Os \
- -gdwarf-2 \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -Wall \
- -Wendif-labels \
- -W \
- -Wno-unused-parameter
-
-# Flags passed to only C files.
-CFLAGS_C_Release := \
- -fno-strict-aliasing
-
-# Flags passed to only C++ files.
-CFLAGS_CC_Release := \
- -std=gnu++0x \
- -stdlib=libc++ \
- -fno-rtti \
- -fno-exceptions \
- -fno-threadsafe-statics \
- -fno-strict-aliasing
-
-# Flags passed to only ObjC files.
-CFLAGS_OBJC_Release :=
-
-# Flags passed to only ObjC++ files.
-CFLAGS_OBJCC_Release :=
-
-INCS_Release := \
- -I/Users/trott/io.js/include/node \
- -I/Users/trott/io.js/src \
- -I/Users/trott/io.js/deps/uv/include \
- -I/Users/trott/io.js/deps/v8/include
-
-OBJS := \
- $(obj).target/$(TARGET)/test_constructor.o
-
-# Add to the list of files we specially track dependencies for.
-all_deps += $(OBJS)
-
-# CFLAGS et al overrides must be target-local.
-# See "Target-specific Variable Values" in the GNU Make manual.
-$(OBJS): TOOLSET := $(TOOLSET)
-$(OBJS): GYP_CFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE))
-$(OBJS): GYP_CXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE))
-$(OBJS): GYP_OBJCFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) $(CFLAGS_OBJC_$(BUILDTYPE))
-$(OBJS): GYP_OBJCXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) $(CFLAGS_OBJCC_$(BUILDTYPE))
-
-# Suffix rules, putting all outputs into $(obj).
-
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-# Try building from generated source, too.
-
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-# End of this set of suffix rules
-### Rules for final target.
-LDFLAGS_Debug := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -L$(builddir) \
- -stdlib=libc++
-
-LIBTOOLFLAGS_Debug := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first
-
-LDFLAGS_Release := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -L$(builddir) \
- -stdlib=libc++
-
-LIBTOOLFLAGS_Release := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first
-
-LIBS :=
-
-$(builddir)/test_constructor.node: GYP_LDFLAGS := $(LDFLAGS_$(BUILDTYPE))
-$(builddir)/test_constructor.node: LIBS := $(LIBS)
-$(builddir)/test_constructor.node: GYP_LIBTOOLFLAGS := $(LIBTOOLFLAGS_$(BUILDTYPE))
-$(builddir)/test_constructor.node: TOOLSET := $(TOOLSET)
-$(builddir)/test_constructor.node: $(OBJS) FORCE_DO_CMD
- $(call do_cmd,solink_module)
-
-all_deps += $(builddir)/test_constructor.node
-# Add target alias
-.PHONY: test_constructor
-test_constructor: $(builddir)/test_constructor.node
-
-# Short alias for building this executable.
-.PHONY: test_constructor.node
-test_constructor.node: $(builddir)/test_constructor.node
-
-# Add executable to "all" target.
-.PHONY: all
-all: $(builddir)/test_constructor.node
-
diff --git a/test/addons-napi/test_conversions/build/Makefile b/test/addons-napi/test_conversions/build/Makefile
deleted file mode 100644
index f0d95d652ab64c..00000000000000
--- a/test/addons-napi/test_conversions/build/Makefile
+++ /dev/null
@@ -1,342 +0,0 @@
-# We borrow heavily from the kernel build setup, though we are simpler since
-# we don't have Kconfig tweaking settings on us.
-
-# The implicit make rules have it looking for RCS files, among other things.
-# We instead explicitly write all the rules we care about.
-# It's even quicker (saves ~200ms) to pass -r on the command line.
-MAKEFLAGS=-r
-
-# The source directory tree.
-srcdir := ..
-abs_srcdir := $(abspath $(srcdir))
-
-# The name of the builddir.
-builddir_name ?= .
-
-# The V=1 flag on command line makes us verbosely print command lines.
-ifdef V
- quiet=
-else
- quiet=quiet_
-endif
-
-# Specify BUILDTYPE=Release on the command line for a release build.
-BUILDTYPE ?= Release
-
-# Directory all our build output goes into.
-# Note that this must be two directories beneath src/ for unit tests to pass,
-# as they reach into the src/ directory for data with relative paths.
-builddir ?= $(builddir_name)/$(BUILDTYPE)
-abs_builddir := $(abspath $(builddir))
-depsdir := $(builddir)/.deps
-
-# Object output directory.
-obj := $(builddir)/obj
-abs_obj := $(abspath $(obj))
-
-# We build up a list of every single one of the targets so we can slurp in the
-# generated dependency rule Makefiles in one pass.
-all_deps :=
-
-
-
-CC.target ?= $(CC)
-CFLAGS.target ?= $(CPPFLAGS) $(CFLAGS)
-CXX.target ?= $(CXX)
-CXXFLAGS.target ?= $(CPPFLAGS) $(CXXFLAGS)
-LINK.target ?= $(LINK)
-LDFLAGS.target ?= $(LDFLAGS)
-AR.target ?= $(AR)
-
-# C++ apps need to be linked with g++.
-LINK ?= $(CXX.target)
-
-# TODO(evan): move all cross-compilation logic to gyp-time so we don't need
-# to replicate this environment fallback in make as well.
-CC.host ?= gcc
-CFLAGS.host ?= $(CPPFLAGS_host) $(CFLAGS_host)
-CXX.host ?= g++
-CXXFLAGS.host ?= $(CPPFLAGS_host) $(CXXFLAGS_host)
-LINK.host ?= $(CXX.host)
-LDFLAGS.host ?=
-AR.host ?= ar
-
-# Define a dir function that can handle spaces.
-# http://www.gnu.org/software/make/manual/make.html#Syntax-of-Functions
-# "leading spaces cannot appear in the text of the first argument as written.
-# These characters can be put into the argument value by variable substitution."
-empty :=
-space := $(empty) $(empty)
-
-# http://stackoverflow.com/questions/1189781/using-make-dir-or-notdir-on-a-path-with-spaces
-replace_spaces = $(subst $(space),?,$1)
-unreplace_spaces = $(subst ?,$(space),$1)
-dirx = $(call unreplace_spaces,$(dir $(call replace_spaces,$1)))
-
-# Flags to make gcc output dependency info. Note that you need to be
-# careful here to use the flags that ccache and distcc can understand.
-# We write to a dep file on the side first and then rename at the end
-# so we can't end up with a broken dep file.
-depfile = $(depsdir)/$(call replace_spaces,$@).d
-DEPFLAGS = -MMD -MF $(depfile).raw
-
-# We have to fixup the deps output in a few ways.
-# (1) the file output should mention the proper .o file.
-# ccache or distcc lose the path to the target, so we convert a rule of
-# the form:
-# foobar.o: DEP1 DEP2
-# into
-# path/to/foobar.o: DEP1 DEP2
-# (2) we want missing files not to cause us to fail to build.
-# We want to rewrite
-# foobar.o: DEP1 DEP2 \
-# DEP3
-# to
-# DEP1:
-# DEP2:
-# DEP3:
-# so if the files are missing, they're just considered phony rules.
-# We have to do some pretty insane escaping to get those backslashes
-# and dollar signs past make, the shell, and sed at the same time.
-# Doesn't work with spaces, but that's fine: .d files have spaces in
-# their names replaced with other characters.
-define fixup_dep
-# The depfile may not exist if the input file didn't have any #includes.
-touch $(depfile).raw
-# Fixup path as in (1).
-sed -e "s|^$(notdir $@)|$@|" $(depfile).raw >> $(depfile)
-# Add extra rules as in (2).
-# We remove slashes and replace spaces with new lines;
-# remove blank lines;
-# delete the first line and append a colon to the remaining lines.
-sed -e 's|\\||' -e 'y| |\n|' $(depfile).raw |\
- grep -v '^$$' |\
- sed -e 1d -e 's|$$|:|' \
- >> $(depfile)
-rm $(depfile).raw
-endef
-
-# Command definitions:
-# - cmd_foo is the actual command to run;
-# - quiet_cmd_foo is the brief-output summary of the command.
-
-quiet_cmd_cc = CC($(TOOLSET)) $@
-cmd_cc = $(CC.$(TOOLSET)) $(GYP_CFLAGS) $(DEPFLAGS) $(CFLAGS.$(TOOLSET)) -c -o $@ $<
-
-quiet_cmd_cxx = CXX($(TOOLSET)) $@
-cmd_cxx = $(CXX.$(TOOLSET)) $(GYP_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-
-quiet_cmd_objc = CXX($(TOOLSET)) $@
-cmd_objc = $(CC.$(TOOLSET)) $(GYP_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-quiet_cmd_objcxx = CXX($(TOOLSET)) $@
-cmd_objcxx = $(CXX.$(TOOLSET)) $(GYP_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-# Commands for precompiled header files.
-quiet_cmd_pch_c = CXX($(TOOLSET)) $@
-cmd_pch_c = $(CC.$(TOOLSET)) $(GYP_PCH_CFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-quiet_cmd_pch_cc = CXX($(TOOLSET)) $@
-cmd_pch_cc = $(CC.$(TOOLSET)) $(GYP_PCH_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-quiet_cmd_pch_m = CXX($(TOOLSET)) $@
-cmd_pch_m = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $<
-quiet_cmd_pch_mm = CXX($(TOOLSET)) $@
-cmd_pch_mm = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-# gyp-mac-tool is written next to the root Makefile by gyp.
-# Use $(4) for the command, since $(2) and $(3) are used as flag by do_cmd
-# already.
-quiet_cmd_mac_tool = MACTOOL $(4) $<
-cmd_mac_tool = ./gyp-mac-tool $(4) $< "$@"
-
-quiet_cmd_mac_package_framework = PACKAGE FRAMEWORK $@
-cmd_mac_package_framework = ./gyp-mac-tool package-framework "$@" $(4)
-
-quiet_cmd_infoplist = INFOPLIST $@
-cmd_infoplist = $(CC.$(TOOLSET)) -E -P -Wno-trigraphs -x c $(INFOPLIST_DEFINES) "$<" -o "$@"
-
-quiet_cmd_touch = TOUCH $@
-cmd_touch = touch $@
-
-quiet_cmd_copy = COPY $@
-# send stderr to /dev/null to ignore messages when linking directories.
-cmd_copy = rm -rf "$@" && cp -af "$<" "$@"
-
-quiet_cmd_alink = LIBTOOL-STATIC $@
-cmd_alink = rm -f $@ && ./gyp-mac-tool filter-libtool libtool $(GYP_LIBTOOLFLAGS) -static -o $@ $(filter %.o,$^)
-
-quiet_cmd_link = LINK($(TOOLSET)) $@
-cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS)
-
-quiet_cmd_solink = SOLINK($(TOOLSET)) $@
-cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS)
-
-quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@
-cmd_solink_module = $(LINK.$(TOOLSET)) -bundle $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS)
-
-
-# Define an escape_quotes function to escape single quotes.
-# This allows us to handle quotes properly as long as we always use
-# use single quotes and escape_quotes.
-escape_quotes = $(subst ','\'',$(1))
-# This comment is here just to include a ' to unconfuse syntax highlighting.
-# Define an escape_vars function to escape '$' variable syntax.
-# This allows us to read/write command lines with shell variables (e.g.
-# $LD_LIBRARY_PATH), without triggering make substitution.
-escape_vars = $(subst $$,$$$$,$(1))
-# Helper that expands to a shell command to echo a string exactly as it is in
-# make. This uses printf instead of echo because printf's behaviour with respect
-# to escape sequences is more portable than echo's across different shells
-# (e.g., dash, bash).
-exact_echo = printf '%s\n' '$(call escape_quotes,$(1))'
-
-# Helper to compare the command we're about to run against the command
-# we logged the last time we ran the command. Produces an empty
-# string (false) when the commands match.
-# Tricky point: Make has no string-equality test function.
-# The kernel uses the following, but it seems like it would have false
-# positives, where one string reordered its arguments.
-# arg_check = $(strip $(filter-out $(cmd_$(1)), $(cmd_$@)) \
-# $(filter-out $(cmd_$@), $(cmd_$(1))))
-# We instead substitute each for the empty string into the other, and
-# say they're equal if both substitutions produce the empty string.
-# .d files contain ? instead of spaces, take that into account.
-command_changed = $(or $(subst $(cmd_$(1)),,$(cmd_$(call replace_spaces,$@))),\
- $(subst $(cmd_$(call replace_spaces,$@)),,$(cmd_$(1))))
-
-# Helper that is non-empty when a prerequisite changes.
-# Normally make does this implicitly, but we force rules to always run
-# so we can check their command lines.
-# $? -- new prerequisites
-# $| -- order-only dependencies
-prereq_changed = $(filter-out FORCE_DO_CMD,$(filter-out $|,$?))
-
-# Helper that executes all postbuilds until one fails.
-define do_postbuilds
- @E=0;\
- for p in $(POSTBUILDS); do\
- eval $$p;\
- E=$$?;\
- if [ $$E -ne 0 ]; then\
- break;\
- fi;\
- done;\
- if [ $$E -ne 0 ]; then\
- rm -rf "$@";\
- exit $$E;\
- fi
-endef
-
-# do_cmd: run a command via the above cmd_foo names, if necessary.
-# Should always run for a given target to handle command-line changes.
-# Second argument, if non-zero, makes it do asm/C/C++ dependency munging.
-# Third argument, if non-zero, makes it do POSTBUILDS processing.
-# Note: We intentionally do NOT call dirx for depfile, since it contains ? for
-# spaces already and dirx strips the ? characters.
-define do_cmd
-$(if $(or $(command_changed),$(prereq_changed)),
- @$(call exact_echo, $($(quiet)cmd_$(1)))
- @mkdir -p "$(call dirx,$@)" "$(dir $(depfile))"
- $(if $(findstring flock,$(word 2,$(cmd_$1))),
- @$(cmd_$(1))
- @echo " $(quiet_cmd_$(1)): Finished",
- @$(cmd_$(1))
- )
- @$(call exact_echo,$(call escape_vars,cmd_$(call replace_spaces,$@) := $(cmd_$(1)))) > $(depfile)
- @$(if $(2),$(fixup_dep))
- $(if $(and $(3), $(POSTBUILDS)),
- $(call do_postbuilds)
- )
-)
-endef
-
-# Declare the "all" target first so it is the default,
-# even though we don't have the deps yet.
-.PHONY: all
-all:
-
-# make looks for ways to re-generate included makefiles, but in our case, we
-# don't have a direct way. Explicitly telling make that it has nothing to do
-# for them makes it go faster.
-%.d: ;
-
-# Use FORCE_DO_CMD to force a target to run. Should be coupled with
-# do_cmd.
-.PHONY: FORCE_DO_CMD
-FORCE_DO_CMD:
-
-TOOLSET := target
-# Suffix rules, putting all outputs into $(obj).
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cpp FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cxx FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.m FORCE_DO_CMD
- @$(call do_cmd,objc,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.mm FORCE_DO_CMD
- @$(call do_cmd,objcxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.S FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.s FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-# Try building from generated source, too.
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cpp FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cxx FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.m FORCE_DO_CMD
- @$(call do_cmd,objc,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.mm FORCE_DO_CMD
- @$(call do_cmd,objcxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.S FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.s FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-$(obj).$(TOOLSET)/%.o: $(obj)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.cpp FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.cxx FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.m FORCE_DO_CMD
- @$(call do_cmd,objc,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.mm FORCE_DO_CMD
- @$(call do_cmd,objcxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.S FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.s FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-
-ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
- $(findstring $(join ^,$(prefix)),\
- $(join ^,test_conversions.target.mk)))),)
- include test_conversions.target.mk
-endif
-
-quiet_cmd_regen_makefile = ACTION Regenerating $@
-cmd_regen_makefile = cd $(srcdir); /Users/trott/io.js/deps/npm/node_modules/node-gyp/gyp/gyp_main.py -fmake --ignore-environment "--toplevel-dir=." -I/Users/trott/io.js/test/addons-napi/test_conversions/build/config.gypi -I/Users/trott/io.js/deps/npm/node_modules/node-gyp/addon.gypi -I/Users/trott/io.js/common.gypi "--depth=." "-Goutput_dir=." "--generator-output=build" "-Dlibrary=shared_library" "-Dvisibility=default" "-Dnode_root_dir=/Users/trott/io.js" "-Dnode_gyp_dir=/Users/trott/io.js/deps/npm/node_modules/node-gyp" "-Dnode_lib_file=/Users/trott/io.js/$(Configuration)/node.lib" "-Dmodule_root_dir=/Users/trott/io.js/test/addons-napi/test_conversions" "-Dnode_engine=v8" binding.gyp
-Makefile: $(srcdir)/../../../deps/npm/node_modules/node-gyp/addon.gypi $(srcdir)/build/config.gypi $(srcdir)/binding.gyp $(srcdir)/../../../common.gypi
- $(call do_cmd,regen_makefile)
-
-# "all" is a concatenation of the "all" targets from all the included
-# sub-makefiles. This is just here to clarify.
-all:
-
-# Add in dependency-tracking rules. $(all_deps) is the list of every single
-# target in our tree. Only consider the ones with .d (dependency) info:
-d_files := $(wildcard $(foreach f,$(all_deps),$(depsdir)/$(f).d))
-ifneq ($(d_files),)
- include $(d_files)
-endif
diff --git a/test/addons-napi/test_conversions/build/binding.Makefile b/test/addons-napi/test_conversions/build/binding.Makefile
deleted file mode 100644
index 3c236072468774..00000000000000
--- a/test/addons-napi/test_conversions/build/binding.Makefile
+++ /dev/null
@@ -1,6 +0,0 @@
-# This file is generated by gyp; do not edit.
-
-export builddir_name ?= ./build/.
-.PHONY: all
-all:
- $(MAKE) test_conversions
diff --git a/test/addons-napi/test_conversions/build/config.gypi b/test/addons-napi/test_conversions/build/config.gypi
deleted file mode 100644
index 6a35cfccbb772b..00000000000000
--- a/test/addons-napi/test_conversions/build/config.gypi
+++ /dev/null
@@ -1,70 +0,0 @@
-# Do not edit. File was generated by node-gyp's "configure" step
-{
- "target_defaults": {
- "cflags": [],
- "default_configuration": "Release",
- "defines": [],
- "include_dirs": [],
- "libraries": []
- },
- "variables": {
- "asan": 0,
- "coverage": "false",
- "debug_devtools": "node",
- "debug_http2": "false",
- "debug_nghttp2": "false",
- "force_dynamic_crt": 0,
- "host_arch": "x64",
- "icu_data_file": "icudt59l.dat",
- "icu_data_in": "../../deps/icu-small/source/data/in/icudt59l.dat",
- "icu_endianness": "l",
- "icu_gyp_path": "tools/icu/icu-generic.gyp",
- "icu_locales": "en,root",
- "icu_path": "deps/icu-small",
- "icu_small": "true",
- "icu_ver_major": "59",
- "llvm_version": 0,
- "node_byteorder": "little",
- "node_enable_d8": "false",
- "node_enable_v8_vtunejit": "false",
- "node_install_npm": "true",
- "node_module_version": 58,
- "node_no_browser_globals": "false",
- "node_prefix": "/usr/local",
- "node_release_urlbase": "",
- "node_shared": "false",
- "node_shared_cares": "false",
- "node_shared_http_parser": "false",
- "node_shared_libuv": "false",
- "node_shared_openssl": "false",
- "node_shared_zlib": "false",
- "node_tag": "",
- "node_use_bundled_v8": "true",
- "node_use_dtrace": "true",
- "node_use_etw": "false",
- "node_use_lttng": "false",
- "node_use_openssl": "true",
- "node_use_perfctr": "false",
- "node_use_v8_platform": "true",
- "node_without_node_options": "false",
- "openssl_fips": "",
- "openssl_no_asm": 0,
- "shlib_suffix": "58.dylib",
- "target_arch": "x64",
- "uv_parent_path": "/deps/uv/",
- "uv_use_dtrace": "true",
- "v8_enable_gdbjit": 0,
- "v8_enable_i18n_support": 1,
- "v8_enable_inspector": 1,
- "v8_no_strict_aliasing": 1,
- "v8_optimized_debug": 0,
- "v8_promise_internal_field_count": 1,
- "v8_random_seed": 0,
- "v8_trace_maps": 0,
- "v8_use_snapshot": "true",
- "want_separate_host_toolset": 0,
- "xcode_version": "8.0",
- "nodedir": "/Users/trott/io.js",
- "standalone_static_library": 1
- }
-}
diff --git a/test/addons-napi/test_conversions/build/gyp-mac-tool b/test/addons-napi/test_conversions/build/gyp-mac-tool
deleted file mode 100755
index 8ef02b0493a003..00000000000000
--- a/test/addons-napi/test_conversions/build/gyp-mac-tool
+++ /dev/null
@@ -1,611 +0,0 @@
-#!/usr/bin/env python
-# Generated by gyp. Do not edit.
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Utility functions to perform Xcode-style build steps.
-
-These functions are executed via gyp-mac-tool when using the Makefile generator.
-"""
-
-import fcntl
-import fnmatch
-import glob
-import json
-import os
-import plistlib
-import re
-import shutil
-import string
-import subprocess
-import sys
-import tempfile
-
-
-def main(args):
- executor = MacTool()
- exit_code = executor.Dispatch(args)
- if exit_code is not None:
- sys.exit(exit_code)
-
-
-class MacTool(object):
- """This class performs all the Mac tooling steps. The methods can either be
- executed directly, or dispatched from an argument list."""
-
- def Dispatch(self, args):
- """Dispatches a string command to a method."""
- if len(args) < 1:
- raise Exception("Not enough arguments")
-
- method = "Exec%s" % self._CommandifyName(args[0])
- return getattr(self, method)(*args[1:])
-
- def _CommandifyName(self, name_string):
- """Transforms a tool name like copy-info-plist to CopyInfoPlist"""
- return name_string.title().replace('-', '')
-
- def ExecCopyBundleResource(self, source, dest, convert_to_binary):
- """Copies a resource file to the bundle/Resources directory, performing any
- necessary compilation on each resource."""
- extension = os.path.splitext(source)[1].lower()
- if os.path.isdir(source):
- # Copy tree.
- # TODO(thakis): This copies file attributes like mtime, while the
- # single-file branch below doesn't. This should probably be changed to
- # be consistent with the single-file branch.
- if os.path.exists(dest):
- shutil.rmtree(dest)
- shutil.copytree(source, dest)
- elif extension == '.xib':
- return self._CopyXIBFile(source, dest)
- elif extension == '.storyboard':
- return self._CopyXIBFile(source, dest)
- elif extension == '.strings':
- self._CopyStringsFile(source, dest, convert_to_binary)
- else:
- shutil.copy(source, dest)
-
- def _CopyXIBFile(self, source, dest):
- """Compiles a XIB file with ibtool into a binary plist in the bundle."""
-
- # ibtool sometimes crashes with relative paths. See crbug.com/314728.
- base = os.path.dirname(os.path.realpath(__file__))
- if os.path.relpath(source):
- source = os.path.join(base, source)
- if os.path.relpath(dest):
- dest = os.path.join(base, dest)
-
- args = ['xcrun', 'ibtool', '--errors', '--warnings', '--notices',
- '--output-format', 'human-readable-text', '--compile', dest, source]
- ibtool_section_re = re.compile(r'/\*.*\*/')
- ibtool_re = re.compile(r'.*note:.*is clipping its content')
- ibtoolout = subprocess.Popen(args, stdout=subprocess.PIPE)
- current_section_header = None
- for line in ibtoolout.stdout:
- if ibtool_section_re.match(line):
- current_section_header = line
- elif not ibtool_re.match(line):
- if current_section_header:
- sys.stdout.write(current_section_header)
- current_section_header = None
- sys.stdout.write(line)
- return ibtoolout.returncode
-
- def _ConvertToBinary(self, dest):
- subprocess.check_call([
- 'xcrun', 'plutil', '-convert', 'binary1', '-o', dest, dest])
-
- def _CopyStringsFile(self, source, dest, convert_to_binary):
- """Copies a .strings file using iconv to reconvert the input into UTF-16."""
- input_code = self._DetectInputEncoding(source) or "UTF-8"
-
- # Xcode's CpyCopyStringsFile / builtin-copyStrings seems to call
- # CFPropertyListCreateFromXMLData() behind the scenes; at least it prints
- # CFPropertyListCreateFromXMLData(): Old-style plist parser: missing
- # semicolon in dictionary.
- # on invalid files. Do the same kind of validation.
- import CoreFoundation
- s = open(source, 'rb').read()
- d = CoreFoundation.CFDataCreate(None, s, len(s))
- _, error = CoreFoundation.CFPropertyListCreateFromXMLData(None, d, 0, None)
- if error:
- return
-
- fp = open(dest, 'wb')
- fp.write(s.decode(input_code).encode('UTF-16'))
- fp.close()
-
- if convert_to_binary == 'True':
- self._ConvertToBinary(dest)
-
- def _DetectInputEncoding(self, file_name):
- """Reads the first few bytes from file_name and tries to guess the text
- encoding. Returns None as a guess if it can't detect it."""
- fp = open(file_name, 'rb')
- try:
- header = fp.read(3)
- except e:
- fp.close()
- return None
- fp.close()
- if header.startswith("\xFE\xFF"):
- return "UTF-16"
- elif header.startswith("\xFF\xFE"):
- return "UTF-16"
- elif header.startswith("\xEF\xBB\xBF"):
- return "UTF-8"
- else:
- return None
-
- def ExecCopyInfoPlist(self, source, dest, convert_to_binary, *keys):
- """Copies the |source| Info.plist to the destination directory |dest|."""
- # Read the source Info.plist into memory.
- fd = open(source, 'r')
- lines = fd.read()
- fd.close()
-
- # Insert synthesized key/value pairs (e.g. BuildMachineOSBuild).
- plist = plistlib.readPlistFromString(lines)
- if keys:
- plist = dict(plist.items() + json.loads(keys[0]).items())
- lines = plistlib.writePlistToString(plist)
-
- # Go through all the environment variables and replace them as variables in
- # the file.
- IDENT_RE = re.compile(r'[/\s]')
- for key in os.environ:
- if key.startswith('_'):
- continue
- evar = '${%s}' % key
- evalue = os.environ[key]
- lines = string.replace(lines, evar, evalue)
-
- # Xcode supports various suffices on environment variables, which are
- # all undocumented. :rfc1034identifier is used in the standard project
- # template these days, and :identifier was used earlier. They are used to
- # convert non-url characters into things that look like valid urls --
- # except that the replacement character for :identifier, '_' isn't valid
- # in a URL either -- oops, hence :rfc1034identifier was born.
- evar = '${%s:identifier}' % key
- evalue = IDENT_RE.sub('_', os.environ[key])
- lines = string.replace(lines, evar, evalue)
-
- evar = '${%s:rfc1034identifier}' % key
- evalue = IDENT_RE.sub('-', os.environ[key])
- lines = string.replace(lines, evar, evalue)
-
- # Remove any keys with values that haven't been replaced.
- lines = lines.split('\n')
- for i in range(len(lines)):
- if lines[i].strip().startswith("${"):
- lines[i] = None
- lines[i - 1] = None
- lines = '\n'.join(filter(lambda x: x is not None, lines))
-
- # Write out the file with variables replaced.
- fd = open(dest, 'w')
- fd.write(lines)
- fd.close()
-
- # Now write out PkgInfo file now that the Info.plist file has been
- # "compiled".
- self._WritePkgInfo(dest)
-
- if convert_to_binary == 'True':
- self._ConvertToBinary(dest)
-
- def _WritePkgInfo(self, info_plist):
- """This writes the PkgInfo file from the data stored in Info.plist."""
- plist = plistlib.readPlist(info_plist)
- if not plist:
- return
-
- # Only create PkgInfo for executable types.
- package_type = plist['CFBundlePackageType']
- if package_type != 'APPL':
- return
-
- # The format of PkgInfo is eight characters, representing the bundle type
- # and bundle signature, each four characters. If that is missing, four
- # '?' characters are used instead.
- signature_code = plist.get('CFBundleSignature', '????')
- if len(signature_code) != 4: # Wrong length resets everything, too.
- signature_code = '?' * 4
-
- dest = os.path.join(os.path.dirname(info_plist), 'PkgInfo')
- fp = open(dest, 'w')
- fp.write('%s%s' % (package_type, signature_code))
- fp.close()
-
- def ExecFlock(self, lockfile, *cmd_list):
- """Emulates the most basic behavior of Linux's flock(1)."""
- # Rely on exception handling to report errors.
- fd = os.open(lockfile, os.O_RDONLY|os.O_NOCTTY|os.O_CREAT, 0o666)
- fcntl.flock(fd, fcntl.LOCK_EX)
- return subprocess.call(cmd_list)
-
- def ExecFilterLibtool(self, *cmd_list):
- """Calls libtool and filters out '/path/to/libtool: file: foo.o has no
- symbols'."""
- libtool_re = re.compile(r'^.*libtool: file: .* has no symbols$')
- libtool_re5 = re.compile(
- r'^.*libtool: warning for library: ' +
- r'.* the table of contents is empty ' +
- r'\(no object file members in the library define global symbols\)$')
- env = os.environ.copy()
- # Ref:
- # http://www.opensource.apple.com/source/cctools/cctools-809/misc/libtool.c
- # The problem with this flag is that it resets the file mtime on the file to
- # epoch=0, e.g. 1970-1-1 or 1969-12-31 depending on timezone.
- env['ZERO_AR_DATE'] = '1'
- libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env)
- _, err = libtoolout.communicate()
- for line in err.splitlines():
- if not libtool_re.match(line) and not libtool_re5.match(line):
- print >>sys.stderr, line
- # Unconditionally touch the output .a file on the command line if present
- # and the command succeeded. A bit hacky.
- if not libtoolout.returncode:
- for i in range(len(cmd_list) - 1):
- if cmd_list[i] == "-o" and cmd_list[i+1].endswith('.a'):
- os.utime(cmd_list[i+1], None)
- break
- return libtoolout.returncode
-
- def ExecPackageFramework(self, framework, version):
- """Takes a path to Something.framework and the Current version of that and
- sets up all the symlinks."""
- # Find the name of the binary based on the part before the ".framework".
- binary = os.path.basename(framework).split('.')[0]
-
- CURRENT = 'Current'
- RESOURCES = 'Resources'
- VERSIONS = 'Versions'
-
- if not os.path.exists(os.path.join(framework, VERSIONS, version, binary)):
- # Binary-less frameworks don't seem to contain symlinks (see e.g.
- # chromium's out/Debug/org.chromium.Chromium.manifest/ bundle).
- return
-
- # Move into the framework directory to set the symlinks correctly.
- pwd = os.getcwd()
- os.chdir(framework)
-
- # Set up the Current version.
- self._Relink(version, os.path.join(VERSIONS, CURRENT))
-
- # Set up the root symlinks.
- self._Relink(os.path.join(VERSIONS, CURRENT, binary), binary)
- self._Relink(os.path.join(VERSIONS, CURRENT, RESOURCES), RESOURCES)
-
- # Back to where we were before!
- os.chdir(pwd)
-
- def _Relink(self, dest, link):
- """Creates a symlink to |dest| named |link|. If |link| already exists,
- it is overwritten."""
- if os.path.lexists(link):
- os.remove(link)
- os.symlink(dest, link)
-
- def ExecCompileXcassets(self, keys, *inputs):
- """Compiles multiple .xcassets files into a single .car file.
-
- This invokes 'actool' to compile all the inputs .xcassets files. The
- |keys| arguments is a json-encoded dictionary of extra arguments to
- pass to 'actool' when the asset catalogs contains an application icon
- or a launch image.
-
- Note that 'actool' does not create the Assets.car file if the asset
- catalogs does not contains imageset.
- """
- command_line = [
- 'xcrun', 'actool', '--output-format', 'human-readable-text',
- '--compress-pngs', '--notices', '--warnings', '--errors',
- ]
- is_iphone_target = 'IPHONEOS_DEPLOYMENT_TARGET' in os.environ
- if is_iphone_target:
- platform = os.environ['CONFIGURATION'].split('-')[-1]
- if platform not in ('iphoneos', 'iphonesimulator'):
- platform = 'iphonesimulator'
- command_line.extend([
- '--platform', platform, '--target-device', 'iphone',
- '--target-device', 'ipad', '--minimum-deployment-target',
- os.environ['IPHONEOS_DEPLOYMENT_TARGET'], '--compile',
- os.path.abspath(os.environ['CONTENTS_FOLDER_PATH']),
- ])
- else:
- command_line.extend([
- '--platform', 'macosx', '--target-device', 'mac',
- '--minimum-deployment-target', os.environ['MACOSX_DEPLOYMENT_TARGET'],
- '--compile',
- os.path.abspath(os.environ['UNLOCALIZED_RESOURCES_FOLDER_PATH']),
- ])
- if keys:
- keys = json.loads(keys)
- for key, value in keys.iteritems():
- arg_name = '--' + key
- if isinstance(value, bool):
- if value:
- command_line.append(arg_name)
- elif isinstance(value, list):
- for v in value:
- command_line.append(arg_name)
- command_line.append(str(v))
- else:
- command_line.append(arg_name)
- command_line.append(str(value))
- # Note: actool crashes if inputs path are relative, so use os.path.abspath
- # to get absolute path name for inputs.
- command_line.extend(map(os.path.abspath, inputs))
- subprocess.check_call(command_line)
-
- def ExecMergeInfoPlist(self, output, *inputs):
- """Merge multiple .plist files into a single .plist file."""
- merged_plist = {}
- for path in inputs:
- plist = self._LoadPlistMaybeBinary(path)
- self._MergePlist(merged_plist, plist)
- plistlib.writePlist(merged_plist, output)
-
- def ExecCodeSignBundle(self, key, resource_rules, entitlements, provisioning):
- """Code sign a bundle.
-
- This function tries to code sign an iOS bundle, following the same
- algorithm as Xcode:
- 1. copy ResourceRules.plist from the user or the SDK into the bundle,
- 2. pick the provisioning profile that best match the bundle identifier,
- and copy it into the bundle as embedded.mobileprovision,
- 3. copy Entitlements.plist from user or SDK next to the bundle,
- 4. code sign the bundle.
- """
- resource_rules_path = self._InstallResourceRules(resource_rules)
- substitutions, overrides = self._InstallProvisioningProfile(
- provisioning, self._GetCFBundleIdentifier())
- entitlements_path = self._InstallEntitlements(
- entitlements, substitutions, overrides)
- subprocess.check_call([
- 'codesign', '--force', '--sign', key, '--resource-rules',
- resource_rules_path, '--entitlements', entitlements_path,
- os.path.join(
- os.environ['TARGET_BUILD_DIR'],
- os.environ['FULL_PRODUCT_NAME'])])
-
- def _InstallResourceRules(self, resource_rules):
- """Installs ResourceRules.plist from user or SDK into the bundle.
-
- Args:
- resource_rules: string, optional, path to the ResourceRules.plist file
- to use, default to "${SDKROOT}/ResourceRules.plist"
-
- Returns:
- Path to the copy of ResourceRules.plist into the bundle.
- """
- source_path = resource_rules
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['CONTENTS_FOLDER_PATH'],
- 'ResourceRules.plist')
- if not source_path:
- source_path = os.path.join(
- os.environ['SDKROOT'], 'ResourceRules.plist')
- shutil.copy2(source_path, target_path)
- return target_path
-
- def _InstallProvisioningProfile(self, profile, bundle_identifier):
- """Installs embedded.mobileprovision into the bundle.
-
- Args:
- profile: string, optional, short name of the .mobileprovision file
- to use, if empty or the file is missing, the best file installed
- will be used
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
-
- Returns:
- A tuple containing two dictionary: variables substitutions and values
- to overrides when generating the entitlements file.
- """
- source_path, provisioning_data, team_id = self._FindProvisioningProfile(
- profile, bundle_identifier)
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['CONTENTS_FOLDER_PATH'],
- 'embedded.mobileprovision')
- shutil.copy2(source_path, target_path)
- substitutions = self._GetSubstitutions(bundle_identifier, team_id + '.')
- return substitutions, provisioning_data['Entitlements']
-
- def _FindProvisioningProfile(self, profile, bundle_identifier):
- """Finds the .mobileprovision file to use for signing the bundle.
-
- Checks all the installed provisioning profiles (or if the user specified
- the PROVISIONING_PROFILE variable, only consult it) and select the most
- specific that correspond to the bundle identifier.
-
- Args:
- profile: string, optional, short name of the .mobileprovision file
- to use, if empty or the file is missing, the best file installed
- will be used
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
-
- Returns:
- A tuple of the path to the selected provisioning profile, the data of
- the embedded plist in the provisioning profile and the team identifier
- to use for code signing.
-
- Raises:
- SystemExit: if no .mobileprovision can be used to sign the bundle.
- """
- profiles_dir = os.path.join(
- os.environ['HOME'], 'Library', 'MobileDevice', 'Provisioning Profiles')
- if not os.path.isdir(profiles_dir):
- print >>sys.stderr, (
- 'cannot find mobile provisioning for %s' % bundle_identifier)
- sys.exit(1)
- provisioning_profiles = None
- if profile:
- profile_path = os.path.join(profiles_dir, profile + '.mobileprovision')
- if os.path.exists(profile_path):
- provisioning_profiles = [profile_path]
- if not provisioning_profiles:
- provisioning_profiles = glob.glob(
- os.path.join(profiles_dir, '*.mobileprovision'))
- valid_provisioning_profiles = {}
- for profile_path in provisioning_profiles:
- profile_data = self._LoadProvisioningProfile(profile_path)
- app_id_pattern = profile_data.get(
- 'Entitlements', {}).get('application-identifier', '')
- for team_identifier in profile_data.get('TeamIdentifier', []):
- app_id = '%s.%s' % (team_identifier, bundle_identifier)
- if fnmatch.fnmatch(app_id, app_id_pattern):
- valid_provisioning_profiles[app_id_pattern] = (
- profile_path, profile_data, team_identifier)
- if not valid_provisioning_profiles:
- print >>sys.stderr, (
- 'cannot find mobile provisioning for %s' % bundle_identifier)
- sys.exit(1)
- # If the user has multiple provisioning profiles installed that can be
- # used for ${bundle_identifier}, pick the most specific one (ie. the
- # provisioning profile whose pattern is the longest).
- selected_key = max(valid_provisioning_profiles, key=lambda v: len(v))
- return valid_provisioning_profiles[selected_key]
-
- def _LoadProvisioningProfile(self, profile_path):
- """Extracts the plist embedded in a provisioning profile.
-
- Args:
- profile_path: string, path to the .mobileprovision file
-
- Returns:
- Content of the plist embedded in the provisioning profile as a dictionary.
- """
- with tempfile.NamedTemporaryFile() as temp:
- subprocess.check_call([
- 'security', 'cms', '-D', '-i', profile_path, '-o', temp.name])
- return self._LoadPlistMaybeBinary(temp.name)
-
- def _MergePlist(self, merged_plist, plist):
- """Merge |plist| into |merged_plist|."""
- for key, value in plist.iteritems():
- if isinstance(value, dict):
- merged_value = merged_plist.get(key, {})
- if isinstance(merged_value, dict):
- self._MergePlist(merged_value, value)
- merged_plist[key] = merged_value
- else:
- merged_plist[key] = value
- else:
- merged_plist[key] = value
-
- def _LoadPlistMaybeBinary(self, plist_path):
- """Loads into a memory a plist possibly encoded in binary format.
-
- This is a wrapper around plistlib.readPlist that tries to convert the
- plist to the XML format if it can't be parsed (assuming that it is in
- the binary format).
-
- Args:
- plist_path: string, path to a plist file, in XML or binary format
-
- Returns:
- Content of the plist as a dictionary.
- """
- try:
- # First, try to read the file using plistlib that only supports XML,
- # and if an exception is raised, convert a temporary copy to XML and
- # load that copy.
- return plistlib.readPlist(plist_path)
- except:
- pass
- with tempfile.NamedTemporaryFile() as temp:
- shutil.copy2(plist_path, temp.name)
- subprocess.check_call(['plutil', '-convert', 'xml1', temp.name])
- return plistlib.readPlist(temp.name)
-
- def _GetSubstitutions(self, bundle_identifier, app_identifier_prefix):
- """Constructs a dictionary of variable substitutions for Entitlements.plist.
-
- Args:
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
- app_identifier_prefix: string, value for AppIdentifierPrefix
-
- Returns:
- Dictionary of substitutions to apply when generating Entitlements.plist.
- """
- return {
- 'CFBundleIdentifier': bundle_identifier,
- 'AppIdentifierPrefix': app_identifier_prefix,
- }
-
- def _GetCFBundleIdentifier(self):
- """Extracts CFBundleIdentifier value from Info.plist in the bundle.
-
- Returns:
- Value of CFBundleIdentifier in the Info.plist located in the bundle.
- """
- info_plist_path = os.path.join(
- os.environ['TARGET_BUILD_DIR'],
- os.environ['INFOPLIST_PATH'])
- info_plist_data = self._LoadPlistMaybeBinary(info_plist_path)
- return info_plist_data['CFBundleIdentifier']
-
- def _InstallEntitlements(self, entitlements, substitutions, overrides):
- """Generates and install the ${BundleName}.xcent entitlements file.
-
- Expands variables "$(variable)" pattern in the source entitlements file,
- add extra entitlements defined in the .mobileprovision file and the copy
- the generated plist to "${BundlePath}.xcent".
-
- Args:
- entitlements: string, optional, path to the Entitlements.plist template
- to use, defaults to "${SDKROOT}/Entitlements.plist"
- substitutions: dictionary, variable substitutions
- overrides: dictionary, values to add to the entitlements
-
- Returns:
- Path to the generated entitlements file.
- """
- source_path = entitlements
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['PRODUCT_NAME'] + '.xcent')
- if not source_path:
- source_path = os.path.join(
- os.environ['SDKROOT'],
- 'Entitlements.plist')
- shutil.copy2(source_path, target_path)
- data = self._LoadPlistMaybeBinary(target_path)
- data = self._ExpandVariables(data, substitutions)
- if overrides:
- for key in overrides:
- if key not in data:
- data[key] = overrides[key]
- plistlib.writePlist(data, target_path)
- return target_path
-
- def _ExpandVariables(self, data, substitutions):
- """Expands variables "$(variable)" in data.
-
- Args:
- data: object, can be either string, list or dictionary
- substitutions: dictionary, variable substitutions to perform
-
- Returns:
- Copy of data where each references to "$(variable)" has been replaced
- by the corresponding value found in substitutions, or left intact if
- the key was not found.
- """
- if isinstance(data, str):
- for key, value in substitutions.iteritems():
- data = data.replace('$(%s)' % key, value)
- return data
- if isinstance(data, list):
- return [self._ExpandVariables(v, substitutions) for v in data]
- if isinstance(data, dict):
- return {k: self._ExpandVariables(data[k], substitutions) for k in data}
- return data
-
-if __name__ == '__main__':
- sys.exit(main(sys.argv[1:]))
diff --git a/test/addons-napi/test_conversions/build/test_conversions.target.mk b/test/addons-napi/test_conversions/build/test_conversions.target.mk
deleted file mode 100644
index 87273bde635499..00000000000000
--- a/test/addons-napi/test_conversions/build/test_conversions.target.mk
+++ /dev/null
@@ -1,178 +0,0 @@
-# This file is generated by gyp; do not edit.
-
-TOOLSET := target
-TARGET := test_conversions
-DEFS_Debug := \
- '-DNODE_GYP_MODULE_NAME=test_conversions' \
- '-DUSING_UV_SHARED=1' \
- '-DUSING_V8_SHARED=1' \
- '-DV8_DEPRECATION_WARNINGS=1' \
- '-D_DARWIN_USE_64_BIT_INODE=1' \
- '-D_LARGEFILE_SOURCE' \
- '-D_FILE_OFFSET_BITS=64' \
- '-DBUILDING_NODE_EXTENSION' \
- '-DDEBUG' \
- '-D_DEBUG' \
- '-DV8_ENABLE_CHECKS'
-
-# Flags passed to all source files.
-CFLAGS_Debug := \
- -O0 \
- -gdwarf-2 \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -Wall \
- -Wendif-labels \
- -W \
- -Wno-unused-parameter
-
-# Flags passed to only C files.
-CFLAGS_C_Debug := \
- -fno-strict-aliasing
-
-# Flags passed to only C++ files.
-CFLAGS_CC_Debug := \
- -std=gnu++0x \
- -stdlib=libc++ \
- -fno-rtti \
- -fno-exceptions \
- -fno-threadsafe-statics \
- -fno-strict-aliasing
-
-# Flags passed to only ObjC files.
-CFLAGS_OBJC_Debug :=
-
-# Flags passed to only ObjC++ files.
-CFLAGS_OBJCC_Debug :=
-
-INCS_Debug := \
- -I/Users/trott/io.js/include/node \
- -I/Users/trott/io.js/src \
- -I/Users/trott/io.js/deps/uv/include \
- -I/Users/trott/io.js/deps/v8/include
-
-DEFS_Release := \
- '-DNODE_GYP_MODULE_NAME=test_conversions' \
- '-DUSING_UV_SHARED=1' \
- '-DUSING_V8_SHARED=1' \
- '-DV8_DEPRECATION_WARNINGS=1' \
- '-D_DARWIN_USE_64_BIT_INODE=1' \
- '-D_LARGEFILE_SOURCE' \
- '-D_FILE_OFFSET_BITS=64' \
- '-DBUILDING_NODE_EXTENSION'
-
-# Flags passed to all source files.
-CFLAGS_Release := \
- -Os \
- -gdwarf-2 \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -Wall \
- -Wendif-labels \
- -W \
- -Wno-unused-parameter
-
-# Flags passed to only C files.
-CFLAGS_C_Release := \
- -fno-strict-aliasing
-
-# Flags passed to only C++ files.
-CFLAGS_CC_Release := \
- -std=gnu++0x \
- -stdlib=libc++ \
- -fno-rtti \
- -fno-exceptions \
- -fno-threadsafe-statics \
- -fno-strict-aliasing
-
-# Flags passed to only ObjC files.
-CFLAGS_OBJC_Release :=
-
-# Flags passed to only ObjC++ files.
-CFLAGS_OBJCC_Release :=
-
-INCS_Release := \
- -I/Users/trott/io.js/include/node \
- -I/Users/trott/io.js/src \
- -I/Users/trott/io.js/deps/uv/include \
- -I/Users/trott/io.js/deps/v8/include
-
-OBJS := \
- $(obj).target/$(TARGET)/test_conversions.o
-
-# Add to the list of files we specially track dependencies for.
-all_deps += $(OBJS)
-
-# CFLAGS et al overrides must be target-local.
-# See "Target-specific Variable Values" in the GNU Make manual.
-$(OBJS): TOOLSET := $(TOOLSET)
-$(OBJS): GYP_CFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE))
-$(OBJS): GYP_CXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE))
-$(OBJS): GYP_OBJCFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) $(CFLAGS_OBJC_$(BUILDTYPE))
-$(OBJS): GYP_OBJCXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) $(CFLAGS_OBJCC_$(BUILDTYPE))
-
-# Suffix rules, putting all outputs into $(obj).
-
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-# Try building from generated source, too.
-
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-# End of this set of suffix rules
-### Rules for final target.
-LDFLAGS_Debug := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -L$(builddir) \
- -stdlib=libc++
-
-LIBTOOLFLAGS_Debug := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first
-
-LDFLAGS_Release := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -L$(builddir) \
- -stdlib=libc++
-
-LIBTOOLFLAGS_Release := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first
-
-LIBS :=
-
-$(builddir)/test_conversions.node: GYP_LDFLAGS := $(LDFLAGS_$(BUILDTYPE))
-$(builddir)/test_conversions.node: LIBS := $(LIBS)
-$(builddir)/test_conversions.node: GYP_LIBTOOLFLAGS := $(LIBTOOLFLAGS_$(BUILDTYPE))
-$(builddir)/test_conversions.node: TOOLSET := $(TOOLSET)
-$(builddir)/test_conversions.node: $(OBJS) FORCE_DO_CMD
- $(call do_cmd,solink_module)
-
-all_deps += $(builddir)/test_conversions.node
-# Add target alias
-.PHONY: test_conversions
-test_conversions: $(builddir)/test_conversions.node
-
-# Short alias for building this executable.
-.PHONY: test_conversions.node
-test_conversions.node: $(builddir)/test_conversions.node
-
-# Add executable to "all" target.
-.PHONY: all
-all: $(builddir)/test_conversions.node
-
diff --git a/test/addons-napi/test_dataview/build/Makefile b/test/addons-napi/test_dataview/build/Makefile
deleted file mode 100644
index 9dd4843f8cf222..00000000000000
--- a/test/addons-napi/test_dataview/build/Makefile
+++ /dev/null
@@ -1,342 +0,0 @@
-# We borrow heavily from the kernel build setup, though we are simpler since
-# we don't have Kconfig tweaking settings on us.
-
-# The implicit make rules have it looking for RCS files, among other things.
-# We instead explicitly write all the rules we care about.
-# It's even quicker (saves ~200ms) to pass -r on the command line.
-MAKEFLAGS=-r
-
-# The source directory tree.
-srcdir := ..
-abs_srcdir := $(abspath $(srcdir))
-
-# The name of the builddir.
-builddir_name ?= .
-
-# The V=1 flag on command line makes us verbosely print command lines.
-ifdef V
- quiet=
-else
- quiet=quiet_
-endif
-
-# Specify BUILDTYPE=Release on the command line for a release build.
-BUILDTYPE ?= Release
-
-# Directory all our build output goes into.
-# Note that this must be two directories beneath src/ for unit tests to pass,
-# as they reach into the src/ directory for data with relative paths.
-builddir ?= $(builddir_name)/$(BUILDTYPE)
-abs_builddir := $(abspath $(builddir))
-depsdir := $(builddir)/.deps
-
-# Object output directory.
-obj := $(builddir)/obj
-abs_obj := $(abspath $(obj))
-
-# We build up a list of every single one of the targets so we can slurp in the
-# generated dependency rule Makefiles in one pass.
-all_deps :=
-
-
-
-CC.target ?= $(CC)
-CFLAGS.target ?= $(CPPFLAGS) $(CFLAGS)
-CXX.target ?= $(CXX)
-CXXFLAGS.target ?= $(CPPFLAGS) $(CXXFLAGS)
-LINK.target ?= $(LINK)
-LDFLAGS.target ?= $(LDFLAGS)
-AR.target ?= $(AR)
-
-# C++ apps need to be linked with g++.
-LINK ?= $(CXX.target)
-
-# TODO(evan): move all cross-compilation logic to gyp-time so we don't need
-# to replicate this environment fallback in make as well.
-CC.host ?= gcc
-CFLAGS.host ?= $(CPPFLAGS_host) $(CFLAGS_host)
-CXX.host ?= g++
-CXXFLAGS.host ?= $(CPPFLAGS_host) $(CXXFLAGS_host)
-LINK.host ?= $(CXX.host)
-LDFLAGS.host ?=
-AR.host ?= ar
-
-# Define a dir function that can handle spaces.
-# http://www.gnu.org/software/make/manual/make.html#Syntax-of-Functions
-# "leading spaces cannot appear in the text of the first argument as written.
-# These characters can be put into the argument value by variable substitution."
-empty :=
-space := $(empty) $(empty)
-
-# http://stackoverflow.com/questions/1189781/using-make-dir-or-notdir-on-a-path-with-spaces
-replace_spaces = $(subst $(space),?,$1)
-unreplace_spaces = $(subst ?,$(space),$1)
-dirx = $(call unreplace_spaces,$(dir $(call replace_spaces,$1)))
-
-# Flags to make gcc output dependency info. Note that you need to be
-# careful here to use the flags that ccache and distcc can understand.
-# We write to a dep file on the side first and then rename at the end
-# so we can't end up with a broken dep file.
-depfile = $(depsdir)/$(call replace_spaces,$@).d
-DEPFLAGS = -MMD -MF $(depfile).raw
-
-# We have to fixup the deps output in a few ways.
-# (1) the file output should mention the proper .o file.
-# ccache or distcc lose the path to the target, so we convert a rule of
-# the form:
-# foobar.o: DEP1 DEP2
-# into
-# path/to/foobar.o: DEP1 DEP2
-# (2) we want missing files not to cause us to fail to build.
-# We want to rewrite
-# foobar.o: DEP1 DEP2 \
-# DEP3
-# to
-# DEP1:
-# DEP2:
-# DEP3:
-# so if the files are missing, they're just considered phony rules.
-# We have to do some pretty insane escaping to get those backslashes
-# and dollar signs past make, the shell, and sed at the same time.
-# Doesn't work with spaces, but that's fine: .d files have spaces in
-# their names replaced with other characters.
-define fixup_dep
-# The depfile may not exist if the input file didn't have any #includes.
-touch $(depfile).raw
-# Fixup path as in (1).
-sed -e "s|^$(notdir $@)|$@|" $(depfile).raw >> $(depfile)
-# Add extra rules as in (2).
-# We remove slashes and replace spaces with new lines;
-# remove blank lines;
-# delete the first line and append a colon to the remaining lines.
-sed -e 's|\\||' -e 'y| |\n|' $(depfile).raw |\
- grep -v '^$$' |\
- sed -e 1d -e 's|$$|:|' \
- >> $(depfile)
-rm $(depfile).raw
-endef
-
-# Command definitions:
-# - cmd_foo is the actual command to run;
-# - quiet_cmd_foo is the brief-output summary of the command.
-
-quiet_cmd_cc = CC($(TOOLSET)) $@
-cmd_cc = $(CC.$(TOOLSET)) $(GYP_CFLAGS) $(DEPFLAGS) $(CFLAGS.$(TOOLSET)) -c -o $@ $<
-
-quiet_cmd_cxx = CXX($(TOOLSET)) $@
-cmd_cxx = $(CXX.$(TOOLSET)) $(GYP_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-
-quiet_cmd_objc = CXX($(TOOLSET)) $@
-cmd_objc = $(CC.$(TOOLSET)) $(GYP_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-quiet_cmd_objcxx = CXX($(TOOLSET)) $@
-cmd_objcxx = $(CXX.$(TOOLSET)) $(GYP_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-# Commands for precompiled header files.
-quiet_cmd_pch_c = CXX($(TOOLSET)) $@
-cmd_pch_c = $(CC.$(TOOLSET)) $(GYP_PCH_CFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-quiet_cmd_pch_cc = CXX($(TOOLSET)) $@
-cmd_pch_cc = $(CC.$(TOOLSET)) $(GYP_PCH_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-quiet_cmd_pch_m = CXX($(TOOLSET)) $@
-cmd_pch_m = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $<
-quiet_cmd_pch_mm = CXX($(TOOLSET)) $@
-cmd_pch_mm = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-# gyp-mac-tool is written next to the root Makefile by gyp.
-# Use $(4) for the command, since $(2) and $(3) are used as flag by do_cmd
-# already.
-quiet_cmd_mac_tool = MACTOOL $(4) $<
-cmd_mac_tool = ./gyp-mac-tool $(4) $< "$@"
-
-quiet_cmd_mac_package_framework = PACKAGE FRAMEWORK $@
-cmd_mac_package_framework = ./gyp-mac-tool package-framework "$@" $(4)
-
-quiet_cmd_infoplist = INFOPLIST $@
-cmd_infoplist = $(CC.$(TOOLSET)) -E -P -Wno-trigraphs -x c $(INFOPLIST_DEFINES) "$<" -o "$@"
-
-quiet_cmd_touch = TOUCH $@
-cmd_touch = touch $@
-
-quiet_cmd_copy = COPY $@
-# send stderr to /dev/null to ignore messages when linking directories.
-cmd_copy = rm -rf "$@" && cp -af "$<" "$@"
-
-quiet_cmd_alink = LIBTOOL-STATIC $@
-cmd_alink = rm -f $@ && ./gyp-mac-tool filter-libtool libtool $(GYP_LIBTOOLFLAGS) -static -o $@ $(filter %.o,$^)
-
-quiet_cmd_link = LINK($(TOOLSET)) $@
-cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS)
-
-quiet_cmd_solink = SOLINK($(TOOLSET)) $@
-cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS)
-
-quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@
-cmd_solink_module = $(LINK.$(TOOLSET)) -bundle $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS)
-
-
-# Define an escape_quotes function to escape single quotes.
-# This allows us to handle quotes properly as long as we always use
-# use single quotes and escape_quotes.
-escape_quotes = $(subst ','\'',$(1))
-# This comment is here just to include a ' to unconfuse syntax highlighting.
-# Define an escape_vars function to escape '$' variable syntax.
-# This allows us to read/write command lines with shell variables (e.g.
-# $LD_LIBRARY_PATH), without triggering make substitution.
-escape_vars = $(subst $$,$$$$,$(1))
-# Helper that expands to a shell command to echo a string exactly as it is in
-# make. This uses printf instead of echo because printf's behaviour with respect
-# to escape sequences is more portable than echo's across different shells
-# (e.g., dash, bash).
-exact_echo = printf '%s\n' '$(call escape_quotes,$(1))'
-
-# Helper to compare the command we're about to run against the command
-# we logged the last time we ran the command. Produces an empty
-# string (false) when the commands match.
-# Tricky point: Make has no string-equality test function.
-# The kernel uses the following, but it seems like it would have false
-# positives, where one string reordered its arguments.
-# arg_check = $(strip $(filter-out $(cmd_$(1)), $(cmd_$@)) \
-# $(filter-out $(cmd_$@), $(cmd_$(1))))
-# We instead substitute each for the empty string into the other, and
-# say they're equal if both substitutions produce the empty string.
-# .d files contain ? instead of spaces, take that into account.
-command_changed = $(or $(subst $(cmd_$(1)),,$(cmd_$(call replace_spaces,$@))),\
- $(subst $(cmd_$(call replace_spaces,$@)),,$(cmd_$(1))))
-
-# Helper that is non-empty when a prerequisite changes.
-# Normally make does this implicitly, but we force rules to always run
-# so we can check their command lines.
-# $? -- new prerequisites
-# $| -- order-only dependencies
-prereq_changed = $(filter-out FORCE_DO_CMD,$(filter-out $|,$?))
-
-# Helper that executes all postbuilds until one fails.
-define do_postbuilds
- @E=0;\
- for p in $(POSTBUILDS); do\
- eval $$p;\
- E=$$?;\
- if [ $$E -ne 0 ]; then\
- break;\
- fi;\
- done;\
- if [ $$E -ne 0 ]; then\
- rm -rf "$@";\
- exit $$E;\
- fi
-endef
-
-# do_cmd: run a command via the above cmd_foo names, if necessary.
-# Should always run for a given target to handle command-line changes.
-# Second argument, if non-zero, makes it do asm/C/C++ dependency munging.
-# Third argument, if non-zero, makes it do POSTBUILDS processing.
-# Note: We intentionally do NOT call dirx for depfile, since it contains ? for
-# spaces already and dirx strips the ? characters.
-define do_cmd
-$(if $(or $(command_changed),$(prereq_changed)),
- @$(call exact_echo, $($(quiet)cmd_$(1)))
- @mkdir -p "$(call dirx,$@)" "$(dir $(depfile))"
- $(if $(findstring flock,$(word 2,$(cmd_$1))),
- @$(cmd_$(1))
- @echo " $(quiet_cmd_$(1)): Finished",
- @$(cmd_$(1))
- )
- @$(call exact_echo,$(call escape_vars,cmd_$(call replace_spaces,$@) := $(cmd_$(1)))) > $(depfile)
- @$(if $(2),$(fixup_dep))
- $(if $(and $(3), $(POSTBUILDS)),
- $(call do_postbuilds)
- )
-)
-endef
-
-# Declare the "all" target first so it is the default,
-# even though we don't have the deps yet.
-.PHONY: all
-all:
-
-# make looks for ways to re-generate included makefiles, but in our case, we
-# don't have a direct way. Explicitly telling make that it has nothing to do
-# for them makes it go faster.
-%.d: ;
-
-# Use FORCE_DO_CMD to force a target to run. Should be coupled with
-# do_cmd.
-.PHONY: FORCE_DO_CMD
-FORCE_DO_CMD:
-
-TOOLSET := target
-# Suffix rules, putting all outputs into $(obj).
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cpp FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cxx FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.m FORCE_DO_CMD
- @$(call do_cmd,objc,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.mm FORCE_DO_CMD
- @$(call do_cmd,objcxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.S FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.s FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-# Try building from generated source, too.
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cpp FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cxx FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.m FORCE_DO_CMD
- @$(call do_cmd,objc,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.mm FORCE_DO_CMD
- @$(call do_cmd,objcxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.S FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.s FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-$(obj).$(TOOLSET)/%.o: $(obj)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.cpp FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.cxx FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.m FORCE_DO_CMD
- @$(call do_cmd,objc,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.mm FORCE_DO_CMD
- @$(call do_cmd,objcxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.S FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.s FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-
-ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
- $(findstring $(join ^,$(prefix)),\
- $(join ^,test_dataview.target.mk)))),)
- include test_dataview.target.mk
-endif
-
-quiet_cmd_regen_makefile = ACTION Regenerating $@
-cmd_regen_makefile = cd $(srcdir); /Users/trott/io.js/deps/npm/node_modules/node-gyp/gyp/gyp_main.py -fmake --ignore-environment "--toplevel-dir=." -I/Users/trott/io.js/test/addons-napi/test_dataview/build/config.gypi -I/Users/trott/io.js/deps/npm/node_modules/node-gyp/addon.gypi -I/Users/trott/io.js/common.gypi "--depth=." "-Goutput_dir=." "--generator-output=build" "-Dlibrary=shared_library" "-Dvisibility=default" "-Dnode_root_dir=/Users/trott/io.js" "-Dnode_gyp_dir=/Users/trott/io.js/deps/npm/node_modules/node-gyp" "-Dnode_lib_file=/Users/trott/io.js/$(Configuration)/node.lib" "-Dmodule_root_dir=/Users/trott/io.js/test/addons-napi/test_dataview" "-Dnode_engine=v8" binding.gyp
-Makefile: $(srcdir)/../../../deps/npm/node_modules/node-gyp/addon.gypi $(srcdir)/build/config.gypi $(srcdir)/binding.gyp $(srcdir)/../../../common.gypi
- $(call do_cmd,regen_makefile)
-
-# "all" is a concatenation of the "all" targets from all the included
-# sub-makefiles. This is just here to clarify.
-all:
-
-# Add in dependency-tracking rules. $(all_deps) is the list of every single
-# target in our tree. Only consider the ones with .d (dependency) info:
-d_files := $(wildcard $(foreach f,$(all_deps),$(depsdir)/$(f).d))
-ifneq ($(d_files),)
- include $(d_files)
-endif
diff --git a/test/addons-napi/test_dataview/build/binding.Makefile b/test/addons-napi/test_dataview/build/binding.Makefile
deleted file mode 100644
index 9fa814f4408abd..00000000000000
--- a/test/addons-napi/test_dataview/build/binding.Makefile
+++ /dev/null
@@ -1,6 +0,0 @@
-# This file is generated by gyp; do not edit.
-
-export builddir_name ?= ./build/.
-.PHONY: all
-all:
- $(MAKE) test_dataview
diff --git a/test/addons-napi/test_dataview/build/config.gypi b/test/addons-napi/test_dataview/build/config.gypi
deleted file mode 100644
index 6a35cfccbb772b..00000000000000
--- a/test/addons-napi/test_dataview/build/config.gypi
+++ /dev/null
@@ -1,70 +0,0 @@
-# Do not edit. File was generated by node-gyp's "configure" step
-{
- "target_defaults": {
- "cflags": [],
- "default_configuration": "Release",
- "defines": [],
- "include_dirs": [],
- "libraries": []
- },
- "variables": {
- "asan": 0,
- "coverage": "false",
- "debug_devtools": "node",
- "debug_http2": "false",
- "debug_nghttp2": "false",
- "force_dynamic_crt": 0,
- "host_arch": "x64",
- "icu_data_file": "icudt59l.dat",
- "icu_data_in": "../../deps/icu-small/source/data/in/icudt59l.dat",
- "icu_endianness": "l",
- "icu_gyp_path": "tools/icu/icu-generic.gyp",
- "icu_locales": "en,root",
- "icu_path": "deps/icu-small",
- "icu_small": "true",
- "icu_ver_major": "59",
- "llvm_version": 0,
- "node_byteorder": "little",
- "node_enable_d8": "false",
- "node_enable_v8_vtunejit": "false",
- "node_install_npm": "true",
- "node_module_version": 58,
- "node_no_browser_globals": "false",
- "node_prefix": "/usr/local",
- "node_release_urlbase": "",
- "node_shared": "false",
- "node_shared_cares": "false",
- "node_shared_http_parser": "false",
- "node_shared_libuv": "false",
- "node_shared_openssl": "false",
- "node_shared_zlib": "false",
- "node_tag": "",
- "node_use_bundled_v8": "true",
- "node_use_dtrace": "true",
- "node_use_etw": "false",
- "node_use_lttng": "false",
- "node_use_openssl": "true",
- "node_use_perfctr": "false",
- "node_use_v8_platform": "true",
- "node_without_node_options": "false",
- "openssl_fips": "",
- "openssl_no_asm": 0,
- "shlib_suffix": "58.dylib",
- "target_arch": "x64",
- "uv_parent_path": "/deps/uv/",
- "uv_use_dtrace": "true",
- "v8_enable_gdbjit": 0,
- "v8_enable_i18n_support": 1,
- "v8_enable_inspector": 1,
- "v8_no_strict_aliasing": 1,
- "v8_optimized_debug": 0,
- "v8_promise_internal_field_count": 1,
- "v8_random_seed": 0,
- "v8_trace_maps": 0,
- "v8_use_snapshot": "true",
- "want_separate_host_toolset": 0,
- "xcode_version": "8.0",
- "nodedir": "/Users/trott/io.js",
- "standalone_static_library": 1
- }
-}
diff --git a/test/addons-napi/test_dataview/build/gyp-mac-tool b/test/addons-napi/test_dataview/build/gyp-mac-tool
deleted file mode 100755
index 8ef02b0493a003..00000000000000
--- a/test/addons-napi/test_dataview/build/gyp-mac-tool
+++ /dev/null
@@ -1,611 +0,0 @@
-#!/usr/bin/env python
-# Generated by gyp. Do not edit.
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Utility functions to perform Xcode-style build steps.
-
-These functions are executed via gyp-mac-tool when using the Makefile generator.
-"""
-
-import fcntl
-import fnmatch
-import glob
-import json
-import os
-import plistlib
-import re
-import shutil
-import string
-import subprocess
-import sys
-import tempfile
-
-
-def main(args):
- executor = MacTool()
- exit_code = executor.Dispatch(args)
- if exit_code is not None:
- sys.exit(exit_code)
-
-
-class MacTool(object):
- """This class performs all the Mac tooling steps. The methods can either be
- executed directly, or dispatched from an argument list."""
-
- def Dispatch(self, args):
- """Dispatches a string command to a method."""
- if len(args) < 1:
- raise Exception("Not enough arguments")
-
- method = "Exec%s" % self._CommandifyName(args[0])
- return getattr(self, method)(*args[1:])
-
- def _CommandifyName(self, name_string):
- """Transforms a tool name like copy-info-plist to CopyInfoPlist"""
- return name_string.title().replace('-', '')
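As a usage illustration (the file paths are hypothetical), the generated Makefile invokes this script as ./gyp-mac-tool <command> <args>, and Dispatch plus _CommandifyName route that call to the matching Exec* method:

    # 'copy-info-plist' -> 'CopyInfoPlist' -> ExecCopyInfoPlist(source, dest, ...)
    tool = MacTool()
    tool.Dispatch(['copy-info-plist', 'src/Info.plist', 'out/Info.plist', 'False'])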
-
- def ExecCopyBundleResource(self, source, dest, convert_to_binary):
- """Copies a resource file to the bundle/Resources directory, performing any
- necessary compilation on each resource."""
- extension = os.path.splitext(source)[1].lower()
- if os.path.isdir(source):
- # Copy tree.
- # TODO(thakis): This copies file attributes like mtime, while the
- # single-file branch below doesn't. This should probably be changed to
- # be consistent with the single-file branch.
- if os.path.exists(dest):
- shutil.rmtree(dest)
- shutil.copytree(source, dest)
- elif extension == '.xib':
- return self._CopyXIBFile(source, dest)
- elif extension == '.storyboard':
- return self._CopyXIBFile(source, dest)
- elif extension == '.strings':
- self._CopyStringsFile(source, dest, convert_to_binary)
- else:
- shutil.copy(source, dest)
-
- def _CopyXIBFile(self, source, dest):
- """Compiles a XIB file with ibtool into a binary plist in the bundle."""
-
- # ibtool sometimes crashes with relative paths. See crbug.com/314728.
- base = os.path.dirname(os.path.realpath(__file__))
- if os.path.relpath(source):
- source = os.path.join(base, source)
- if os.path.relpath(dest):
- dest = os.path.join(base, dest)
-
- args = ['xcrun', 'ibtool', '--errors', '--warnings', '--notices',
- '--output-format', 'human-readable-text', '--compile', dest, source]
- ibtool_section_re = re.compile(r'/\*.*\*/')
- ibtool_re = re.compile(r'.*note:.*is clipping its content')
- ibtoolout = subprocess.Popen(args, stdout=subprocess.PIPE)
- current_section_header = None
- for line in ibtoolout.stdout:
- if ibtool_section_re.match(line):
- current_section_header = line
- elif not ibtool_re.match(line):
- if current_section_header:
- sys.stdout.write(current_section_header)
- current_section_header = None
- sys.stdout.write(line)
- return ibtoolout.returncode
-
- def _ConvertToBinary(self, dest):
- subprocess.check_call([
- 'xcrun', 'plutil', '-convert', 'binary1', '-o', dest, dest])
-
- def _CopyStringsFile(self, source, dest, convert_to_binary):
- """Copies a .strings file using iconv to reconvert the input into UTF-16."""
- input_code = self._DetectInputEncoding(source) or "UTF-8"
-
- # Xcode's CpyCopyStringsFile / builtin-copyStrings seems to call
- # CFPropertyListCreateFromXMLData() behind the scenes; at least it prints
- # CFPropertyListCreateFromXMLData(): Old-style plist parser: missing
- # semicolon in dictionary.
- # on invalid files. Do the same kind of validation.
- import CoreFoundation
- s = open(source, 'rb').read()
- d = CoreFoundation.CFDataCreate(None, s, len(s))
- _, error = CoreFoundation.CFPropertyListCreateFromXMLData(None, d, 0, None)
- if error:
- return
-
- fp = open(dest, 'wb')
- fp.write(s.decode(input_code).encode('UTF-16'))
- fp.close()
-
- if convert_to_binary == 'True':
- self._ConvertToBinary(dest)
-
- def _DetectInputEncoding(self, file_name):
- """Reads the first few bytes from file_name and tries to guess the text
- encoding. Returns None as a guess if it can't detect it."""
- fp = open(file_name, 'rb')
- try:
- header = fp.read(3)
-    except Exception:
- fp.close()
- return None
- fp.close()
- if header.startswith("\xFE\xFF"):
- return "UTF-16"
- elif header.startswith("\xFF\xFE"):
- return "UTF-16"
- elif header.startswith("\xEF\xBB\xBF"):
- return "UTF-8"
- else:
- return None
-
- def ExecCopyInfoPlist(self, source, dest, convert_to_binary, *keys):
- """Copies the |source| Info.plist to the destination directory |dest|."""
- # Read the source Info.plist into memory.
- fd = open(source, 'r')
- lines = fd.read()
- fd.close()
-
- # Insert synthesized key/value pairs (e.g. BuildMachineOSBuild).
- plist = plistlib.readPlistFromString(lines)
- if keys:
- plist = dict(plist.items() + json.loads(keys[0]).items())
- lines = plistlib.writePlistToString(plist)
-
- # Go through all the environment variables and replace them as variables in
- # the file.
- IDENT_RE = re.compile(r'[/\s]')
- for key in os.environ:
- if key.startswith('_'):
- continue
- evar = '${%s}' % key
- evalue = os.environ[key]
- lines = string.replace(lines, evar, evalue)
-
-      # Xcode supports various suffixes on environment variables, which are
- # all undocumented. :rfc1034identifier is used in the standard project
- # template these days, and :identifier was used earlier. They are used to
- # convert non-url characters into things that look like valid urls --
- # except that the replacement character for :identifier, '_' isn't valid
- # in a URL either -- oops, hence :rfc1034identifier was born.
- evar = '${%s:identifier}' % key
- evalue = IDENT_RE.sub('_', os.environ[key])
- lines = string.replace(lines, evar, evalue)
-
- evar = '${%s:rfc1034identifier}' % key
- evalue = IDENT_RE.sub('-', os.environ[key])
- lines = string.replace(lines, evar, evalue)
-
- # Remove any keys with values that haven't been replaced.
- lines = lines.split('\n')
- for i in range(len(lines)):
- if lines[i].strip().startswith("${"):
- lines[i] = None
- lines[i - 1] = None
- lines = '\n'.join(filter(lambda x: x is not None, lines))
-
- # Write out the file with variables replaced.
- fd = open(dest, 'w')
- fd.write(lines)
- fd.close()
-
-    # Now write out the PkgInfo file, since the Info.plist file has been
-    # "compiled".
- self._WritePkgInfo(dest)
-
- if convert_to_binary == 'True':
- self._ConvertToBinary(dest)
-
- def _WritePkgInfo(self, info_plist):
- """This writes the PkgInfo file from the data stored in Info.plist."""
- plist = plistlib.readPlist(info_plist)
- if not plist:
- return
-
- # Only create PkgInfo for executable types.
- package_type = plist['CFBundlePackageType']
- if package_type != 'APPL':
- return
-
- # The format of PkgInfo is eight characters, representing the bundle type
- # and bundle signature, each four characters. If that is missing, four
- # '?' characters are used instead.
- signature_code = plist.get('CFBundleSignature', '????')
- if len(signature_code) != 4: # Wrong length resets everything, too.
- signature_code = '?' * 4
-
- dest = os.path.join(os.path.dirname(info_plist), 'PkgInfo')
- fp = open(dest, 'w')
- fp.write('%s%s' % (package_type, signature_code))
- fp.close()
-
- def ExecFlock(self, lockfile, *cmd_list):
- """Emulates the most basic behavior of Linux's flock(1)."""
- # Rely on exception handling to report errors.
- fd = os.open(lockfile, os.O_RDONLY|os.O_NOCTTY|os.O_CREAT, 0o666)
- fcntl.flock(fd, fcntl.LOCK_EX)
- return subprocess.call(cmd_list)
-
- def ExecFilterLibtool(self, *cmd_list):
- """Calls libtool and filters out '/path/to/libtool: file: foo.o has no
- symbols'."""
- libtool_re = re.compile(r'^.*libtool: file: .* has no symbols$')
- libtool_re5 = re.compile(
- r'^.*libtool: warning for library: ' +
- r'.* the table of contents is empty ' +
- r'\(no object file members in the library define global symbols\)$')
- env = os.environ.copy()
- # Ref:
- # http://www.opensource.apple.com/source/cctools/cctools-809/misc/libtool.c
- # The problem with this flag is that it resets the file mtime on the file to
- # epoch=0, e.g. 1970-1-1 or 1969-12-31 depending on timezone.
- env['ZERO_AR_DATE'] = '1'
- libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env)
- _, err = libtoolout.communicate()
- for line in err.splitlines():
- if not libtool_re.match(line) and not libtool_re5.match(line):
- print >>sys.stderr, line
- # Unconditionally touch the output .a file on the command line if present
- # and the command succeeded. A bit hacky.
- if not libtoolout.returncode:
- for i in range(len(cmd_list) - 1):
- if cmd_list[i] == "-o" and cmd_list[i+1].endswith('.a'):
- os.utime(cmd_list[i+1], None)
- break
- return libtoolout.returncode
-
- def ExecPackageFramework(self, framework, version):
- """Takes a path to Something.framework and the Current version of that and
- sets up all the symlinks."""
- # Find the name of the binary based on the part before the ".framework".
- binary = os.path.basename(framework).split('.')[0]
-
- CURRENT = 'Current'
- RESOURCES = 'Resources'
- VERSIONS = 'Versions'
-
- if not os.path.exists(os.path.join(framework, VERSIONS, version, binary)):
- # Binary-less frameworks don't seem to contain symlinks (see e.g.
- # chromium's out/Debug/org.chromium.Chromium.manifest/ bundle).
- return
-
- # Move into the framework directory to set the symlinks correctly.
- pwd = os.getcwd()
- os.chdir(framework)
-
- # Set up the Current version.
- self._Relink(version, os.path.join(VERSIONS, CURRENT))
-
- # Set up the root symlinks.
- self._Relink(os.path.join(VERSIONS, CURRENT, binary), binary)
- self._Relink(os.path.join(VERSIONS, CURRENT, RESOURCES), RESOURCES)
-
- # Back to where we were before!
- os.chdir(pwd)
-
- def _Relink(self, dest, link):
- """Creates a symlink to |dest| named |link|. If |link| already exists,
- it is overwritten."""
- if os.path.lexists(link):
- os.remove(link)
- os.symlink(dest, link)
-
- def ExecCompileXcassets(self, keys, *inputs):
- """Compiles multiple .xcassets files into a single .car file.
-
-    This invokes 'actool' to compile all the input .xcassets files. The
-    |keys| argument is a json-encoded dictionary of extra arguments to
-    pass to 'actool' when the asset catalog contains an application icon
- or a launch image.
-
-    Note that 'actool' does not create the Assets.car file if the asset
-    catalog does not contain an imageset.
- """
- command_line = [
- 'xcrun', 'actool', '--output-format', 'human-readable-text',
- '--compress-pngs', '--notices', '--warnings', '--errors',
- ]
- is_iphone_target = 'IPHONEOS_DEPLOYMENT_TARGET' in os.environ
- if is_iphone_target:
- platform = os.environ['CONFIGURATION'].split('-')[-1]
- if platform not in ('iphoneos', 'iphonesimulator'):
- platform = 'iphonesimulator'
- command_line.extend([
- '--platform', platform, '--target-device', 'iphone',
- '--target-device', 'ipad', '--minimum-deployment-target',
- os.environ['IPHONEOS_DEPLOYMENT_TARGET'], '--compile',
- os.path.abspath(os.environ['CONTENTS_FOLDER_PATH']),
- ])
- else:
- command_line.extend([
- '--platform', 'macosx', '--target-device', 'mac',
- '--minimum-deployment-target', os.environ['MACOSX_DEPLOYMENT_TARGET'],
- '--compile',
- os.path.abspath(os.environ['UNLOCALIZED_RESOURCES_FOLDER_PATH']),
- ])
- if keys:
- keys = json.loads(keys)
- for key, value in keys.iteritems():
- arg_name = '--' + key
- if isinstance(value, bool):
- if value:
- command_line.append(arg_name)
- elif isinstance(value, list):
- for v in value:
- command_line.append(arg_name)
- command_line.append(str(v))
- else:
- command_line.append(arg_name)
- command_line.append(str(value))
-    # Note: actool crashes if input paths are relative, so use os.path.abspath
-    # to get absolute path names for the inputs.
- command_line.extend(map(os.path.abspath, inputs))
- subprocess.check_call(command_line)
-
- def ExecMergeInfoPlist(self, output, *inputs):
- """Merge multiple .plist files into a single .plist file."""
- merged_plist = {}
- for path in inputs:
- plist = self._LoadPlistMaybeBinary(path)
- self._MergePlist(merged_plist, plist)
- plistlib.writePlist(merged_plist, output)
-
- def ExecCodeSignBundle(self, key, resource_rules, entitlements, provisioning):
- """Code sign a bundle.
-
- This function tries to code sign an iOS bundle, following the same
- algorithm as Xcode:
- 1. copy ResourceRules.plist from the user or the SDK into the bundle,
-      2. pick the provisioning profile that best matches the bundle identifier,
- and copy it into the bundle as embedded.mobileprovision,
- 3. copy Entitlements.plist from user or SDK next to the bundle,
- 4. code sign the bundle.
- """
- resource_rules_path = self._InstallResourceRules(resource_rules)
- substitutions, overrides = self._InstallProvisioningProfile(
- provisioning, self._GetCFBundleIdentifier())
- entitlements_path = self._InstallEntitlements(
- entitlements, substitutions, overrides)
- subprocess.check_call([
- 'codesign', '--force', '--sign', key, '--resource-rules',
- resource_rules_path, '--entitlements', entitlements_path,
- os.path.join(
- os.environ['TARGET_BUILD_DIR'],
- os.environ['FULL_PRODUCT_NAME'])])
-
- def _InstallResourceRules(self, resource_rules):
- """Installs ResourceRules.plist from user or SDK into the bundle.
-
- Args:
- resource_rules: string, optional, path to the ResourceRules.plist file
-        to use, defaults to "${SDKROOT}/ResourceRules.plist"
-
- Returns:
- Path to the copy of ResourceRules.plist into the bundle.
- """
- source_path = resource_rules
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['CONTENTS_FOLDER_PATH'],
- 'ResourceRules.plist')
- if not source_path:
- source_path = os.path.join(
- os.environ['SDKROOT'], 'ResourceRules.plist')
- shutil.copy2(source_path, target_path)
- return target_path
-
- def _InstallProvisioningProfile(self, profile, bundle_identifier):
- """Installs embedded.mobileprovision into the bundle.
-
- Args:
- profile: string, optional, short name of the .mobileprovision file
- to use, if empty or the file is missing, the best file installed
- will be used
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
-
- Returns:
-      A tuple containing two dictionaries: variable substitutions and values
-      to override when generating the entitlements file.
- """
- source_path, provisioning_data, team_id = self._FindProvisioningProfile(
- profile, bundle_identifier)
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['CONTENTS_FOLDER_PATH'],
- 'embedded.mobileprovision')
- shutil.copy2(source_path, target_path)
- substitutions = self._GetSubstitutions(bundle_identifier, team_id + '.')
- return substitutions, provisioning_data['Entitlements']
-
- def _FindProvisioningProfile(self, profile, bundle_identifier):
- """Finds the .mobileprovision file to use for signing the bundle.
-
-    Checks all the installed provisioning profiles (or, if the user specified
-    the PROVISIONING_PROFILE variable, only that one) and selects the most
-    specific one that corresponds to the bundle identifier.
-
- Args:
- profile: string, optional, short name of the .mobileprovision file
- to use, if empty or the file is missing, the best file installed
- will be used
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
-
- Returns:
- A tuple of the path to the selected provisioning profile, the data of
- the embedded plist in the provisioning profile and the team identifier
- to use for code signing.
-
- Raises:
- SystemExit: if no .mobileprovision can be used to sign the bundle.
- """
- profiles_dir = os.path.join(
- os.environ['HOME'], 'Library', 'MobileDevice', 'Provisioning Profiles')
- if not os.path.isdir(profiles_dir):
- print >>sys.stderr, (
- 'cannot find mobile provisioning for %s' % bundle_identifier)
- sys.exit(1)
- provisioning_profiles = None
- if profile:
- profile_path = os.path.join(profiles_dir, profile + '.mobileprovision')
- if os.path.exists(profile_path):
- provisioning_profiles = [profile_path]
- if not provisioning_profiles:
- provisioning_profiles = glob.glob(
- os.path.join(profiles_dir, '*.mobileprovision'))
- valid_provisioning_profiles = {}
- for profile_path in provisioning_profiles:
- profile_data = self._LoadProvisioningProfile(profile_path)
- app_id_pattern = profile_data.get(
- 'Entitlements', {}).get('application-identifier', '')
- for team_identifier in profile_data.get('TeamIdentifier', []):
- app_id = '%s.%s' % (team_identifier, bundle_identifier)
- if fnmatch.fnmatch(app_id, app_id_pattern):
- valid_provisioning_profiles[app_id_pattern] = (
- profile_path, profile_data, team_identifier)
- if not valid_provisioning_profiles:
- print >>sys.stderr, (
- 'cannot find mobile provisioning for %s' % bundle_identifier)
- sys.exit(1)
- # If the user has multiple provisioning profiles installed that can be
- # used for ${bundle_identifier}, pick the most specific one (ie. the
- # provisioning profile whose pattern is the longest).
- selected_key = max(valid_provisioning_profiles, key=lambda v: len(v))
- return valid_provisioning_profiles[selected_key]
-
- def _LoadProvisioningProfile(self, profile_path):
- """Extracts the plist embedded in a provisioning profile.
-
- Args:
- profile_path: string, path to the .mobileprovision file
-
- Returns:
- Content of the plist embedded in the provisioning profile as a dictionary.
- """
- with tempfile.NamedTemporaryFile() as temp:
- subprocess.check_call([
- 'security', 'cms', '-D', '-i', profile_path, '-o', temp.name])
- return self._LoadPlistMaybeBinary(temp.name)
-
- def _MergePlist(self, merged_plist, plist):
- """Merge |plist| into |merged_plist|."""
- for key, value in plist.iteritems():
- if isinstance(value, dict):
- merged_value = merged_plist.get(key, {})
- if isinstance(merged_value, dict):
- self._MergePlist(merged_value, value)
- merged_plist[key] = merged_value
- else:
- merged_plist[key] = value
- else:
- merged_plist[key] = value
-
- def _LoadPlistMaybeBinary(self, plist_path):
- """Loads into a memory a plist possibly encoded in binary format.
-
- This is a wrapper around plistlib.readPlist that tries to convert the
- plist to the XML format if it can't be parsed (assuming that it is in
- the binary format).
-
- Args:
- plist_path: string, path to a plist file, in XML or binary format
-
- Returns:
- Content of the plist as a dictionary.
- """
- try:
-      # First, try to read the file using plistlib, which only supports XML,
- # and if an exception is raised, convert a temporary copy to XML and
- # load that copy.
- return plistlib.readPlist(plist_path)
- except:
- pass
- with tempfile.NamedTemporaryFile() as temp:
- shutil.copy2(plist_path, temp.name)
- subprocess.check_call(['plutil', '-convert', 'xml1', temp.name])
- return plistlib.readPlist(temp.name)
-
- def _GetSubstitutions(self, bundle_identifier, app_identifier_prefix):
- """Constructs a dictionary of variable substitutions for Entitlements.plist.
-
- Args:
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
- app_identifier_prefix: string, value for AppIdentifierPrefix
-
- Returns:
- Dictionary of substitutions to apply when generating Entitlements.plist.
- """
- return {
- 'CFBundleIdentifier': bundle_identifier,
- 'AppIdentifierPrefix': app_identifier_prefix,
- }
-
- def _GetCFBundleIdentifier(self):
- """Extracts CFBundleIdentifier value from Info.plist in the bundle.
-
- Returns:
- Value of CFBundleIdentifier in the Info.plist located in the bundle.
- """
- info_plist_path = os.path.join(
- os.environ['TARGET_BUILD_DIR'],
- os.environ['INFOPLIST_PATH'])
- info_plist_data = self._LoadPlistMaybeBinary(info_plist_path)
- return info_plist_data['CFBundleIdentifier']
-
- def _InstallEntitlements(self, entitlements, substitutions, overrides):
- """Generates and install the ${BundleName}.xcent entitlements file.
-
-    Expands the "$(variable)" patterns in the source entitlements file,
-    adds the extra entitlements defined in the .mobileprovision file and
-    copies the generated plist to "${BundlePath}.xcent".
-
- Args:
- entitlements: string, optional, path to the Entitlements.plist template
- to use, defaults to "${SDKROOT}/Entitlements.plist"
- substitutions: dictionary, variable substitutions
- overrides: dictionary, values to add to the entitlements
-
- Returns:
- Path to the generated entitlements file.
- """
- source_path = entitlements
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['PRODUCT_NAME'] + '.xcent')
- if not source_path:
- source_path = os.path.join(
- os.environ['SDKROOT'],
- 'Entitlements.plist')
- shutil.copy2(source_path, target_path)
- data = self._LoadPlistMaybeBinary(target_path)
- data = self._ExpandVariables(data, substitutions)
- if overrides:
- for key in overrides:
- if key not in data:
- data[key] = overrides[key]
- plistlib.writePlist(data, target_path)
- return target_path
-
- def _ExpandVariables(self, data, substitutions):
- """Expands variables "$(variable)" in data.
-
- Args:
- data: object, can be either string, list or dictionary
- substitutions: dictionary, variable substitutions to perform
-
- Returns:
-      Copy of data where each reference to "$(variable)" has been replaced
- by the corresponding value found in substitutions, or left intact if
- the key was not found.
- """
- if isinstance(data, str):
- for key, value in substitutions.iteritems():
- data = data.replace('$(%s)' % key, value)
- return data
- if isinstance(data, list):
- return [self._ExpandVariables(v, substitutions) for v in data]
- if isinstance(data, dict):
- return {k: self._ExpandVariables(data[k], substitutions) for k in data}
- return data
-
-if __name__ == '__main__':
- sys.exit(main(sys.argv[1:]))
diff --git a/test/addons-napi/test_dataview/build/test_dataview.target.mk b/test/addons-napi/test_dataview/build/test_dataview.target.mk
deleted file mode 100644
index 1a2bb312a489ba..00000000000000
--- a/test/addons-napi/test_dataview/build/test_dataview.target.mk
+++ /dev/null
@@ -1,178 +0,0 @@
-# This file is generated by gyp; do not edit.
-
-TOOLSET := target
-TARGET := test_dataview
-DEFS_Debug := \
- '-DNODE_GYP_MODULE_NAME=test_dataview' \
- '-DUSING_UV_SHARED=1' \
- '-DUSING_V8_SHARED=1' \
- '-DV8_DEPRECATION_WARNINGS=1' \
- '-D_DARWIN_USE_64_BIT_INODE=1' \
- '-D_LARGEFILE_SOURCE' \
- '-D_FILE_OFFSET_BITS=64' \
- '-DBUILDING_NODE_EXTENSION' \
- '-DDEBUG' \
- '-D_DEBUG' \
- '-DV8_ENABLE_CHECKS'
-
-# Flags passed to all source files.
-CFLAGS_Debug := \
- -O0 \
- -gdwarf-2 \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -Wall \
- -Wendif-labels \
- -W \
- -Wno-unused-parameter
-
-# Flags passed to only C files.
-CFLAGS_C_Debug := \
- -fno-strict-aliasing
-
-# Flags passed to only C++ files.
-CFLAGS_CC_Debug := \
- -std=gnu++0x \
- -stdlib=libc++ \
- -fno-rtti \
- -fno-exceptions \
- -fno-threadsafe-statics \
- -fno-strict-aliasing
-
-# Flags passed to only ObjC files.
-CFLAGS_OBJC_Debug :=
-
-# Flags passed to only ObjC++ files.
-CFLAGS_OBJCC_Debug :=
-
-INCS_Debug := \
- -I/Users/trott/io.js/include/node \
- -I/Users/trott/io.js/src \
- -I/Users/trott/io.js/deps/uv/include \
- -I/Users/trott/io.js/deps/v8/include
-
-DEFS_Release := \
- '-DNODE_GYP_MODULE_NAME=test_dataview' \
- '-DUSING_UV_SHARED=1' \
- '-DUSING_V8_SHARED=1' \
- '-DV8_DEPRECATION_WARNINGS=1' \
- '-D_DARWIN_USE_64_BIT_INODE=1' \
- '-D_LARGEFILE_SOURCE' \
- '-D_FILE_OFFSET_BITS=64' \
- '-DBUILDING_NODE_EXTENSION'
-
-# Flags passed to all source files.
-CFLAGS_Release := \
- -Os \
- -gdwarf-2 \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -Wall \
- -Wendif-labels \
- -W \
- -Wno-unused-parameter
-
-# Flags passed to only C files.
-CFLAGS_C_Release := \
- -fno-strict-aliasing
-
-# Flags passed to only C++ files.
-CFLAGS_CC_Release := \
- -std=gnu++0x \
- -stdlib=libc++ \
- -fno-rtti \
- -fno-exceptions \
- -fno-threadsafe-statics \
- -fno-strict-aliasing
-
-# Flags passed to only ObjC files.
-CFLAGS_OBJC_Release :=
-
-# Flags passed to only ObjC++ files.
-CFLAGS_OBJCC_Release :=
-
-INCS_Release := \
- -I/Users/trott/io.js/include/node \
- -I/Users/trott/io.js/src \
- -I/Users/trott/io.js/deps/uv/include \
- -I/Users/trott/io.js/deps/v8/include
-
-OBJS := \
- $(obj).target/$(TARGET)/test_dataview.o
-
-# Add to the list of files we specially track dependencies for.
-all_deps += $(OBJS)
-
-# CFLAGS et al overrides must be target-local.
-# See "Target-specific Variable Values" in the GNU Make manual.
-$(OBJS): TOOLSET := $(TOOLSET)
-$(OBJS): GYP_CFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE))
-$(OBJS): GYP_CXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE))
-$(OBJS): GYP_OBJCFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) $(CFLAGS_OBJC_$(BUILDTYPE))
-$(OBJS): GYP_OBJCXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) $(CFLAGS_OBJCC_$(BUILDTYPE))
-
-# Suffix rules, putting all outputs into $(obj).
-
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-# Try building from generated source, too.
-
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-# End of this set of suffix rules
-### Rules for final target.
-LDFLAGS_Debug := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -L$(builddir) \
- -stdlib=libc++
-
-LIBTOOLFLAGS_Debug := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first
-
-LDFLAGS_Release := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -L$(builddir) \
- -stdlib=libc++
-
-LIBTOOLFLAGS_Release := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first
-
-LIBS :=
-
-$(builddir)/test_dataview.node: GYP_LDFLAGS := $(LDFLAGS_$(BUILDTYPE))
-$(builddir)/test_dataview.node: LIBS := $(LIBS)
-$(builddir)/test_dataview.node: GYP_LIBTOOLFLAGS := $(LIBTOOLFLAGS_$(BUILDTYPE))
-$(builddir)/test_dataview.node: TOOLSET := $(TOOLSET)
-$(builddir)/test_dataview.node: $(OBJS) FORCE_DO_CMD
- $(call do_cmd,solink_module)
-
-all_deps += $(builddir)/test_dataview.node
-# Add target alias
-.PHONY: test_dataview
-test_dataview: $(builddir)/test_dataview.node
-
-# Short alias for building this executable.
-.PHONY: test_dataview.node
-test_dataview.node: $(builddir)/test_dataview.node
-
-# Add executable to "all" target.
-.PHONY: all
-all: $(builddir)/test_dataview.node
-
diff --git a/test/addons-napi/test_env_sharing/build/Makefile b/test/addons-napi/test_env_sharing/build/Makefile
deleted file mode 100644
index b8333ee4553e88..00000000000000
--- a/test/addons-napi/test_env_sharing/build/Makefile
+++ /dev/null
@@ -1,347 +0,0 @@
-# We borrow heavily from the kernel build setup, though we are simpler since
-# we don't have Kconfig tweaking settings on us.
-
-# The implicit make rules have it looking for RCS files, among other things.
-# We instead explicitly write all the rules we care about.
-# It's even quicker (saves ~200ms) to pass -r on the command line.
-MAKEFLAGS=-r
-
-# The source directory tree.
-srcdir := ..
-abs_srcdir := $(abspath $(srcdir))
-
-# The name of the builddir.
-builddir_name ?= .
-
-# The V=1 flag on command line makes us verbosely print command lines.
-ifdef V
- quiet=
-else
- quiet=quiet_
-endif
-
-# Specify BUILDTYPE=Release on the command line for a release build.
-BUILDTYPE ?= Release
-
-# Directory all our build output goes into.
-# Note that this must be two directories beneath src/ for unit tests to pass,
-# as they reach into the src/ directory for data with relative paths.
-builddir ?= $(builddir_name)/$(BUILDTYPE)
-abs_builddir := $(abspath $(builddir))
-depsdir := $(builddir)/.deps
-
-# Object output directory.
-obj := $(builddir)/obj
-abs_obj := $(abspath $(obj))
-
-# We build up a list of every single one of the targets so we can slurp in the
-# generated dependency rule Makefiles in one pass.
-all_deps :=
-
-
-
-CC.target ?= $(CC)
-CFLAGS.target ?= $(CPPFLAGS) $(CFLAGS)
-CXX.target ?= $(CXX)
-CXXFLAGS.target ?= $(CPPFLAGS) $(CXXFLAGS)
-LINK.target ?= $(LINK)
-LDFLAGS.target ?= $(LDFLAGS)
-AR.target ?= $(AR)
-
-# C++ apps need to be linked with g++.
-LINK ?= $(CXX.target)
-
-# TODO(evan): move all cross-compilation logic to gyp-time so we don't need
-# to replicate this environment fallback in make as well.
-CC.host ?= gcc
-CFLAGS.host ?= $(CPPFLAGS_host) $(CFLAGS_host)
-CXX.host ?= g++
-CXXFLAGS.host ?= $(CPPFLAGS_host) $(CXXFLAGS_host)
-LINK.host ?= $(CXX.host)
-LDFLAGS.host ?=
-AR.host ?= ar
-
-# Define a dir function that can handle spaces.
-# http://www.gnu.org/software/make/manual/make.html#Syntax-of-Functions
-# "leading spaces cannot appear in the text of the first argument as written.
-# These characters can be put into the argument value by variable substitution."
-empty :=
-space := $(empty) $(empty)
-
-# http://stackoverflow.com/questions/1189781/using-make-dir-or-notdir-on-a-path-with-spaces
-replace_spaces = $(subst $(space),?,$1)
-unreplace_spaces = $(subst ?,$(space),$1)
-dirx = $(call unreplace_spaces,$(dir $(call replace_spaces,$1)))
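An approximate Python rendering of the space-protection trick above (approximate because Make's $(dir) keeps the trailing slash and returns './' when there is no directory part, unlike os.path.dirname):

    import os

    def dirx(path):
        # Protect spaces with '?', take the directory part, then put the
        # spaces back.
        protected = path.replace(' ', '?')
        return os.path.dirname(protected).replace('?', ' ')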
-
-# Flags to make gcc output dependency info. Note that you need to be
-# careful here to use the flags that ccache and distcc can understand.
-# We write to a dep file on the side first and then rename at the end
-# so we can't end up with a broken dep file.
-depfile = $(depsdir)/$(call replace_spaces,$@).d
-DEPFLAGS = -MMD -MF $(depfile).raw
-
-# We have to fixup the deps output in a few ways.
-# (1) the file output should mention the proper .o file.
-# ccache or distcc lose the path to the target, so we convert a rule of
-# the form:
-# foobar.o: DEP1 DEP2
-# into
-# path/to/foobar.o: DEP1 DEP2
-# (2) we want missing files not to cause us to fail to build.
-# We want to rewrite
-# foobar.o: DEP1 DEP2 \
-# DEP3
-# to
-# DEP1:
-# DEP2:
-# DEP3:
-# so if the files are missing, they're just considered phony rules.
-# We have to do some pretty insane escaping to get those backslashes
-# and dollar signs past make, the shell, and sed at the same time.
-# Doesn't work with spaces, but that's fine: .d files have spaces in
-# their names replaced with other characters.
-define fixup_dep
-# The depfile may not exist if the input file didn't have any #includes.
-touch $(depfile).raw
-# Fixup path as in (1).
-sed -e "s|^$(notdir $@)|$@|" $(depfile).raw >> $(depfile)
-# Add extra rules as in (2).
-# We remove slashes and replace spaces with new lines;
-# remove blank lines;
-# delete the first line and append a colon to the remaining lines.
-sed -e 's|\\||' -e 'y| |\n|' $(depfile).raw |\
- grep -v '^$$' |\
- sed -e 1d -e 's|$$|:|' \
- >> $(depfile)
-rm $(depfile).raw
-endef
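A rough Python sketch of the same two fixups (the sed pipeline's corner cases are not reproduced; this only shows the shape of the transformation, and the sample depfile text is hypothetical):

    import os
    import re

    def fixup_dep(raw_text, target):
        # (1) Make the rule mention the full target path, not just its basename.
        fixed = re.sub(r'^' + re.escape(os.path.basename(target)), target,
                       raw_text, count=1)
        # (2) Emit an empty rule for every prerequisite so a deleted header
        #     becomes a phony target instead of a fatal error.
        tokens = raw_text.replace('\\', ' ').split()
        phony_rules = '\n'.join(t + ':' for t in tokens[1:])
        return fixed.rstrip('\n') + '\n' + phony_rules + '\n'

    # e.g. with raw_text 'foo.o: a.h b.h' and target 'out/obj/foo.o', this
    # yields 'out/obj/foo.o: a.h b.h' followed by the lines 'a.h:' and 'b.h:'.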
-
-# Command definitions:
-# - cmd_foo is the actual command to run;
-# - quiet_cmd_foo is the brief-output summary of the command.
-
-quiet_cmd_cc = CC($(TOOLSET)) $@
-cmd_cc = $(CC.$(TOOLSET)) $(GYP_CFLAGS) $(DEPFLAGS) $(CFLAGS.$(TOOLSET)) -c -o $@ $<
-
-quiet_cmd_cxx = CXX($(TOOLSET)) $@
-cmd_cxx = $(CXX.$(TOOLSET)) $(GYP_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-
-quiet_cmd_objc = CXX($(TOOLSET)) $@
-cmd_objc = $(CC.$(TOOLSET)) $(GYP_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-quiet_cmd_objcxx = CXX($(TOOLSET)) $@
-cmd_objcxx = $(CXX.$(TOOLSET)) $(GYP_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-# Commands for precompiled header files.
-quiet_cmd_pch_c = CXX($(TOOLSET)) $@
-cmd_pch_c = $(CC.$(TOOLSET)) $(GYP_PCH_CFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-quiet_cmd_pch_cc = CXX($(TOOLSET)) $@
-cmd_pch_cc = $(CC.$(TOOLSET)) $(GYP_PCH_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-quiet_cmd_pch_m = CXX($(TOOLSET)) $@
-cmd_pch_m = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $<
-quiet_cmd_pch_mm = CXX($(TOOLSET)) $@
-cmd_pch_mm = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-# gyp-mac-tool is written next to the root Makefile by gyp.
-# Use $(4) for the command, since $(2) and $(3) are used as flags by do_cmd
-# already.
-quiet_cmd_mac_tool = MACTOOL $(4) $<
-cmd_mac_tool = ./gyp-mac-tool $(4) $< "$@"
-
-quiet_cmd_mac_package_framework = PACKAGE FRAMEWORK $@
-cmd_mac_package_framework = ./gyp-mac-tool package-framework "$@" $(4)
-
-quiet_cmd_infoplist = INFOPLIST $@
-cmd_infoplist = $(CC.$(TOOLSET)) -E -P -Wno-trigraphs -x c $(INFOPLIST_DEFINES) "$<" -o "$@"
-
-quiet_cmd_touch = TOUCH $@
-cmd_touch = touch $@
-
-quiet_cmd_copy = COPY $@
-# send stderr to /dev/null to ignore messages when linking directories.
-cmd_copy = rm -rf "$@" && cp -af "$<" "$@"
-
-quiet_cmd_alink = LIBTOOL-STATIC $@
-cmd_alink = rm -f $@ && ./gyp-mac-tool filter-libtool libtool $(GYP_LIBTOOLFLAGS) -static -o $@ $(filter %.o,$^)
-
-quiet_cmd_link = LINK($(TOOLSET)) $@
-cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS)
-
-quiet_cmd_solink = SOLINK($(TOOLSET)) $@
-cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS)
-
-quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@
-cmd_solink_module = $(LINK.$(TOOLSET)) -bundle $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS)
-
-
-# Define an escape_quotes function to escape single quotes.
-# This allows us to handle quotes properly as long as we always use
-# single quotes and escape_quotes.
-escape_quotes = $(subst ','\'',$(1))
-# This comment is here just to include a ' to unconfuse syntax highlighting.
-# Define an escape_vars function to escape '$' variable syntax.
-# This allows us to read/write command lines with shell variables (e.g.
-# $LD_LIBRARY_PATH), without triggering make substitution.
-escape_vars = $(subst $$,$$$$,$(1))
-# Helper that expands to a shell command to echo a string exactly as it is in
-# make. This uses printf instead of echo because printf's behaviour with respect
-# to escape sequences is more portable than echo's across different shells
-# (e.g., dash, bash).
-exact_echo = printf '%s\n' '$(call escape_quotes,$(1))'
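For reference, the two escape helpers amount to these string rewrites (a minimal sketch; the quote handling is the usual close-quote, escaped-quote, reopen-quote idiom for sh):

    def escape_quotes(s):
        # ' becomes '\'' : end the quoted string, emit a literal quote,
        # then reopen the quote.
        return s.replace("'", "'\\''")

    def escape_vars(s):
        # $ becomes $$ so make does not expand variables when the recorded
        # command line is read back from the .d file.
        return s.replace('$', '$$')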
-
-# Helper to compare the command we're about to run against the command
-# we logged the last time we ran the command. Produces an empty
-# string (false) when the commands match.
-# Tricky point: Make has no string-equality test function.
-# The kernel uses the following, but it seems like it would give false
-# positives when one string is merely a reordering of the other's arguments.
-# arg_check = $(strip $(filter-out $(cmd_$(1)), $(cmd_$@)) \
-# $(filter-out $(cmd_$@), $(cmd_$(1))))
-# We instead substitute each for the empty string into the other, and
-# say they're equal if both substitutions produce the empty string.
-# .d files contain ? instead of spaces, take that into account.
-command_changed = $(or $(subst $(cmd_$(1)),,$(cmd_$(call replace_spaces,$@))),\
- $(subst $(cmd_$(call replace_spaces,$@)),,$(cmd_$(1))))
-
-# Helper that is non-empty when a prerequisite changes.
-# Normally make does this implicitly, but we force rules to always run
-# so we can check their command lines.
-# $? -- new prerequisites
-# $| -- order-only dependencies
-prereq_changed = $(filter-out FORCE_DO_CMD,$(filter-out $|,$?))
-
-# Helper that executes all postbuilds until one fails.
-define do_postbuilds
- @E=0;\
- for p in $(POSTBUILDS); do\
- eval $$p;\
- E=$$?;\
- if [ $$E -ne 0 ]; then\
- break;\
- fi;\
- done;\
- if [ $$E -ne 0 ]; then\
- rm -rf "$@";\
- exit $$E;\
- fi
-endef
-
-# do_cmd: run a command via the above cmd_foo names, if necessary.
-# Should always run for a given target to handle command-line changes.
-# Second argument, if non-zero, makes it do asm/C/C++ dependency munging.
-# Third argument, if non-zero, makes it do POSTBUILDS processing.
-# Note: We intentionally do NOT call dirx for depfile, since it contains ? for
-# spaces already and dirx strips the ? characters.
-define do_cmd
-$(if $(or $(command_changed),$(prereq_changed)),
- @$(call exact_echo, $($(quiet)cmd_$(1)))
- @mkdir -p "$(call dirx,$@)" "$(dir $(depfile))"
- $(if $(findstring flock,$(word 2,$(cmd_$1))),
- @$(cmd_$(1))
- @echo " $(quiet_cmd_$(1)): Finished",
- @$(cmd_$(1))
- )
- @$(call exact_echo,$(call escape_vars,cmd_$(call replace_spaces,$@) := $(cmd_$(1)))) > $(depfile)
- @$(if $(2),$(fixup_dep))
- $(if $(and $(3), $(POSTBUILDS)),
- $(call do_postbuilds)
- )
-)
-endef
-
-# Declare the "all" target first so it is the default,
-# even though we don't have the deps yet.
-.PHONY: all
-all:
-
-# make looks for ways to re-generate included makefiles, but in our case, we
-# don't have a direct way. Explicitly telling make that it has nothing to do
-# for them makes it go faster.
-%.d: ;
-
-# Use FORCE_DO_CMD to force a target to run. Should be coupled with
-# do_cmd.
-.PHONY: FORCE_DO_CMD
-FORCE_DO_CMD:
-
-TOOLSET := target
-# Suffix rules, putting all outputs into $(obj).
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cpp FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cxx FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.m FORCE_DO_CMD
- @$(call do_cmd,objc,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.mm FORCE_DO_CMD
- @$(call do_cmd,objcxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.S FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.s FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-# Try building from generated source, too.
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cpp FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cxx FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.m FORCE_DO_CMD
- @$(call do_cmd,objc,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.mm FORCE_DO_CMD
- @$(call do_cmd,objcxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.S FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.s FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-$(obj).$(TOOLSET)/%.o: $(obj)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.cpp FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.cxx FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.m FORCE_DO_CMD
- @$(call do_cmd,objc,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.mm FORCE_DO_CMD
- @$(call do_cmd,objcxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.S FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.s FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-
-ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
- $(findstring $(join ^,$(prefix)),\
- $(join ^,compare_env.target.mk)))),)
- include compare_env.target.mk
-endif
-ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
- $(findstring $(join ^,$(prefix)),\
- $(join ^,store_env.target.mk)))),)
- include store_env.target.mk
-endif
-
-quiet_cmd_regen_makefile = ACTION Regenerating $@
-cmd_regen_makefile = cd $(srcdir); /Users/trott/io.js/deps/npm/node_modules/node-gyp/gyp/gyp_main.py -fmake --ignore-environment "--toplevel-dir=." -I/Users/trott/io.js/test/addons-napi/test_env_sharing/build/config.gypi -I/Users/trott/io.js/deps/npm/node_modules/node-gyp/addon.gypi -I/Users/trott/io.js/common.gypi "--depth=." "-Goutput_dir=." "--generator-output=build" "-Dlibrary=shared_library" "-Dvisibility=default" "-Dnode_root_dir=/Users/trott/io.js" "-Dnode_gyp_dir=/Users/trott/io.js/deps/npm/node_modules/node-gyp" "-Dnode_lib_file=/Users/trott/io.js/$(Configuration)/node.lib" "-Dmodule_root_dir=/Users/trott/io.js/test/addons-napi/test_env_sharing" "-Dnode_engine=v8" binding.gyp
-Makefile: $(srcdir)/../../../deps/npm/node_modules/node-gyp/addon.gypi $(srcdir)/build/config.gypi $(srcdir)/binding.gyp $(srcdir)/../../../common.gypi
- $(call do_cmd,regen_makefile)
-
-# "all" is a concatenation of the "all" targets from all the included
-# sub-makefiles. This is just here to clarify.
-all:
-
-# Add in dependency-tracking rules. $(all_deps) is the list of every single
-# target in our tree. Only consider the ones with .d (dependency) info:
-d_files := $(wildcard $(foreach f,$(all_deps),$(depsdir)/$(f).d))
-ifneq ($(d_files),)
- include $(d_files)
-endif
diff --git a/test/addons-napi/test_env_sharing/build/binding.Makefile b/test/addons-napi/test_env_sharing/build/binding.Makefile
deleted file mode 100644
index df6645aac06baf..00000000000000
--- a/test/addons-napi/test_env_sharing/build/binding.Makefile
+++ /dev/null
@@ -1,6 +0,0 @@
-# This file is generated by gyp; do not edit.
-
-export builddir_name ?= ./build/.
-.PHONY: all
-all:
- $(MAKE) compare_env store_env
diff --git a/test/addons-napi/test_env_sharing/build/compare_env.target.mk b/test/addons-napi/test_env_sharing/build/compare_env.target.mk
deleted file mode 100644
index 961db4f5157bcc..00000000000000
--- a/test/addons-napi/test_env_sharing/build/compare_env.target.mk
+++ /dev/null
@@ -1,178 +0,0 @@
-# This file is generated by gyp; do not edit.
-
-TOOLSET := target
-TARGET := compare_env
-DEFS_Debug := \
- '-DNODE_GYP_MODULE_NAME=compare_env' \
- '-DUSING_UV_SHARED=1' \
- '-DUSING_V8_SHARED=1' \
- '-DV8_DEPRECATION_WARNINGS=1' \
- '-D_DARWIN_USE_64_BIT_INODE=1' \
- '-D_LARGEFILE_SOURCE' \
- '-D_FILE_OFFSET_BITS=64' \
- '-DBUILDING_NODE_EXTENSION' \
- '-DDEBUG' \
- '-D_DEBUG' \
- '-DV8_ENABLE_CHECKS'
-
-# Flags passed to all source files.
-CFLAGS_Debug := \
- -O0 \
- -gdwarf-2 \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -Wall \
- -Wendif-labels \
- -W \
- -Wno-unused-parameter
-
-# Flags passed to only C files.
-CFLAGS_C_Debug := \
- -fno-strict-aliasing
-
-# Flags passed to only C++ files.
-CFLAGS_CC_Debug := \
- -std=gnu++0x \
- -stdlib=libc++ \
- -fno-rtti \
- -fno-exceptions \
- -fno-threadsafe-statics \
- -fno-strict-aliasing
-
-# Flags passed to only ObjC files.
-CFLAGS_OBJC_Debug :=
-
-# Flags passed to only ObjC++ files.
-CFLAGS_OBJCC_Debug :=
-
-INCS_Debug := \
- -I/Users/trott/io.js/include/node \
- -I/Users/trott/io.js/src \
- -I/Users/trott/io.js/deps/uv/include \
- -I/Users/trott/io.js/deps/v8/include
-
-DEFS_Release := \
- '-DNODE_GYP_MODULE_NAME=compare_env' \
- '-DUSING_UV_SHARED=1' \
- '-DUSING_V8_SHARED=1' \
- '-DV8_DEPRECATION_WARNINGS=1' \
- '-D_DARWIN_USE_64_BIT_INODE=1' \
- '-D_LARGEFILE_SOURCE' \
- '-D_FILE_OFFSET_BITS=64' \
- '-DBUILDING_NODE_EXTENSION'
-
-# Flags passed to all source files.
-CFLAGS_Release := \
- -Os \
- -gdwarf-2 \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -Wall \
- -Wendif-labels \
- -W \
- -Wno-unused-parameter
-
-# Flags passed to only C files.
-CFLAGS_C_Release := \
- -fno-strict-aliasing
-
-# Flags passed to only C++ files.
-CFLAGS_CC_Release := \
- -std=gnu++0x \
- -stdlib=libc++ \
- -fno-rtti \
- -fno-exceptions \
- -fno-threadsafe-statics \
- -fno-strict-aliasing
-
-# Flags passed to only ObjC files.
-CFLAGS_OBJC_Release :=
-
-# Flags passed to only ObjC++ files.
-CFLAGS_OBJCC_Release :=
-
-INCS_Release := \
- -I/Users/trott/io.js/include/node \
- -I/Users/trott/io.js/src \
- -I/Users/trott/io.js/deps/uv/include \
- -I/Users/trott/io.js/deps/v8/include
-
-OBJS := \
- $(obj).target/$(TARGET)/compare_env.o
-
-# Add to the list of files we specially track dependencies for.
-all_deps += $(OBJS)
-
-# CFLAGS et al overrides must be target-local.
-# See "Target-specific Variable Values" in the GNU Make manual.
-$(OBJS): TOOLSET := $(TOOLSET)
-$(OBJS): GYP_CFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE))
-$(OBJS): GYP_CXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE))
-$(OBJS): GYP_OBJCFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) $(CFLAGS_OBJC_$(BUILDTYPE))
-$(OBJS): GYP_OBJCXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) $(CFLAGS_OBJCC_$(BUILDTYPE))
-
-# Suffix rules, putting all outputs into $(obj).
-
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-# Try building from generated source, too.
-
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-# End of this set of suffix rules
-### Rules for final target.
-LDFLAGS_Debug := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -L$(builddir) \
- -stdlib=libc++
-
-LIBTOOLFLAGS_Debug := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first
-
-LDFLAGS_Release := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -L$(builddir) \
- -stdlib=libc++
-
-LIBTOOLFLAGS_Release := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first
-
-LIBS :=
-
-$(builddir)/compare_env.node: GYP_LDFLAGS := $(LDFLAGS_$(BUILDTYPE))
-$(builddir)/compare_env.node: LIBS := $(LIBS)
-$(builddir)/compare_env.node: GYP_LIBTOOLFLAGS := $(LIBTOOLFLAGS_$(BUILDTYPE))
-$(builddir)/compare_env.node: TOOLSET := $(TOOLSET)
-$(builddir)/compare_env.node: $(OBJS) FORCE_DO_CMD
- $(call do_cmd,solink_module)
-
-all_deps += $(builddir)/compare_env.node
-# Add target alias
-.PHONY: compare_env
-compare_env: $(builddir)/compare_env.node
-
-# Short alias for building this executable.
-.PHONY: compare_env.node
-compare_env.node: $(builddir)/compare_env.node
-
-# Add executable to "all" target.
-.PHONY: all
-all: $(builddir)/compare_env.node
-
diff --git a/test/addons-napi/test_env_sharing/build/config.gypi b/test/addons-napi/test_env_sharing/build/config.gypi
deleted file mode 100644
index 6a35cfccbb772b..00000000000000
--- a/test/addons-napi/test_env_sharing/build/config.gypi
+++ /dev/null
@@ -1,70 +0,0 @@
-# Do not edit. File was generated by node-gyp's "configure" step
-{
- "target_defaults": {
- "cflags": [],
- "default_configuration": "Release",
- "defines": [],
- "include_dirs": [],
- "libraries": []
- },
- "variables": {
- "asan": 0,
- "coverage": "false",
- "debug_devtools": "node",
- "debug_http2": "false",
- "debug_nghttp2": "false",
- "force_dynamic_crt": 0,
- "host_arch": "x64",
- "icu_data_file": "icudt59l.dat",
- "icu_data_in": "../../deps/icu-small/source/data/in/icudt59l.dat",
- "icu_endianness": "l",
- "icu_gyp_path": "tools/icu/icu-generic.gyp",
- "icu_locales": "en,root",
- "icu_path": "deps/icu-small",
- "icu_small": "true",
- "icu_ver_major": "59",
- "llvm_version": 0,
- "node_byteorder": "little",
- "node_enable_d8": "false",
- "node_enable_v8_vtunejit": "false",
- "node_install_npm": "true",
- "node_module_version": 58,
- "node_no_browser_globals": "false",
- "node_prefix": "/usr/local",
- "node_release_urlbase": "",
- "node_shared": "false",
- "node_shared_cares": "false",
- "node_shared_http_parser": "false",
- "node_shared_libuv": "false",
- "node_shared_openssl": "false",
- "node_shared_zlib": "false",
- "node_tag": "",
- "node_use_bundled_v8": "true",
- "node_use_dtrace": "true",
- "node_use_etw": "false",
- "node_use_lttng": "false",
- "node_use_openssl": "true",
- "node_use_perfctr": "false",
- "node_use_v8_platform": "true",
- "node_without_node_options": "false",
- "openssl_fips": "",
- "openssl_no_asm": 0,
- "shlib_suffix": "58.dylib",
- "target_arch": "x64",
- "uv_parent_path": "/deps/uv/",
- "uv_use_dtrace": "true",
- "v8_enable_gdbjit": 0,
- "v8_enable_i18n_support": 1,
- "v8_enable_inspector": 1,
- "v8_no_strict_aliasing": 1,
- "v8_optimized_debug": 0,
- "v8_promise_internal_field_count": 1,
- "v8_random_seed": 0,
- "v8_trace_maps": 0,
- "v8_use_snapshot": "true",
- "want_separate_host_toolset": 0,
- "xcode_version": "8.0",
- "nodedir": "/Users/trott/io.js",
- "standalone_static_library": 1
- }
-}
diff --git a/test/addons-napi/test_env_sharing/build/gyp-mac-tool b/test/addons-napi/test_env_sharing/build/gyp-mac-tool
deleted file mode 100755
index 8ef02b0493a003..00000000000000
--- a/test/addons-napi/test_env_sharing/build/gyp-mac-tool
+++ /dev/null
@@ -1,611 +0,0 @@
-#!/usr/bin/env python
-# Generated by gyp. Do not edit.
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Utility functions to perform Xcode-style build steps.
-
-These functions are executed via gyp-mac-tool when using the Makefile generator.
-"""
-
-import fcntl
-import fnmatch
-import glob
-import json
-import os
-import plistlib
-import re
-import shutil
-import string
-import subprocess
-import sys
-import tempfile
-
-
-def main(args):
- executor = MacTool()
- exit_code = executor.Dispatch(args)
- if exit_code is not None:
- sys.exit(exit_code)
-
-
-class MacTool(object):
- """This class performs all the Mac tooling steps. The methods can either be
- executed directly, or dispatched from an argument list."""
-
- def Dispatch(self, args):
- """Dispatches a string command to a method."""
- if len(args) < 1:
- raise Exception("Not enough arguments")
-
- method = "Exec%s" % self._CommandifyName(args[0])
- return getattr(self, method)(*args[1:])
-
- def _CommandifyName(self, name_string):
- """Transforms a tool name like copy-info-plist to CopyInfoPlist"""
- return name_string.title().replace('-', '')
-
- def ExecCopyBundleResource(self, source, dest, convert_to_binary):
- """Copies a resource file to the bundle/Resources directory, performing any
- necessary compilation on each resource."""
- extension = os.path.splitext(source)[1].lower()
- if os.path.isdir(source):
- # Copy tree.
- # TODO(thakis): This copies file attributes like mtime, while the
- # single-file branch below doesn't. This should probably be changed to
- # be consistent with the single-file branch.
- if os.path.exists(dest):
- shutil.rmtree(dest)
- shutil.copytree(source, dest)
- elif extension == '.xib':
- return self._CopyXIBFile(source, dest)
- elif extension == '.storyboard':
- return self._CopyXIBFile(source, dest)
- elif extension == '.strings':
- self._CopyStringsFile(source, dest, convert_to_binary)
- else:
- shutil.copy(source, dest)
-
- def _CopyXIBFile(self, source, dest):
- """Compiles a XIB file with ibtool into a binary plist in the bundle."""
-
- # ibtool sometimes crashes with relative paths. See crbug.com/314728.
- base = os.path.dirname(os.path.realpath(__file__))
- if os.path.relpath(source):
- source = os.path.join(base, source)
- if os.path.relpath(dest):
- dest = os.path.join(base, dest)
-
- args = ['xcrun', 'ibtool', '--errors', '--warnings', '--notices',
- '--output-format', 'human-readable-text', '--compile', dest, source]
- ibtool_section_re = re.compile(r'/\*.*\*/')
- ibtool_re = re.compile(r'.*note:.*is clipping its content')
- ibtoolout = subprocess.Popen(args, stdout=subprocess.PIPE)
- current_section_header = None
- for line in ibtoolout.stdout:
- if ibtool_section_re.match(line):
- current_section_header = line
- elif not ibtool_re.match(line):
- if current_section_header:
- sys.stdout.write(current_section_header)
- current_section_header = None
- sys.stdout.write(line)
- return ibtoolout.returncode
-
- def _ConvertToBinary(self, dest):
- subprocess.check_call([
- 'xcrun', 'plutil', '-convert', 'binary1', '-o', dest, dest])
-
- def _CopyStringsFile(self, source, dest, convert_to_binary):
- """Copies a .strings file using iconv to reconvert the input into UTF-16."""
- input_code = self._DetectInputEncoding(source) or "UTF-8"
-
- # Xcode's CpyCopyStringsFile / builtin-copyStrings seems to call
- # CFPropertyListCreateFromXMLData() behind the scenes; at least it prints
- # CFPropertyListCreateFromXMLData(): Old-style plist parser: missing
- # semicolon in dictionary.
- # on invalid files. Do the same kind of validation.
- import CoreFoundation
- s = open(source, 'rb').read()
- d = CoreFoundation.CFDataCreate(None, s, len(s))
- _, error = CoreFoundation.CFPropertyListCreateFromXMLData(None, d, 0, None)
- if error:
- return
-
- fp = open(dest, 'wb')
- fp.write(s.decode(input_code).encode('UTF-16'))
- fp.close()
-
- if convert_to_binary == 'True':
- self._ConvertToBinary(dest)
-
- def _DetectInputEncoding(self, file_name):
- """Reads the first few bytes from file_name and tries to guess the text
- encoding. Returns None as a guess if it can't detect it."""
- fp = open(file_name, 'rb')
- try:
- header = fp.read(3)
- except e:
- fp.close()
- return None
- fp.close()
- if header.startswith("\xFE\xFF"):
- return "UTF-16"
- elif header.startswith("\xFF\xFE"):
- return "UTF-16"
- elif header.startswith("\xEF\xBB\xBF"):
- return "UTF-8"
- else:
- return None
-
- def ExecCopyInfoPlist(self, source, dest, convert_to_binary, *keys):
- """Copies the |source| Info.plist to the destination directory |dest|."""
- # Read the source Info.plist into memory.
- fd = open(source, 'r')
- lines = fd.read()
- fd.close()
-
- # Insert synthesized key/value pairs (e.g. BuildMachineOSBuild).
- plist = plistlib.readPlistFromString(lines)
- if keys:
- plist = dict(plist.items() + json.loads(keys[0]).items())
- lines = plistlib.writePlistToString(plist)
-
- # Go through all the environment variables and replace them as variables in
- # the file.
- IDENT_RE = re.compile(r'[/\s]')
- for key in os.environ:
- if key.startswith('_'):
- continue
- evar = '${%s}' % key
- evalue = os.environ[key]
- lines = string.replace(lines, evar, evalue)
-
- # Xcode supports various suffices on environment variables, which are
- # all undocumented. :rfc1034identifier is used in the standard project
- # template these days, and :identifier was used earlier. They are used to
- # convert non-url characters into things that look like valid urls --
- # except that the replacement character for :identifier, '_' isn't valid
- # in a URL either -- oops, hence :rfc1034identifier was born.
- evar = '${%s:identifier}' % key
- evalue = IDENT_RE.sub('_', os.environ[key])
- lines = string.replace(lines, evar, evalue)
-
- evar = '${%s:rfc1034identifier}' % key
- evalue = IDENT_RE.sub('-', os.environ[key])
- lines = string.replace(lines, evar, evalue)
-
- # Remove any keys with values that haven't been replaced.
- lines = lines.split('\n')
- for i in range(len(lines)):
- if lines[i].strip().startswith("${"):
- lines[i] = None
- lines[i - 1] = None
- lines = '\n'.join(filter(lambda x: x is not None, lines))
-
- # Write out the file with variables replaced.
- fd = open(dest, 'w')
- fd.write(lines)
- fd.close()
-
- # Now write out PkgInfo file now that the Info.plist file has been
- # "compiled".
- self._WritePkgInfo(dest)
-
- if convert_to_binary == 'True':
- self._ConvertToBinary(dest)
-
- def _WritePkgInfo(self, info_plist):
- """This writes the PkgInfo file from the data stored in Info.plist."""
- plist = plistlib.readPlist(info_plist)
- if not plist:
- return
-
- # Only create PkgInfo for executable types.
- package_type = plist['CFBundlePackageType']
- if package_type != 'APPL':
- return
-
- # The format of PkgInfo is eight characters, representing the bundle type
- # and bundle signature, each four characters. If that is missing, four
- # '?' characters are used instead.
- signature_code = plist.get('CFBundleSignature', '????')
- if len(signature_code) != 4: # Wrong length resets everything, too.
- signature_code = '?' * 4
-
- dest = os.path.join(os.path.dirname(info_plist), 'PkgInfo')
- fp = open(dest, 'w')
- fp.write('%s%s' % (package_type, signature_code))
- fp.close()
-
- def ExecFlock(self, lockfile, *cmd_list):
- """Emulates the most basic behavior of Linux's flock(1)."""
- # Rely on exception handling to report errors.
- fd = os.open(lockfile, os.O_RDONLY|os.O_NOCTTY|os.O_CREAT, 0o666)
- fcntl.flock(fd, fcntl.LOCK_EX)
- return subprocess.call(cmd_list)
-
- def ExecFilterLibtool(self, *cmd_list):
- """Calls libtool and filters out '/path/to/libtool: file: foo.o has no
- symbols'."""
- libtool_re = re.compile(r'^.*libtool: file: .* has no symbols$')
- libtool_re5 = re.compile(
- r'^.*libtool: warning for library: ' +
- r'.* the table of contents is empty ' +
- r'\(no object file members in the library define global symbols\)$')
- env = os.environ.copy()
- # Ref:
- # http://www.opensource.apple.com/source/cctools/cctools-809/misc/libtool.c
- # The problem with this flag is that it resets the file mtime on the file to
- # epoch=0, e.g. 1970-1-1 or 1969-12-31 depending on timezone.
- env['ZERO_AR_DATE'] = '1'
- libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env)
- _, err = libtoolout.communicate()
- for line in err.splitlines():
- if not libtool_re.match(line) and not libtool_re5.match(line):
- print >>sys.stderr, line
- # Unconditionally touch the output .a file on the command line if present
- # and the command succeeded. A bit hacky.
- if not libtoolout.returncode:
- for i in range(len(cmd_list) - 1):
- if cmd_list[i] == "-o" and cmd_list[i+1].endswith('.a'):
- os.utime(cmd_list[i+1], None)
- break
- return libtoolout.returncode
-
- def ExecPackageFramework(self, framework, version):
- """Takes a path to Something.framework and the Current version of that and
- sets up all the symlinks."""
- # Find the name of the binary based on the part before the ".framework".
- binary = os.path.basename(framework).split('.')[0]
-
- CURRENT = 'Current'
- RESOURCES = 'Resources'
- VERSIONS = 'Versions'
-
- if not os.path.exists(os.path.join(framework, VERSIONS, version, binary)):
- # Binary-less frameworks don't seem to contain symlinks (see e.g.
- # chromium's out/Debug/org.chromium.Chromium.manifest/ bundle).
- return
-
- # Move into the framework directory to set the symlinks correctly.
- pwd = os.getcwd()
- os.chdir(framework)
-
- # Set up the Current version.
- self._Relink(version, os.path.join(VERSIONS, CURRENT))
-
- # Set up the root symlinks.
- self._Relink(os.path.join(VERSIONS, CURRENT, binary), binary)
- self._Relink(os.path.join(VERSIONS, CURRENT, RESOURCES), RESOURCES)
-
- # Back to where we were before!
- os.chdir(pwd)
-
- def _Relink(self, dest, link):
- """Creates a symlink to |dest| named |link|. If |link| already exists,
- it is overwritten."""
- if os.path.lexists(link):
- os.remove(link)
- os.symlink(dest, link)
-
- def ExecCompileXcassets(self, keys, *inputs):
- """Compiles multiple .xcassets files into a single .car file.
-
- This invokes 'actool' to compile all the inputs .xcassets files. The
- |keys| arguments is a json-encoded dictionary of extra arguments to
- pass to 'actool' when the asset catalogs contains an application icon
- or a launch image.
-
- Note that 'actool' does not create the Assets.car file if the asset
- catalogs does not contains imageset.
- """
- command_line = [
- 'xcrun', 'actool', '--output-format', 'human-readable-text',
- '--compress-pngs', '--notices', '--warnings', '--errors',
- ]
- is_iphone_target = 'IPHONEOS_DEPLOYMENT_TARGET' in os.environ
- if is_iphone_target:
- platform = os.environ['CONFIGURATION'].split('-')[-1]
- if platform not in ('iphoneos', 'iphonesimulator'):
- platform = 'iphonesimulator'
- command_line.extend([
- '--platform', platform, '--target-device', 'iphone',
- '--target-device', 'ipad', '--minimum-deployment-target',
- os.environ['IPHONEOS_DEPLOYMENT_TARGET'], '--compile',
- os.path.abspath(os.environ['CONTENTS_FOLDER_PATH']),
- ])
- else:
- command_line.extend([
- '--platform', 'macosx', '--target-device', 'mac',
- '--minimum-deployment-target', os.environ['MACOSX_DEPLOYMENT_TARGET'],
- '--compile',
- os.path.abspath(os.environ['UNLOCALIZED_RESOURCES_FOLDER_PATH']),
- ])
- if keys:
- keys = json.loads(keys)
- for key, value in keys.iteritems():
- arg_name = '--' + key
- if isinstance(value, bool):
- if value:
- command_line.append(arg_name)
- elif isinstance(value, list):
- for v in value:
- command_line.append(arg_name)
- command_line.append(str(v))
- else:
- command_line.append(arg_name)
- command_line.append(str(value))
- # Note: actool crashes if inputs path are relative, so use os.path.abspath
- # to get absolute path name for inputs.
- command_line.extend(map(os.path.abspath, inputs))
- subprocess.check_call(command_line)
-
- def ExecMergeInfoPlist(self, output, *inputs):
- """Merge multiple .plist files into a single .plist file."""
- merged_plist = {}
- for path in inputs:
- plist = self._LoadPlistMaybeBinary(path)
- self._MergePlist(merged_plist, plist)
- plistlib.writePlist(merged_plist, output)
-
- def ExecCodeSignBundle(self, key, resource_rules, entitlements, provisioning):
- """Code sign a bundle.
-
- This function tries to code sign an iOS bundle, following the same
- algorithm as Xcode:
- 1. copy ResourceRules.plist from the user or the SDK into the bundle,
- 2. pick the provisioning profile that best match the bundle identifier,
- and copy it into the bundle as embedded.mobileprovision,
- 3. copy Entitlements.plist from user or SDK next to the bundle,
- 4. code sign the bundle.
- """
- resource_rules_path = self._InstallResourceRules(resource_rules)
- substitutions, overrides = self._InstallProvisioningProfile(
- provisioning, self._GetCFBundleIdentifier())
- entitlements_path = self._InstallEntitlements(
- entitlements, substitutions, overrides)
- subprocess.check_call([
- 'codesign', '--force', '--sign', key, '--resource-rules',
- resource_rules_path, '--entitlements', entitlements_path,
- os.path.join(
- os.environ['TARGET_BUILD_DIR'],
- os.environ['FULL_PRODUCT_NAME'])])
-
- def _InstallResourceRules(self, resource_rules):
- """Installs ResourceRules.plist from user or SDK into the bundle.
-
- Args:
- resource_rules: string, optional, path to the ResourceRules.plist file
- to use, default to "${SDKROOT}/ResourceRules.plist"
-
- Returns:
- Path to the copy of ResourceRules.plist into the bundle.
- """
- source_path = resource_rules
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['CONTENTS_FOLDER_PATH'],
- 'ResourceRules.plist')
- if not source_path:
- source_path = os.path.join(
- os.environ['SDKROOT'], 'ResourceRules.plist')
- shutil.copy2(source_path, target_path)
- return target_path
-
- def _InstallProvisioningProfile(self, profile, bundle_identifier):
- """Installs embedded.mobileprovision into the bundle.
-
- Args:
- profile: string, optional, short name of the .mobileprovision file
- to use, if empty or the file is missing, the best file installed
- will be used
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
-
- Returns:
- A tuple containing two dictionary: variables substitutions and values
- to overrides when generating the entitlements file.
- """
- source_path, provisioning_data, team_id = self._FindProvisioningProfile(
- profile, bundle_identifier)
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['CONTENTS_FOLDER_PATH'],
- 'embedded.mobileprovision')
- shutil.copy2(source_path, target_path)
- substitutions = self._GetSubstitutions(bundle_identifier, team_id + '.')
- return substitutions, provisioning_data['Entitlements']
-
- def _FindProvisioningProfile(self, profile, bundle_identifier):
- """Finds the .mobileprovision file to use for signing the bundle.
-
- Checks all the installed provisioning profiles (or if the user specified
- the PROVISIONING_PROFILE variable, only consult it) and select the most
- specific that correspond to the bundle identifier.
-
- Args:
- profile: string, optional, short name of the .mobileprovision file
- to use, if empty or the file is missing, the best file installed
- will be used
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
-
- Returns:
- A tuple of the path to the selected provisioning profile, the data of
- the embedded plist in the provisioning profile and the team identifier
- to use for code signing.
-
- Raises:
- SystemExit: if no .mobileprovision can be used to sign the bundle.
- """
- profiles_dir = os.path.join(
- os.environ['HOME'], 'Library', 'MobileDevice', 'Provisioning Profiles')
- if not os.path.isdir(profiles_dir):
- print >>sys.stderr, (
- 'cannot find mobile provisioning for %s' % bundle_identifier)
- sys.exit(1)
- provisioning_profiles = None
- if profile:
- profile_path = os.path.join(profiles_dir, profile + '.mobileprovision')
- if os.path.exists(profile_path):
- provisioning_profiles = [profile_path]
- if not provisioning_profiles:
- provisioning_profiles = glob.glob(
- os.path.join(profiles_dir, '*.mobileprovision'))
- valid_provisioning_profiles = {}
- for profile_path in provisioning_profiles:
- profile_data = self._LoadProvisioningProfile(profile_path)
- app_id_pattern = profile_data.get(
- 'Entitlements', {}).get('application-identifier', '')
- for team_identifier in profile_data.get('TeamIdentifier', []):
- app_id = '%s.%s' % (team_identifier, bundle_identifier)
- if fnmatch.fnmatch(app_id, app_id_pattern):
- valid_provisioning_profiles[app_id_pattern] = (
- profile_path, profile_data, team_identifier)
- if not valid_provisioning_profiles:
- print >>sys.stderr, (
- 'cannot find mobile provisioning for %s' % bundle_identifier)
- sys.exit(1)
- # If the user has multiple provisioning profiles installed that can be
- # used for ${bundle_identifier}, pick the most specific one (ie. the
- # provisioning profile whose pattern is the longest).
- selected_key = max(valid_provisioning_profiles, key=lambda v: len(v))
- return valid_provisioning_profiles[selected_key]
-
- def _LoadProvisioningProfile(self, profile_path):
- """Extracts the plist embedded in a provisioning profile.
-
- Args:
- profile_path: string, path to the .mobileprovision file
-
- Returns:
- Content of the plist embedded in the provisioning profile as a dictionary.
- """
- with tempfile.NamedTemporaryFile() as temp:
- subprocess.check_call([
- 'security', 'cms', '-D', '-i', profile_path, '-o', temp.name])
- return self._LoadPlistMaybeBinary(temp.name)
-
- def _MergePlist(self, merged_plist, plist):
- """Merge |plist| into |merged_plist|."""
- for key, value in plist.iteritems():
- if isinstance(value, dict):
- merged_value = merged_plist.get(key, {})
- if isinstance(merged_value, dict):
- self._MergePlist(merged_value, value)
- merged_plist[key] = merged_value
- else:
- merged_plist[key] = value
- else:
- merged_plist[key] = value
-
- def _LoadPlistMaybeBinary(self, plist_path):
- """Loads into a memory a plist possibly encoded in binary format.
-
- This is a wrapper around plistlib.readPlist that tries to convert the
- plist to the XML format if it can't be parsed (assuming that it is in
- the binary format).
-
- Args:
- plist_path: string, path to a plist file, in XML or binary format
-
- Returns:
- Content of the plist as a dictionary.
- """
- try:
- # First, try to read the file using plistlib that only supports XML,
- # and if an exception is raised, convert a temporary copy to XML and
- # load that copy.
- return plistlib.readPlist(plist_path)
- except:
- pass
- with tempfile.NamedTemporaryFile() as temp:
- shutil.copy2(plist_path, temp.name)
- subprocess.check_call(['plutil', '-convert', 'xml1', temp.name])
- return plistlib.readPlist(temp.name)
-
- def _GetSubstitutions(self, bundle_identifier, app_identifier_prefix):
- """Constructs a dictionary of variable substitutions for Entitlements.plist.
-
- Args:
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
- app_identifier_prefix: string, value for AppIdentifierPrefix
-
- Returns:
- Dictionary of substitutions to apply when generating Entitlements.plist.
- """
- return {
- 'CFBundleIdentifier': bundle_identifier,
- 'AppIdentifierPrefix': app_identifier_prefix,
- }
-
- def _GetCFBundleIdentifier(self):
- """Extracts CFBundleIdentifier value from Info.plist in the bundle.
-
- Returns:
- Value of CFBundleIdentifier in the Info.plist located in the bundle.
- """
- info_plist_path = os.path.join(
- os.environ['TARGET_BUILD_DIR'],
- os.environ['INFOPLIST_PATH'])
- info_plist_data = self._LoadPlistMaybeBinary(info_plist_path)
- return info_plist_data['CFBundleIdentifier']
-
- def _InstallEntitlements(self, entitlements, substitutions, overrides):
- """Generates and install the ${BundleName}.xcent entitlements file.
-
- Expands variables "$(variable)" pattern in the source entitlements file,
- add extra entitlements defined in the .mobileprovision file and the copy
- the generated plist to "${BundlePath}.xcent".
-
- Args:
- entitlements: string, optional, path to the Entitlements.plist template
- to use, defaults to "${SDKROOT}/Entitlements.plist"
- substitutions: dictionary, variable substitutions
- overrides: dictionary, values to add to the entitlements
-
- Returns:
- Path to the generated entitlements file.
- """
- source_path = entitlements
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['PRODUCT_NAME'] + '.xcent')
- if not source_path:
- source_path = os.path.join(
- os.environ['SDKROOT'],
- 'Entitlements.plist')
- shutil.copy2(source_path, target_path)
- data = self._LoadPlistMaybeBinary(target_path)
- data = self._ExpandVariables(data, substitutions)
- if overrides:
- for key in overrides:
- if key not in data:
- data[key] = overrides[key]
- plistlib.writePlist(data, target_path)
- return target_path
-
- def _ExpandVariables(self, data, substitutions):
- """Expands variables "$(variable)" in data.
-
- Args:
- data: object, can be either string, list or dictionary
- substitutions: dictionary, variable substitutions to perform
-
- Returns:
- Copy of data where each references to "$(variable)" has been replaced
- by the corresponding value found in substitutions, or left intact if
- the key was not found.
- """
- if isinstance(data, str):
- for key, value in substitutions.iteritems():
- data = data.replace('$(%s)' % key, value)
- return data
- if isinstance(data, list):
- return [self._ExpandVariables(v, substitutions) for v in data]
- if isinstance(data, dict):
- return {k: self._ExpandVariables(data[k], substitutions) for k in data}
- return data
-
-if __name__ == '__main__':
- sys.exit(main(sys.argv[1:]))
diff --git a/test/addons-napi/test_env_sharing/build/store_env.target.mk b/test/addons-napi/test_env_sharing/build/store_env.target.mk
deleted file mode 100644
index 68930824a7c5e1..00000000000000
--- a/test/addons-napi/test_env_sharing/build/store_env.target.mk
+++ /dev/null
@@ -1,178 +0,0 @@
-# This file is generated by gyp; do not edit.
-
-TOOLSET := target
-TARGET := store_env
-DEFS_Debug := \
- '-DNODE_GYP_MODULE_NAME=store_env' \
- '-DUSING_UV_SHARED=1' \
- '-DUSING_V8_SHARED=1' \
- '-DV8_DEPRECATION_WARNINGS=1' \
- '-D_DARWIN_USE_64_BIT_INODE=1' \
- '-D_LARGEFILE_SOURCE' \
- '-D_FILE_OFFSET_BITS=64' \
- '-DBUILDING_NODE_EXTENSION' \
- '-DDEBUG' \
- '-D_DEBUG' \
- '-DV8_ENABLE_CHECKS'
-
-# Flags passed to all source files.
-CFLAGS_Debug := \
- -O0 \
- -gdwarf-2 \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -Wall \
- -Wendif-labels \
- -W \
- -Wno-unused-parameter
-
-# Flags passed to only C files.
-CFLAGS_C_Debug := \
- -fno-strict-aliasing
-
-# Flags passed to only C++ files.
-CFLAGS_CC_Debug := \
- -std=gnu++0x \
- -stdlib=libc++ \
- -fno-rtti \
- -fno-exceptions \
- -fno-threadsafe-statics \
- -fno-strict-aliasing
-
-# Flags passed to only ObjC files.
-CFLAGS_OBJC_Debug :=
-
-# Flags passed to only ObjC++ files.
-CFLAGS_OBJCC_Debug :=
-
-INCS_Debug := \
- -I/Users/trott/io.js/include/node \
- -I/Users/trott/io.js/src \
- -I/Users/trott/io.js/deps/uv/include \
- -I/Users/trott/io.js/deps/v8/include
-
-DEFS_Release := \
- '-DNODE_GYP_MODULE_NAME=store_env' \
- '-DUSING_UV_SHARED=1' \
- '-DUSING_V8_SHARED=1' \
- '-DV8_DEPRECATION_WARNINGS=1' \
- '-D_DARWIN_USE_64_BIT_INODE=1' \
- '-D_LARGEFILE_SOURCE' \
- '-D_FILE_OFFSET_BITS=64' \
- '-DBUILDING_NODE_EXTENSION'
-
-# Flags passed to all source files.
-CFLAGS_Release := \
- -Os \
- -gdwarf-2 \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -Wall \
- -Wendif-labels \
- -W \
- -Wno-unused-parameter
-
-# Flags passed to only C files.
-CFLAGS_C_Release := \
- -fno-strict-aliasing
-
-# Flags passed to only C++ files.
-CFLAGS_CC_Release := \
- -std=gnu++0x \
- -stdlib=libc++ \
- -fno-rtti \
- -fno-exceptions \
- -fno-threadsafe-statics \
- -fno-strict-aliasing
-
-# Flags passed to only ObjC files.
-CFLAGS_OBJC_Release :=
-
-# Flags passed to only ObjC++ files.
-CFLAGS_OBJCC_Release :=
-
-INCS_Release := \
- -I/Users/trott/io.js/include/node \
- -I/Users/trott/io.js/src \
- -I/Users/trott/io.js/deps/uv/include \
- -I/Users/trott/io.js/deps/v8/include
-
-OBJS := \
- $(obj).target/$(TARGET)/store_env.o
-
-# Add to the list of files we specially track dependencies for.
-all_deps += $(OBJS)
-
-# CFLAGS et al overrides must be target-local.
-# See "Target-specific Variable Values" in the GNU Make manual.
-$(OBJS): TOOLSET := $(TOOLSET)
-$(OBJS): GYP_CFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE))
-$(OBJS): GYP_CXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE))
-$(OBJS): GYP_OBJCFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) $(CFLAGS_OBJC_$(BUILDTYPE))
-$(OBJS): GYP_OBJCXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) $(CFLAGS_OBJCC_$(BUILDTYPE))
-
-# Suffix rules, putting all outputs into $(obj).
-
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-# Try building from generated source, too.
-
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-# End of this set of suffix rules
-### Rules for final target.
-LDFLAGS_Debug := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -L$(builddir) \
- -stdlib=libc++
-
-LIBTOOLFLAGS_Debug := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first
-
-LDFLAGS_Release := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -L$(builddir) \
- -stdlib=libc++
-
-LIBTOOLFLAGS_Release := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first
-
-LIBS :=
-
-$(builddir)/store_env.node: GYP_LDFLAGS := $(LDFLAGS_$(BUILDTYPE))
-$(builddir)/store_env.node: LIBS := $(LIBS)
-$(builddir)/store_env.node: GYP_LIBTOOLFLAGS := $(LIBTOOLFLAGS_$(BUILDTYPE))
-$(builddir)/store_env.node: TOOLSET := $(TOOLSET)
-$(builddir)/store_env.node: $(OBJS) FORCE_DO_CMD
- $(call do_cmd,solink_module)
-
-all_deps += $(builddir)/store_env.node
-# Add target alias
-.PHONY: store_env
-store_env: $(builddir)/store_env.node
-
-# Short alias for building this executable.
-.PHONY: store_env.node
-store_env.node: $(builddir)/store_env.node
-
-# Add executable to "all" target.
-.PHONY: all
-all: $(builddir)/store_env.node
-
diff --git a/test/addons-napi/test_error/build/Makefile b/test/addons-napi/test_error/build/Makefile
deleted file mode 100644
index e004646841f8f6..00000000000000
--- a/test/addons-napi/test_error/build/Makefile
+++ /dev/null
@@ -1,342 +0,0 @@
-# We borrow heavily from the kernel build setup, though we are simpler since
-# we don't have Kconfig tweaking settings on us.
-
-# The implicit make rules have it looking for RCS files, among other things.
-# We instead explicitly write all the rules we care about.
-# It's even quicker (saves ~200ms) to pass -r on the command line.
-MAKEFLAGS=-r
-
-# The source directory tree.
-srcdir := ..
-abs_srcdir := $(abspath $(srcdir))
-
-# The name of the builddir.
-builddir_name ?= .
-
-# The V=1 flag on command line makes us verbosely print command lines.
-ifdef V
- quiet=
-else
- quiet=quiet_
-endif
-
-# Specify BUILDTYPE=Release on the command line for a release build.
-BUILDTYPE ?= Release
-
-# Directory all our build output goes into.
-# Note that this must be two directories beneath src/ for unit tests to pass,
-# as they reach into the src/ directory for data with relative paths.
-builddir ?= $(builddir_name)/$(BUILDTYPE)
-abs_builddir := $(abspath $(builddir))
-depsdir := $(builddir)/.deps
-
-# Object output directory.
-obj := $(builddir)/obj
-abs_obj := $(abspath $(obj))
-
-# We build up a list of every single one of the targets so we can slurp in the
-# generated dependency rule Makefiles in one pass.
-all_deps :=
-
-
-
-CC.target ?= $(CC)
-CFLAGS.target ?= $(CPPFLAGS) $(CFLAGS)
-CXX.target ?= $(CXX)
-CXXFLAGS.target ?= $(CPPFLAGS) $(CXXFLAGS)
-LINK.target ?= $(LINK)
-LDFLAGS.target ?= $(LDFLAGS)
-AR.target ?= $(AR)
-
-# C++ apps need to be linked with g++.
-LINK ?= $(CXX.target)
-
-# TODO(evan): move all cross-compilation logic to gyp-time so we don't need
-# to replicate this environment fallback in make as well.
-CC.host ?= gcc
-CFLAGS.host ?= $(CPPFLAGS_host) $(CFLAGS_host)
-CXX.host ?= g++
-CXXFLAGS.host ?= $(CPPFLAGS_host) $(CXXFLAGS_host)
-LINK.host ?= $(CXX.host)
-LDFLAGS.host ?=
-AR.host ?= ar
-
-# Define a dir function that can handle spaces.
-# http://www.gnu.org/software/make/manual/make.html#Syntax-of-Functions
-# "leading spaces cannot appear in the text of the first argument as written.
-# These characters can be put into the argument value by variable substitution."
-empty :=
-space := $(empty) $(empty)
-
-# http://stackoverflow.com/questions/1189781/using-make-dir-or-notdir-on-a-path-with-spaces
-replace_spaces = $(subst $(space),?,$1)
-unreplace_spaces = $(subst ?,$(space),$1)
-dirx = $(call unreplace_spaces,$(dir $(call replace_spaces,$1)))
-
-# Flags to make gcc output dependency info. Note that you need to be
-# careful here to use the flags that ccache and distcc can understand.
-# We write to a dep file on the side first and then rename at the end
-# so we can't end up with a broken dep file.
-depfile = $(depsdir)/$(call replace_spaces,$@).d
-DEPFLAGS = -MMD -MF $(depfile).raw
-
-# We have to fixup the deps output in a few ways.
-# (1) the file output should mention the proper .o file.
-# ccache or distcc lose the path to the target, so we convert a rule of
-# the form:
-# foobar.o: DEP1 DEP2
-# into
-# path/to/foobar.o: DEP1 DEP2
-# (2) we want missing files not to cause us to fail to build.
-# We want to rewrite
-# foobar.o: DEP1 DEP2 \
-# DEP3
-# to
-# DEP1:
-# DEP2:
-# DEP3:
-# so if the files are missing, they're just considered phony rules.
-# We have to do some pretty insane escaping to get those backslashes
-# and dollar signs past make, the shell, and sed at the same time.
-# Doesn't work with spaces, but that's fine: .d files have spaces in
-# their names replaced with other characters.
-define fixup_dep
-# The depfile may not exist if the input file didn't have any #includes.
-touch $(depfile).raw
-# Fixup path as in (1).
-sed -e "s|^$(notdir $@)|$@|" $(depfile).raw >> $(depfile)
-# Add extra rules as in (2).
-# We remove slashes and replace spaces with new lines;
-# remove blank lines;
-# delete the first line and append a colon to the remaining lines.
-sed -e 's|\\||' -e 'y| |\n|' $(depfile).raw |\
- grep -v '^$$' |\
- sed -e 1d -e 's|$$|:|' \
- >> $(depfile)
-rm $(depfile).raw
-endef
-
-# Command definitions:
-# - cmd_foo is the actual command to run;
-# - quiet_cmd_foo is the brief-output summary of the command.
-
-quiet_cmd_cc = CC($(TOOLSET)) $@
-cmd_cc = $(CC.$(TOOLSET)) $(GYP_CFLAGS) $(DEPFLAGS) $(CFLAGS.$(TOOLSET)) -c -o $@ $<
-
-quiet_cmd_cxx = CXX($(TOOLSET)) $@
-cmd_cxx = $(CXX.$(TOOLSET)) $(GYP_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-
-quiet_cmd_objc = CXX($(TOOLSET)) $@
-cmd_objc = $(CC.$(TOOLSET)) $(GYP_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-quiet_cmd_objcxx = CXX($(TOOLSET)) $@
-cmd_objcxx = $(CXX.$(TOOLSET)) $(GYP_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-# Commands for precompiled header files.
-quiet_cmd_pch_c = CXX($(TOOLSET)) $@
-cmd_pch_c = $(CC.$(TOOLSET)) $(GYP_PCH_CFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-quiet_cmd_pch_cc = CXX($(TOOLSET)) $@
-cmd_pch_cc = $(CC.$(TOOLSET)) $(GYP_PCH_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-quiet_cmd_pch_m = CXX($(TOOLSET)) $@
-cmd_pch_m = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $<
-quiet_cmd_pch_mm = CXX($(TOOLSET)) $@
-cmd_pch_mm = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-# gyp-mac-tool is written next to the root Makefile by gyp.
-# Use $(4) for the command, since $(2) and $(3) are used as flag by do_cmd
-# already.
-quiet_cmd_mac_tool = MACTOOL $(4) $<
-cmd_mac_tool = ./gyp-mac-tool $(4) $< "$@"
-
-quiet_cmd_mac_package_framework = PACKAGE FRAMEWORK $@
-cmd_mac_package_framework = ./gyp-mac-tool package-framework "$@" $(4)
-
-quiet_cmd_infoplist = INFOPLIST $@
-cmd_infoplist = $(CC.$(TOOLSET)) -E -P -Wno-trigraphs -x c $(INFOPLIST_DEFINES) "$<" -o "$@"
-
-quiet_cmd_touch = TOUCH $@
-cmd_touch = touch $@
-
-quiet_cmd_copy = COPY $@
-# send stderr to /dev/null to ignore messages when linking directories.
-cmd_copy = rm -rf "$@" && cp -af "$<" "$@"
-
-quiet_cmd_alink = LIBTOOL-STATIC $@
-cmd_alink = rm -f $@ && ./gyp-mac-tool filter-libtool libtool $(GYP_LIBTOOLFLAGS) -static -o $@ $(filter %.o,$^)
-
-quiet_cmd_link = LINK($(TOOLSET)) $@
-cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS)
-
-quiet_cmd_solink = SOLINK($(TOOLSET)) $@
-cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS)
-
-quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@
-cmd_solink_module = $(LINK.$(TOOLSET)) -bundle $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS)
-
-
-# Define an escape_quotes function to escape single quotes.
-# This allows us to handle quotes properly as long as we always use
-# use single quotes and escape_quotes.
-escape_quotes = $(subst ','\'',$(1))
-# This comment is here just to include a ' to unconfuse syntax highlighting.
-# Define an escape_vars function to escape '$' variable syntax.
-# This allows us to read/write command lines with shell variables (e.g.
-# $LD_LIBRARY_PATH), without triggering make substitution.
-escape_vars = $(subst $$,$$$$,$(1))
-# Helper that expands to a shell command to echo a string exactly as it is in
-# make. This uses printf instead of echo because printf's behaviour with respect
-# to escape sequences is more portable than echo's across different shells
-# (e.g., dash, bash).
-exact_echo = printf '%s\n' '$(call escape_quotes,$(1))'
-
-# Helper to compare the command we're about to run against the command
-# we logged the last time we ran the command. Produces an empty
-# string (false) when the commands match.
-# Tricky point: Make has no string-equality test function.
-# The kernel uses the following, but it seems like it would have false
-# positives, where one string reordered its arguments.
-# arg_check = $(strip $(filter-out $(cmd_$(1)), $(cmd_$@)) \
-# $(filter-out $(cmd_$@), $(cmd_$(1))))
-# We instead substitute each for the empty string into the other, and
-# say they're equal if both substitutions produce the empty string.
-# .d files contain ? instead of spaces, take that into account.
-command_changed = $(or $(subst $(cmd_$(1)),,$(cmd_$(call replace_spaces,$@))),\
- $(subst $(cmd_$(call replace_spaces,$@)),,$(cmd_$(1))))
-
-# Helper that is non-empty when a prerequisite changes.
-# Normally make does this implicitly, but we force rules to always run
-# so we can check their command lines.
-# $? -- new prerequisites
-# $| -- order-only dependencies
-prereq_changed = $(filter-out FORCE_DO_CMD,$(filter-out $|,$?))
-
-# Helper that executes all postbuilds until one fails.
-define do_postbuilds
- @E=0;\
- for p in $(POSTBUILDS); do\
- eval $$p;\
- E=$$?;\
- if [ $$E -ne 0 ]; then\
- break;\
- fi;\
- done;\
- if [ $$E -ne 0 ]; then\
- rm -rf "$@";\
- exit $$E;\
- fi
-endef
-
-# do_cmd: run a command via the above cmd_foo names, if necessary.
-# Should always run for a given target to handle command-line changes.
-# Second argument, if non-zero, makes it do asm/C/C++ dependency munging.
-# Third argument, if non-zero, makes it do POSTBUILDS processing.
-# Note: We intentionally do NOT call dirx for depfile, since it contains ? for
-# spaces already and dirx strips the ? characters.
-define do_cmd
-$(if $(or $(command_changed),$(prereq_changed)),
- @$(call exact_echo, $($(quiet)cmd_$(1)))
- @mkdir -p "$(call dirx,$@)" "$(dir $(depfile))"
- $(if $(findstring flock,$(word 2,$(cmd_$1))),
- @$(cmd_$(1))
- @echo " $(quiet_cmd_$(1)): Finished",
- @$(cmd_$(1))
- )
- @$(call exact_echo,$(call escape_vars,cmd_$(call replace_spaces,$@) := $(cmd_$(1)))) > $(depfile)
- @$(if $(2),$(fixup_dep))
- $(if $(and $(3), $(POSTBUILDS)),
- $(call do_postbuilds)
- )
-)
-endef
-
-# Declare the "all" target first so it is the default,
-# even though we don't have the deps yet.
-.PHONY: all
-all:
-
-# make looks for ways to re-generate included makefiles, but in our case, we
-# don't have a direct way. Explicitly telling make that it has nothing to do
-# for them makes it go faster.
-%.d: ;
-
-# Use FORCE_DO_CMD to force a target to run. Should be coupled with
-# do_cmd.
-.PHONY: FORCE_DO_CMD
-FORCE_DO_CMD:
-
-TOOLSET := target
-# Suffix rules, putting all outputs into $(obj).
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cpp FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cxx FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.m FORCE_DO_CMD
- @$(call do_cmd,objc,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.mm FORCE_DO_CMD
- @$(call do_cmd,objcxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.S FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.s FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-# Try building from generated source, too.
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cpp FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cxx FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.m FORCE_DO_CMD
- @$(call do_cmd,objc,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.mm FORCE_DO_CMD
- @$(call do_cmd,objcxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.S FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.s FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-$(obj).$(TOOLSET)/%.o: $(obj)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.cpp FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.cxx FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.m FORCE_DO_CMD
- @$(call do_cmd,objc,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.mm FORCE_DO_CMD
- @$(call do_cmd,objcxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.S FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.s FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-
-ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
- $(findstring $(join ^,$(prefix)),\
- $(join ^,test_error.target.mk)))),)
- include test_error.target.mk
-endif
-
-quiet_cmd_regen_makefile = ACTION Regenerating $@
-cmd_regen_makefile = cd $(srcdir); /Users/trott/io.js/deps/npm/node_modules/node-gyp/gyp/gyp_main.py -fmake --ignore-environment "--toplevel-dir=." -I/Users/trott/io.js/test/addons-napi/test_error/build/config.gypi -I/Users/trott/io.js/deps/npm/node_modules/node-gyp/addon.gypi -I/Users/trott/io.js/common.gypi "--depth=." "-Goutput_dir=." "--generator-output=build" "-Dlibrary=shared_library" "-Dvisibility=default" "-Dnode_root_dir=/Users/trott/io.js" "-Dnode_gyp_dir=/Users/trott/io.js/deps/npm/node_modules/node-gyp" "-Dnode_lib_file=/Users/trott/io.js/$(Configuration)/node.lib" "-Dmodule_root_dir=/Users/trott/io.js/test/addons-napi/test_error" "-Dnode_engine=v8" binding.gyp
-Makefile: $(srcdir)/../../../deps/npm/node_modules/node-gyp/addon.gypi $(srcdir)/build/config.gypi $(srcdir)/binding.gyp $(srcdir)/../../../common.gypi
- $(call do_cmd,regen_makefile)
-
-# "all" is a concatenation of the "all" targets from all the included
-# sub-makefiles. This is just here to clarify.
-all:
-
-# Add in dependency-tracking rules. $(all_deps) is the list of every single
-# target in our tree. Only consider the ones with .d (dependency) info:
-d_files := $(wildcard $(foreach f,$(all_deps),$(depsdir)/$(f).d))
-ifneq ($(d_files),)
- include $(d_files)
-endif
diff --git a/test/addons-napi/test_error/build/binding.Makefile b/test/addons-napi/test_error/build/binding.Makefile
deleted file mode 100644
index 30f57e354eb130..00000000000000
--- a/test/addons-napi/test_error/build/binding.Makefile
+++ /dev/null
@@ -1,6 +0,0 @@
-# This file is generated by gyp; do not edit.
-
-export builddir_name ?= ./build/.
-.PHONY: all
-all:
- $(MAKE) test_error
diff --git a/test/addons-napi/test_error/build/config.gypi b/test/addons-napi/test_error/build/config.gypi
deleted file mode 100644
index 6a35cfccbb772b..00000000000000
--- a/test/addons-napi/test_error/build/config.gypi
+++ /dev/null
@@ -1,70 +0,0 @@
-# Do not edit. File was generated by node-gyp's "configure" step
-{
- "target_defaults": {
- "cflags": [],
- "default_configuration": "Release",
- "defines": [],
- "include_dirs": [],
- "libraries": []
- },
- "variables": {
- "asan": 0,
- "coverage": "false",
- "debug_devtools": "node",
- "debug_http2": "false",
- "debug_nghttp2": "false",
- "force_dynamic_crt": 0,
- "host_arch": "x64",
- "icu_data_file": "icudt59l.dat",
- "icu_data_in": "../../deps/icu-small/source/data/in/icudt59l.dat",
- "icu_endianness": "l",
- "icu_gyp_path": "tools/icu/icu-generic.gyp",
- "icu_locales": "en,root",
- "icu_path": "deps/icu-small",
- "icu_small": "true",
- "icu_ver_major": "59",
- "llvm_version": 0,
- "node_byteorder": "little",
- "node_enable_d8": "false",
- "node_enable_v8_vtunejit": "false",
- "node_install_npm": "true",
- "node_module_version": 58,
- "node_no_browser_globals": "false",
- "node_prefix": "/usr/local",
- "node_release_urlbase": "",
- "node_shared": "false",
- "node_shared_cares": "false",
- "node_shared_http_parser": "false",
- "node_shared_libuv": "false",
- "node_shared_openssl": "false",
- "node_shared_zlib": "false",
- "node_tag": "",
- "node_use_bundled_v8": "true",
- "node_use_dtrace": "true",
- "node_use_etw": "false",
- "node_use_lttng": "false",
- "node_use_openssl": "true",
- "node_use_perfctr": "false",
- "node_use_v8_platform": "true",
- "node_without_node_options": "false",
- "openssl_fips": "",
- "openssl_no_asm": 0,
- "shlib_suffix": "58.dylib",
- "target_arch": "x64",
- "uv_parent_path": "/deps/uv/",
- "uv_use_dtrace": "true",
- "v8_enable_gdbjit": 0,
- "v8_enable_i18n_support": 1,
- "v8_enable_inspector": 1,
- "v8_no_strict_aliasing": 1,
- "v8_optimized_debug": 0,
- "v8_promise_internal_field_count": 1,
- "v8_random_seed": 0,
- "v8_trace_maps": 0,
- "v8_use_snapshot": "true",
- "want_separate_host_toolset": 0,
- "xcode_version": "8.0",
- "nodedir": "/Users/trott/io.js",
- "standalone_static_library": 1
- }
-}
diff --git a/test/addons-napi/test_error/build/gyp-mac-tool b/test/addons-napi/test_error/build/gyp-mac-tool
deleted file mode 100755
index 8ef02b0493a003..00000000000000
--- a/test/addons-napi/test_error/build/gyp-mac-tool
+++ /dev/null
@@ -1,611 +0,0 @@
-#!/usr/bin/env python
-# Generated by gyp. Do not edit.
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Utility functions to perform Xcode-style build steps.
-
-These functions are executed via gyp-mac-tool when using the Makefile generator.
-"""
-
-import fcntl
-import fnmatch
-import glob
-import json
-import os
-import plistlib
-import re
-import shutil
-import string
-import subprocess
-import sys
-import tempfile
-
-
-def main(args):
- executor = MacTool()
- exit_code = executor.Dispatch(args)
- if exit_code is not None:
- sys.exit(exit_code)
-
-
-class MacTool(object):
- """This class performs all the Mac tooling steps. The methods can either be
- executed directly, or dispatched from an argument list."""
-
- def Dispatch(self, args):
- """Dispatches a string command to a method."""
- if len(args) < 1:
- raise Exception("Not enough arguments")
-
- method = "Exec%s" % self._CommandifyName(args[0])
- return getattr(self, method)(*args[1:])
-
- def _CommandifyName(self, name_string):
- """Transforms a tool name like copy-info-plist to CopyInfoPlist"""
- return name_string.title().replace('-', '')
-
- def ExecCopyBundleResource(self, source, dest, convert_to_binary):
- """Copies a resource file to the bundle/Resources directory, performing any
- necessary compilation on each resource."""
- extension = os.path.splitext(source)[1].lower()
- if os.path.isdir(source):
- # Copy tree.
- # TODO(thakis): This copies file attributes like mtime, while the
- # single-file branch below doesn't. This should probably be changed to
- # be consistent with the single-file branch.
- if os.path.exists(dest):
- shutil.rmtree(dest)
- shutil.copytree(source, dest)
- elif extension == '.xib':
- return self._CopyXIBFile(source, dest)
- elif extension == '.storyboard':
- return self._CopyXIBFile(source, dest)
- elif extension == '.strings':
- self._CopyStringsFile(source, dest, convert_to_binary)
- else:
- shutil.copy(source, dest)
-
- def _CopyXIBFile(self, source, dest):
- """Compiles a XIB file with ibtool into a binary plist in the bundle."""
-
- # ibtool sometimes crashes with relative paths. See crbug.com/314728.
- base = os.path.dirname(os.path.realpath(__file__))
- if os.path.relpath(source):
- source = os.path.join(base, source)
- if os.path.relpath(dest):
- dest = os.path.join(base, dest)
-
- args = ['xcrun', 'ibtool', '--errors', '--warnings', '--notices',
- '--output-format', 'human-readable-text', '--compile', dest, source]
- ibtool_section_re = re.compile(r'/\*.*\*/')
- ibtool_re = re.compile(r'.*note:.*is clipping its content')
- ibtoolout = subprocess.Popen(args, stdout=subprocess.PIPE)
- current_section_header = None
- for line in ibtoolout.stdout:
- if ibtool_section_re.match(line):
- current_section_header = line
- elif not ibtool_re.match(line):
- if current_section_header:
- sys.stdout.write(current_section_header)
- current_section_header = None
- sys.stdout.write(line)
- return ibtoolout.returncode
-
- def _ConvertToBinary(self, dest):
- subprocess.check_call([
- 'xcrun', 'plutil', '-convert', 'binary1', '-o', dest, dest])
-
- def _CopyStringsFile(self, source, dest, convert_to_binary):
- """Copies a .strings file using iconv to reconvert the input into UTF-16."""
- input_code = self._DetectInputEncoding(source) or "UTF-8"
-
- # Xcode's CpyCopyStringsFile / builtin-copyStrings seems to call
- # CFPropertyListCreateFromXMLData() behind the scenes; at least it prints
- # CFPropertyListCreateFromXMLData(): Old-style plist parser: missing
- # semicolon in dictionary.
- # on invalid files. Do the same kind of validation.
- import CoreFoundation
- s = open(source, 'rb').read()
- d = CoreFoundation.CFDataCreate(None, s, len(s))
- _, error = CoreFoundation.CFPropertyListCreateFromXMLData(None, d, 0, None)
- if error:
- return
-
- fp = open(dest, 'wb')
- fp.write(s.decode(input_code).encode('UTF-16'))
- fp.close()
-
- if convert_to_binary == 'True':
- self._ConvertToBinary(dest)
-
- def _DetectInputEncoding(self, file_name):
- """Reads the first few bytes from file_name and tries to guess the text
- encoding. Returns None as a guess if it can't detect it."""
- fp = open(file_name, 'rb')
- try:
- header = fp.read(3)
- except e:
- fp.close()
- return None
- fp.close()
- if header.startswith("\xFE\xFF"):
- return "UTF-16"
- elif header.startswith("\xFF\xFE"):
- return "UTF-16"
- elif header.startswith("\xEF\xBB\xBF"):
- return "UTF-8"
- else:
- return None
-
- def ExecCopyInfoPlist(self, source, dest, convert_to_binary, *keys):
- """Copies the |source| Info.plist to the destination directory |dest|."""
- # Read the source Info.plist into memory.
- fd = open(source, 'r')
- lines = fd.read()
- fd.close()
-
- # Insert synthesized key/value pairs (e.g. BuildMachineOSBuild).
- plist = plistlib.readPlistFromString(lines)
- if keys:
- plist = dict(plist.items() + json.loads(keys[0]).items())
- lines = plistlib.writePlistToString(plist)
-
- # Go through all the environment variables and replace them as variables in
- # the file.
- IDENT_RE = re.compile(r'[/\s]')
- for key in os.environ:
- if key.startswith('_'):
- continue
- evar = '${%s}' % key
- evalue = os.environ[key]
- lines = string.replace(lines, evar, evalue)
-
- # Xcode supports various suffices on environment variables, which are
- # all undocumented. :rfc1034identifier is used in the standard project
- # template these days, and :identifier was used earlier. They are used to
- # convert non-url characters into things that look like valid urls --
- # except that the replacement character for :identifier, '_' isn't valid
- # in a URL either -- oops, hence :rfc1034identifier was born.
- evar = '${%s:identifier}' % key
- evalue = IDENT_RE.sub('_', os.environ[key])
- lines = string.replace(lines, evar, evalue)
-
- evar = '${%s:rfc1034identifier}' % key
- evalue = IDENT_RE.sub('-', os.environ[key])
- lines = string.replace(lines, evar, evalue)
-
- # Remove any keys with values that haven't been replaced.
- lines = lines.split('\n')
- for i in range(len(lines)):
- if lines[i].strip().startswith("${"):
- lines[i] = None
- lines[i - 1] = None
- lines = '\n'.join(filter(lambda x: x is not None, lines))
-
- # Write out the file with variables replaced.
- fd = open(dest, 'w')
- fd.write(lines)
- fd.close()
-
- # Now write out PkgInfo file now that the Info.plist file has been
- # "compiled".
- self._WritePkgInfo(dest)
-
- if convert_to_binary == 'True':
- self._ConvertToBinary(dest)
-
- def _WritePkgInfo(self, info_plist):
- """This writes the PkgInfo file from the data stored in Info.plist."""
- plist = plistlib.readPlist(info_plist)
- if not plist:
- return
-
- # Only create PkgInfo for executable types.
- package_type = plist['CFBundlePackageType']
- if package_type != 'APPL':
- return
-
- # The format of PkgInfo is eight characters, representing the bundle type
- # and bundle signature, each four characters. If that is missing, four
- # '?' characters are used instead.
- signature_code = plist.get('CFBundleSignature', '????')
- if len(signature_code) != 4: # Wrong length resets everything, too.
- signature_code = '?' * 4
-
- dest = os.path.join(os.path.dirname(info_plist), 'PkgInfo')
- fp = open(dest, 'w')
- fp.write('%s%s' % (package_type, signature_code))
- fp.close()
-
- def ExecFlock(self, lockfile, *cmd_list):
- """Emulates the most basic behavior of Linux's flock(1)."""
- # Rely on exception handling to report errors.
- fd = os.open(lockfile, os.O_RDONLY|os.O_NOCTTY|os.O_CREAT, 0o666)
- fcntl.flock(fd, fcntl.LOCK_EX)
- return subprocess.call(cmd_list)
-
- def ExecFilterLibtool(self, *cmd_list):
- """Calls libtool and filters out '/path/to/libtool: file: foo.o has no
- symbols'."""
- libtool_re = re.compile(r'^.*libtool: file: .* has no symbols$')
- libtool_re5 = re.compile(
- r'^.*libtool: warning for library: ' +
- r'.* the table of contents is empty ' +
- r'\(no object file members in the library define global symbols\)$')
- env = os.environ.copy()
- # Ref:
- # http://www.opensource.apple.com/source/cctools/cctools-809/misc/libtool.c
- # The problem with this flag is that it resets the file mtime on the file to
- # epoch=0, e.g. 1970-1-1 or 1969-12-31 depending on timezone.
- env['ZERO_AR_DATE'] = '1'
- libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env)
- _, err = libtoolout.communicate()
- for line in err.splitlines():
- if not libtool_re.match(line) and not libtool_re5.match(line):
- print >>sys.stderr, line
- # Unconditionally touch the output .a file on the command line if present
- # and the command succeeded. A bit hacky.
- if not libtoolout.returncode:
- for i in range(len(cmd_list) - 1):
- if cmd_list[i] == "-o" and cmd_list[i+1].endswith('.a'):
- os.utime(cmd_list[i+1], None)
- break
- return libtoolout.returncode
-
- def ExecPackageFramework(self, framework, version):
- """Takes a path to Something.framework and the Current version of that and
- sets up all the symlinks."""
- # Find the name of the binary based on the part before the ".framework".
- binary = os.path.basename(framework).split('.')[0]
-
- CURRENT = 'Current'
- RESOURCES = 'Resources'
- VERSIONS = 'Versions'
-
- if not os.path.exists(os.path.join(framework, VERSIONS, version, binary)):
- # Binary-less frameworks don't seem to contain symlinks (see e.g.
- # chromium's out/Debug/org.chromium.Chromium.manifest/ bundle).
- return
-
- # Move into the framework directory to set the symlinks correctly.
- pwd = os.getcwd()
- os.chdir(framework)
-
- # Set up the Current version.
- self._Relink(version, os.path.join(VERSIONS, CURRENT))
-
- # Set up the root symlinks.
- self._Relink(os.path.join(VERSIONS, CURRENT, binary), binary)
- self._Relink(os.path.join(VERSIONS, CURRENT, RESOURCES), RESOURCES)
-
- # Back to where we were before!
- os.chdir(pwd)
-
- def _Relink(self, dest, link):
- """Creates a symlink to |dest| named |link|. If |link| already exists,
- it is overwritten."""
- if os.path.lexists(link):
- os.remove(link)
- os.symlink(dest, link)
-
- def ExecCompileXcassets(self, keys, *inputs):
- """Compiles multiple .xcassets files into a single .car file.
-
- This invokes 'actool' to compile all the inputs .xcassets files. The
- |keys| arguments is a json-encoded dictionary of extra arguments to
- pass to 'actool' when the asset catalogs contains an application icon
- or a launch image.
-
- Note that 'actool' does not create the Assets.car file if the asset
- catalogs does not contains imageset.
- """
- command_line = [
- 'xcrun', 'actool', '--output-format', 'human-readable-text',
- '--compress-pngs', '--notices', '--warnings', '--errors',
- ]
- is_iphone_target = 'IPHONEOS_DEPLOYMENT_TARGET' in os.environ
- if is_iphone_target:
- platform = os.environ['CONFIGURATION'].split('-')[-1]
- if platform not in ('iphoneos', 'iphonesimulator'):
- platform = 'iphonesimulator'
- command_line.extend([
- '--platform', platform, '--target-device', 'iphone',
- '--target-device', 'ipad', '--minimum-deployment-target',
- os.environ['IPHONEOS_DEPLOYMENT_TARGET'], '--compile',
- os.path.abspath(os.environ['CONTENTS_FOLDER_PATH']),
- ])
- else:
- command_line.extend([
- '--platform', 'macosx', '--target-device', 'mac',
- '--minimum-deployment-target', os.environ['MACOSX_DEPLOYMENT_TARGET'],
- '--compile',
- os.path.abspath(os.environ['UNLOCALIZED_RESOURCES_FOLDER_PATH']),
- ])
- if keys:
- keys = json.loads(keys)
- for key, value in keys.iteritems():
- arg_name = '--' + key
- if isinstance(value, bool):
- if value:
- command_line.append(arg_name)
- elif isinstance(value, list):
- for v in value:
- command_line.append(arg_name)
- command_line.append(str(v))
- else:
- command_line.append(arg_name)
- command_line.append(str(value))
- # Note: actool crashes if inputs path are relative, so use os.path.abspath
- # to get absolute path name for inputs.
- command_line.extend(map(os.path.abspath, inputs))
- subprocess.check_call(command_line)
-
- def ExecMergeInfoPlist(self, output, *inputs):
- """Merge multiple .plist files into a single .plist file."""
- merged_plist = {}
- for path in inputs:
- plist = self._LoadPlistMaybeBinary(path)
- self._MergePlist(merged_plist, plist)
- plistlib.writePlist(merged_plist, output)
-
- def ExecCodeSignBundle(self, key, resource_rules, entitlements, provisioning):
- """Code sign a bundle.
-
- This function tries to code sign an iOS bundle, following the same
- algorithm as Xcode:
- 1. copy ResourceRules.plist from the user or the SDK into the bundle,
- 2. pick the provisioning profile that best match the bundle identifier,
- and copy it into the bundle as embedded.mobileprovision,
- 3. copy Entitlements.plist from user or SDK next to the bundle,
- 4. code sign the bundle.
- """
- resource_rules_path = self._InstallResourceRules(resource_rules)
- substitutions, overrides = self._InstallProvisioningProfile(
- provisioning, self._GetCFBundleIdentifier())
- entitlements_path = self._InstallEntitlements(
- entitlements, substitutions, overrides)
- subprocess.check_call([
- 'codesign', '--force', '--sign', key, '--resource-rules',
- resource_rules_path, '--entitlements', entitlements_path,
- os.path.join(
- os.environ['TARGET_BUILD_DIR'],
- os.environ['FULL_PRODUCT_NAME'])])
-
- def _InstallResourceRules(self, resource_rules):
- """Installs ResourceRules.plist from user or SDK into the bundle.
-
- Args:
- resource_rules: string, optional, path to the ResourceRules.plist file
- to use, default to "${SDKROOT}/ResourceRules.plist"
-
- Returns:
- Path to the copy of ResourceRules.plist into the bundle.
- """
- source_path = resource_rules
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['CONTENTS_FOLDER_PATH'],
- 'ResourceRules.plist')
- if not source_path:
- source_path = os.path.join(
- os.environ['SDKROOT'], 'ResourceRules.plist')
- shutil.copy2(source_path, target_path)
- return target_path
-
- def _InstallProvisioningProfile(self, profile, bundle_identifier):
- """Installs embedded.mobileprovision into the bundle.
-
- Args:
- profile: string, optional, short name of the .mobileprovision file
- to use, if empty or the file is missing, the best file installed
- will be used
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
-
- Returns:
- A tuple containing two dictionary: variables substitutions and values
- to overrides when generating the entitlements file.
- """
- source_path, provisioning_data, team_id = self._FindProvisioningProfile(
- profile, bundle_identifier)
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['CONTENTS_FOLDER_PATH'],
- 'embedded.mobileprovision')
- shutil.copy2(source_path, target_path)
- substitutions = self._GetSubstitutions(bundle_identifier, team_id + '.')
- return substitutions, provisioning_data['Entitlements']
-
- def _FindProvisioningProfile(self, profile, bundle_identifier):
- """Finds the .mobileprovision file to use for signing the bundle.
-
- Checks all the installed provisioning profiles (or if the user specified
- the PROVISIONING_PROFILE variable, only consult it) and select the most
- specific that correspond to the bundle identifier.
-
- Args:
- profile: string, optional, short name of the .mobileprovision file
- to use, if empty or the file is missing, the best file installed
- will be used
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
-
- Returns:
- A tuple of the path to the selected provisioning profile, the data of
- the embedded plist in the provisioning profile and the team identifier
- to use for code signing.
-
- Raises:
- SystemExit: if no .mobileprovision can be used to sign the bundle.
- """
- profiles_dir = os.path.join(
- os.environ['HOME'], 'Library', 'MobileDevice', 'Provisioning Profiles')
- if not os.path.isdir(profiles_dir):
- print >>sys.stderr, (
- 'cannot find mobile provisioning for %s' % bundle_identifier)
- sys.exit(1)
- provisioning_profiles = None
- if profile:
- profile_path = os.path.join(profiles_dir, profile + '.mobileprovision')
- if os.path.exists(profile_path):
- provisioning_profiles = [profile_path]
- if not provisioning_profiles:
- provisioning_profiles = glob.glob(
- os.path.join(profiles_dir, '*.mobileprovision'))
- valid_provisioning_profiles = {}
- for profile_path in provisioning_profiles:
- profile_data = self._LoadProvisioningProfile(profile_path)
- app_id_pattern = profile_data.get(
- 'Entitlements', {}).get('application-identifier', '')
- for team_identifier in profile_data.get('TeamIdentifier', []):
- app_id = '%s.%s' % (team_identifier, bundle_identifier)
- if fnmatch.fnmatch(app_id, app_id_pattern):
- valid_provisioning_profiles[app_id_pattern] = (
- profile_path, profile_data, team_identifier)
- if not valid_provisioning_profiles:
- print >>sys.stderr, (
- 'cannot find mobile provisioning for %s' % bundle_identifier)
- sys.exit(1)
- # If the user has multiple provisioning profiles installed that can be
- # used for ${bundle_identifier}, pick the most specific one (ie. the
- # provisioning profile whose pattern is the longest).
- selected_key = max(valid_provisioning_profiles, key=lambda v: len(v))
- return valid_provisioning_profiles[selected_key]
-
- def _LoadProvisioningProfile(self, profile_path):
- """Extracts the plist embedded in a provisioning profile.
-
- Args:
- profile_path: string, path to the .mobileprovision file
-
- Returns:
- Content of the plist embedded in the provisioning profile as a dictionary.
- """
- with tempfile.NamedTemporaryFile() as temp:
- subprocess.check_call([
- 'security', 'cms', '-D', '-i', profile_path, '-o', temp.name])
- return self._LoadPlistMaybeBinary(temp.name)
-
- def _MergePlist(self, merged_plist, plist):
- """Merge |plist| into |merged_plist|."""
- for key, value in plist.iteritems():
- if isinstance(value, dict):
- merged_value = merged_plist.get(key, {})
- if isinstance(merged_value, dict):
- self._MergePlist(merged_value, value)
- merged_plist[key] = merged_value
- else:
- merged_plist[key] = value
- else:
- merged_plist[key] = value
-
- def _LoadPlistMaybeBinary(self, plist_path):
- """Loads into a memory a plist possibly encoded in binary format.
-
- This is a wrapper around plistlib.readPlist that tries to convert the
- plist to the XML format if it can't be parsed (assuming that it is in
- the binary format).
-
- Args:
- plist_path: string, path to a plist file, in XML or binary format
-
- Returns:
- Content of the plist as a dictionary.
- """
- try:
- # First, try to read the file using plistlib that only supports XML,
- # and if an exception is raised, convert a temporary copy to XML and
- # load that copy.
- return plistlib.readPlist(plist_path)
- except:
- pass
- with tempfile.NamedTemporaryFile() as temp:
- shutil.copy2(plist_path, temp.name)
- subprocess.check_call(['plutil', '-convert', 'xml1', temp.name])
- return plistlib.readPlist(temp.name)
-
- def _GetSubstitutions(self, bundle_identifier, app_identifier_prefix):
- """Constructs a dictionary of variable substitutions for Entitlements.plist.
-
- Args:
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
- app_identifier_prefix: string, value for AppIdentifierPrefix
-
- Returns:
- Dictionary of substitutions to apply when generating Entitlements.plist.
- """
- return {
- 'CFBundleIdentifier': bundle_identifier,
- 'AppIdentifierPrefix': app_identifier_prefix,
- }
-
- def _GetCFBundleIdentifier(self):
- """Extracts CFBundleIdentifier value from Info.plist in the bundle.
-
- Returns:
- Value of CFBundleIdentifier in the Info.plist located in the bundle.
- """
- info_plist_path = os.path.join(
- os.environ['TARGET_BUILD_DIR'],
- os.environ['INFOPLIST_PATH'])
- info_plist_data = self._LoadPlistMaybeBinary(info_plist_path)
- return info_plist_data['CFBundleIdentifier']
-
- def _InstallEntitlements(self, entitlements, substitutions, overrides):
- """Generates and install the ${BundleName}.xcent entitlements file.
-
- Expands variables "$(variable)" pattern in the source entitlements file,
- add extra entitlements defined in the .mobileprovision file and the copy
- the generated plist to "${BundlePath}.xcent".
-
- Args:
- entitlements: string, optional, path to the Entitlements.plist template
- to use, defaults to "${SDKROOT}/Entitlements.plist"
- substitutions: dictionary, variable substitutions
- overrides: dictionary, values to add to the entitlements
-
- Returns:
- Path to the generated entitlements file.
- """
- source_path = entitlements
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['PRODUCT_NAME'] + '.xcent')
- if not source_path:
- source_path = os.path.join(
- os.environ['SDKROOT'],
- 'Entitlements.plist')
- shutil.copy2(source_path, target_path)
- data = self._LoadPlistMaybeBinary(target_path)
- data = self._ExpandVariables(data, substitutions)
- if overrides:
- for key in overrides:
- if key not in data:
- data[key] = overrides[key]
- plistlib.writePlist(data, target_path)
- return target_path
-
- def _ExpandVariables(self, data, substitutions):
- """Expands variables "$(variable)" in data.
-
- Args:
- data: object, can be either string, list or dictionary
- substitutions: dictionary, variable substitutions to perform
-
- Returns:
- Copy of data where each references to "$(variable)" has been replaced
- by the corresponding value found in substitutions, or left intact if
- the key was not found.
- """
- if isinstance(data, str):
- for key, value in substitutions.iteritems():
- data = data.replace('$(%s)' % key, value)
- return data
- if isinstance(data, list):
- return [self._ExpandVariables(v, substitutions) for v in data]
- if isinstance(data, dict):
- return {k: self._ExpandVariables(data[k], substitutions) for k in data}
- return data
-
-if __name__ == '__main__':
- sys.exit(main(sys.argv[1:]))
diff --git a/test/addons-napi/test_error/build/test_error.target.mk b/test/addons-napi/test_error/build/test_error.target.mk
deleted file mode 100644
index 6cf5bbf77d3aa5..00000000000000
--- a/test/addons-napi/test_error/build/test_error.target.mk
+++ /dev/null
@@ -1,178 +0,0 @@
-# This file is generated by gyp; do not edit.
-
-TOOLSET := target
-TARGET := test_error
-DEFS_Debug := \
- '-DNODE_GYP_MODULE_NAME=test_error' \
- '-DUSING_UV_SHARED=1' \
- '-DUSING_V8_SHARED=1' \
- '-DV8_DEPRECATION_WARNINGS=1' \
- '-D_DARWIN_USE_64_BIT_INODE=1' \
- '-D_LARGEFILE_SOURCE' \
- '-D_FILE_OFFSET_BITS=64' \
- '-DBUILDING_NODE_EXTENSION' \
- '-DDEBUG' \
- '-D_DEBUG' \
- '-DV8_ENABLE_CHECKS'
-
-# Flags passed to all source files.
-CFLAGS_Debug := \
- -O0 \
- -gdwarf-2 \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -Wall \
- -Wendif-labels \
- -W \
- -Wno-unused-parameter
-
-# Flags passed to only C files.
-CFLAGS_C_Debug := \
- -fno-strict-aliasing
-
-# Flags passed to only C++ files.
-CFLAGS_CC_Debug := \
- -std=gnu++0x \
- -stdlib=libc++ \
- -fno-rtti \
- -fno-exceptions \
- -fno-threadsafe-statics \
- -fno-strict-aliasing
-
-# Flags passed to only ObjC files.
-CFLAGS_OBJC_Debug :=
-
-# Flags passed to only ObjC++ files.
-CFLAGS_OBJCC_Debug :=
-
-INCS_Debug := \
- -I/Users/trott/io.js/include/node \
- -I/Users/trott/io.js/src \
- -I/Users/trott/io.js/deps/uv/include \
- -I/Users/trott/io.js/deps/v8/include
-
-DEFS_Release := \
- '-DNODE_GYP_MODULE_NAME=test_error' \
- '-DUSING_UV_SHARED=1' \
- '-DUSING_V8_SHARED=1' \
- '-DV8_DEPRECATION_WARNINGS=1' \
- '-D_DARWIN_USE_64_BIT_INODE=1' \
- '-D_LARGEFILE_SOURCE' \
- '-D_FILE_OFFSET_BITS=64' \
- '-DBUILDING_NODE_EXTENSION'
-
-# Flags passed to all source files.
-CFLAGS_Release := \
- -Os \
- -gdwarf-2 \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -Wall \
- -Wendif-labels \
- -W \
- -Wno-unused-parameter
-
-# Flags passed to only C files.
-CFLAGS_C_Release := \
- -fno-strict-aliasing
-
-# Flags passed to only C++ files.
-CFLAGS_CC_Release := \
- -std=gnu++0x \
- -stdlib=libc++ \
- -fno-rtti \
- -fno-exceptions \
- -fno-threadsafe-statics \
- -fno-strict-aliasing
-
-# Flags passed to only ObjC files.
-CFLAGS_OBJC_Release :=
-
-# Flags passed to only ObjC++ files.
-CFLAGS_OBJCC_Release :=
-
-INCS_Release := \
- -I/Users/trott/io.js/include/node \
- -I/Users/trott/io.js/src \
- -I/Users/trott/io.js/deps/uv/include \
- -I/Users/trott/io.js/deps/v8/include
-
-OBJS := \
- $(obj).target/$(TARGET)/test_error.o
-
-# Add to the list of files we specially track dependencies for.
-all_deps += $(OBJS)
-
-# CFLAGS et al overrides must be target-local.
-# See "Target-specific Variable Values" in the GNU Make manual.
-$(OBJS): TOOLSET := $(TOOLSET)
-$(OBJS): GYP_CFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE))
-$(OBJS): GYP_CXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE))
-$(OBJS): GYP_OBJCFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) $(CFLAGS_OBJC_$(BUILDTYPE))
-$(OBJS): GYP_OBJCXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) $(CFLAGS_OBJCC_$(BUILDTYPE))
-
-# Suffix rules, putting all outputs into $(obj).
-
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-
-# Try building from generated source, too.
-
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-
-# End of this set of suffix rules
-### Rules for final target.
-LDFLAGS_Debug := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -L$(builddir) \
- -stdlib=libc++
-
-LIBTOOLFLAGS_Debug := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first
-
-LDFLAGS_Release := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -L$(builddir) \
- -stdlib=libc++
-
-LIBTOOLFLAGS_Release := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first
-
-LIBS :=
-
-$(builddir)/test_error.node: GYP_LDFLAGS := $(LDFLAGS_$(BUILDTYPE))
-$(builddir)/test_error.node: LIBS := $(LIBS)
-$(builddir)/test_error.node: GYP_LIBTOOLFLAGS := $(LIBTOOLFLAGS_$(BUILDTYPE))
-$(builddir)/test_error.node: TOOLSET := $(TOOLSET)
-$(builddir)/test_error.node: $(OBJS) FORCE_DO_CMD
- $(call do_cmd,solink_module)
-
-all_deps += $(builddir)/test_error.node
-# Add target alias
-.PHONY: test_error
-test_error: $(builddir)/test_error.node
-
-# Short alias for building this executable.
-.PHONY: test_error.node
-test_error.node: $(builddir)/test_error.node
-
-# Add executable to "all" target.
-.PHONY: all
-all: $(builddir)/test_error.node
-
diff --git a/test/addons-napi/test_exception/build/Makefile b/test/addons-napi/test_exception/build/Makefile
deleted file mode 100644
index a21781273bbc21..00000000000000
--- a/test/addons-napi/test_exception/build/Makefile
+++ /dev/null
@@ -1,342 +0,0 @@
-# We borrow heavily from the kernel build setup, though we are simpler since
-# we don't have Kconfig tweaking settings on us.
-
-# The implicit make rules have it looking for RCS files, among other things.
-# We instead explicitly write all the rules we care about.
-# It's even quicker (saves ~200ms) to pass -r on the command line.
-MAKEFLAGS=-r
-
-# The source directory tree.
-srcdir := ..
-abs_srcdir := $(abspath $(srcdir))
-
-# The name of the builddir.
-builddir_name ?= .
-
-# The V=1 flag on command line makes us verbosely print command lines.
-ifdef V
- quiet=
-else
- quiet=quiet_
-endif
-
-# Specify BUILDTYPE=Release on the command line for a release build.
-BUILDTYPE ?= Release
-
-# Directory all our build output goes into.
-# Note that this must be two directories beneath src/ for unit tests to pass,
-# as they reach into the src/ directory for data with relative paths.
-builddir ?= $(builddir_name)/$(BUILDTYPE)
-abs_builddir := $(abspath $(builddir))
-depsdir := $(builddir)/.deps
-
-# Object output directory.
-obj := $(builddir)/obj
-abs_obj := $(abspath $(obj))
-
-# We build up a list of every single one of the targets so we can slurp in the
-# generated dependency rule Makefiles in one pass.
-all_deps :=
-
-
-
-CC.target ?= $(CC)
-CFLAGS.target ?= $(CPPFLAGS) $(CFLAGS)
-CXX.target ?= $(CXX)
-CXXFLAGS.target ?= $(CPPFLAGS) $(CXXFLAGS)
-LINK.target ?= $(LINK)
-LDFLAGS.target ?= $(LDFLAGS)
-AR.target ?= $(AR)
-
-# C++ apps need to be linked with g++.
-LINK ?= $(CXX.target)
-
-# TODO(evan): move all cross-compilation logic to gyp-time so we don't need
-# to replicate this environment fallback in make as well.
-CC.host ?= gcc
-CFLAGS.host ?= $(CPPFLAGS_host) $(CFLAGS_host)
-CXX.host ?= g++
-CXXFLAGS.host ?= $(CPPFLAGS_host) $(CXXFLAGS_host)
-LINK.host ?= $(CXX.host)
-LDFLAGS.host ?=
-AR.host ?= ar
-
-# Define a dir function that can handle spaces.
-# http://www.gnu.org/software/make/manual/make.html#Syntax-of-Functions
-# "leading spaces cannot appear in the text of the first argument as written.
-# These characters can be put into the argument value by variable substitution."
-empty :=
-space := $(empty) $(empty)
-
-# http://stackoverflow.com/questions/1189781/using-make-dir-or-notdir-on-a-path-with-spaces
-replace_spaces = $(subst $(space),?,$1)
-unreplace_spaces = $(subst ?,$(space),$1)
-dirx = $(call unreplace_spaces,$(dir $(call replace_spaces,$1)))
-
-# Flags to make gcc output dependency info. Note that you need to be
-# careful here to use the flags that ccache and distcc can understand.
-# We write to a dep file on the side first and then rename at the end
-# so we can't end up with a broken dep file.
-depfile = $(depsdir)/$(call replace_spaces,$@).d
-DEPFLAGS = -MMD -MF $(depfile).raw
-
-# We have to fixup the deps output in a few ways.
-# (1) the file output should mention the proper .o file.
-# ccache or distcc lose the path to the target, so we convert a rule of
-# the form:
-# foobar.o: DEP1 DEP2
-# into
-# path/to/foobar.o: DEP1 DEP2
-# (2) we want missing files not to cause us to fail to build.
-# We want to rewrite
-# foobar.o: DEP1 DEP2 \
-# DEP3
-# to
-# DEP1:
-# DEP2:
-# DEP3:
-# so if the files are missing, they're just considered phony rules.
-# We have to do some pretty insane escaping to get those backslashes
-# and dollar signs past make, the shell, and sed at the same time.
-# Doesn't work with spaces, but that's fine: .d files have spaces in
-# their names replaced with other characters.
-define fixup_dep
-# The depfile may not exist if the input file didn't have any #includes.
-touch $(depfile).raw
-# Fixup path as in (1).
-sed -e "s|^$(notdir $@)|$@|" $(depfile).raw >> $(depfile)
-# Add extra rules as in (2).
-# We remove slashes and replace spaces with new lines;
-# remove blank lines;
-# delete the first line and append a colon to the remaining lines.
-sed -e 's|\\||' -e 'y| |\n|' $(depfile).raw |\
- grep -v '^$$' |\
- sed -e 1d -e 's|$$|:|' \
- >> $(depfile)
-rm $(depfile).raw
-endef
-
-# Command definitions:
-# - cmd_foo is the actual command to run;
-# - quiet_cmd_foo is the brief-output summary of the command.
-
-quiet_cmd_cc = CC($(TOOLSET)) $@
-cmd_cc = $(CC.$(TOOLSET)) $(GYP_CFLAGS) $(DEPFLAGS) $(CFLAGS.$(TOOLSET)) -c -o $@ $<
-
-quiet_cmd_cxx = CXX($(TOOLSET)) $@
-cmd_cxx = $(CXX.$(TOOLSET)) $(GYP_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-
-quiet_cmd_objc = CXX($(TOOLSET)) $@
-cmd_objc = $(CC.$(TOOLSET)) $(GYP_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-quiet_cmd_objcxx = CXX($(TOOLSET)) $@
-cmd_objcxx = $(CXX.$(TOOLSET)) $(GYP_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-# Commands for precompiled header files.
-quiet_cmd_pch_c = CXX($(TOOLSET)) $@
-cmd_pch_c = $(CC.$(TOOLSET)) $(GYP_PCH_CFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-quiet_cmd_pch_cc = CXX($(TOOLSET)) $@
-cmd_pch_cc = $(CC.$(TOOLSET)) $(GYP_PCH_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-quiet_cmd_pch_m = CXX($(TOOLSET)) $@
-cmd_pch_m = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $<
-quiet_cmd_pch_mm = CXX($(TOOLSET)) $@
-cmd_pch_mm = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-# gyp-mac-tool is written next to the root Makefile by gyp.
-# Use $(4) for the command, since $(2) and $(3) are used as flag by do_cmd
-# already.
-quiet_cmd_mac_tool = MACTOOL $(4) $<
-cmd_mac_tool = ./gyp-mac-tool $(4) $< "$@"
-
-quiet_cmd_mac_package_framework = PACKAGE FRAMEWORK $@
-cmd_mac_package_framework = ./gyp-mac-tool package-framework "$@" $(4)
-
-quiet_cmd_infoplist = INFOPLIST $@
-cmd_infoplist = $(CC.$(TOOLSET)) -E -P -Wno-trigraphs -x c $(INFOPLIST_DEFINES) "$<" -o "$@"
-
-quiet_cmd_touch = TOUCH $@
-cmd_touch = touch $@
-
-quiet_cmd_copy = COPY $@
-# send stderr to /dev/null to ignore messages when linking directories.
-cmd_copy = rm -rf "$@" && cp -af "$<" "$@"
-
-quiet_cmd_alink = LIBTOOL-STATIC $@
-cmd_alink = rm -f $@ && ./gyp-mac-tool filter-libtool libtool $(GYP_LIBTOOLFLAGS) -static -o $@ $(filter %.o,$^)
-
-quiet_cmd_link = LINK($(TOOLSET)) $@
-cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS)
-
-quiet_cmd_solink = SOLINK($(TOOLSET)) $@
-cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS)
-
-quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@
-cmd_solink_module = $(LINK.$(TOOLSET)) -bundle $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS)
-
-
-# Define an escape_quotes function to escape single quotes.
-# This allows us to handle quotes properly as long as we always use
-# use single quotes and escape_quotes.
-escape_quotes = $(subst ','\'',$(1))
-# This comment is here just to include a ' to unconfuse syntax highlighting.
-# Define an escape_vars function to escape '$' variable syntax.
-# This allows us to read/write command lines with shell variables (e.g.
-# $LD_LIBRARY_PATH), without triggering make substitution.
-escape_vars = $(subst $$,$$$$,$(1))
-# Helper that expands to a shell command to echo a string exactly as it is in
-# make. This uses printf instead of echo because printf's behaviour with respect
-# to escape sequences is more portable than echo's across different shells
-# (e.g., dash, bash).
-exact_echo = printf '%s\n' '$(call escape_quotes,$(1))'
-
-# Helper to compare the command we're about to run against the command
-# we logged the last time we ran the command. Produces an empty
-# string (false) when the commands match.
-# Tricky point: Make has no string-equality test function.
-# The kernel uses the following, but it seems like it would have false
-# positives, where one string reordered its arguments.
-# arg_check = $(strip $(filter-out $(cmd_$(1)), $(cmd_$@)) \
-# $(filter-out $(cmd_$@), $(cmd_$(1))))
-# We instead substitute each for the empty string into the other, and
-# say they're equal if both substitutions produce the empty string.
-# .d files contain ? instead of spaces, take that into account.
-command_changed = $(or $(subst $(cmd_$(1)),,$(cmd_$(call replace_spaces,$@))),\
- $(subst $(cmd_$(call replace_spaces,$@)),,$(cmd_$(1))))
-
-# Helper that is non-empty when a prerequisite changes.
-# Normally make does this implicitly, but we force rules to always run
-# so we can check their command lines.
-# $? -- new prerequisites
-# $| -- order-only dependencies
-prereq_changed = $(filter-out FORCE_DO_CMD,$(filter-out $|,$?))
-
-# Helper that executes all postbuilds until one fails.
-define do_postbuilds
- @E=0;\
- for p in $(POSTBUILDS); do\
- eval $$p;\
- E=$$?;\
- if [ $$E -ne 0 ]; then\
- break;\
- fi;\
- done;\
- if [ $$E -ne 0 ]; then\
- rm -rf "$@";\
- exit $$E;\
- fi
-endef
-
-# do_cmd: run a command via the above cmd_foo names, if necessary.
-# Should always run for a given target to handle command-line changes.
-# Second argument, if non-zero, makes it do asm/C/C++ dependency munging.
-# Third argument, if non-zero, makes it do POSTBUILDS processing.
-# Note: We intentionally do NOT call dirx for depfile, since it contains ? for
-# spaces already and dirx strips the ? characters.
-define do_cmd
-$(if $(or $(command_changed),$(prereq_changed)),
- @$(call exact_echo, $($(quiet)cmd_$(1)))
- @mkdir -p "$(call dirx,$@)" "$(dir $(depfile))"
- $(if $(findstring flock,$(word 2,$(cmd_$1))),
- @$(cmd_$(1))
- @echo " $(quiet_cmd_$(1)): Finished",
- @$(cmd_$(1))
- )
- @$(call exact_echo,$(call escape_vars,cmd_$(call replace_spaces,$@) := $(cmd_$(1)))) > $(depfile)
- @$(if $(2),$(fixup_dep))
- $(if $(and $(3), $(POSTBUILDS)),
- $(call do_postbuilds)
- )
-)
-endef
-
-# Declare the "all" target first so it is the default,
-# even though we don't have the deps yet.
-.PHONY: all
-all:
-
-# make looks for ways to re-generate included makefiles, but in our case, we
-# don't have a direct way. Explicitly telling make that it has nothing to do
-# for them makes it go faster.
-%.d: ;
-
-# Use FORCE_DO_CMD to force a target to run. Should be coupled with
-# do_cmd.
-.PHONY: FORCE_DO_CMD
-FORCE_DO_CMD:
-
-TOOLSET := target
-# Suffix rules, putting all outputs into $(obj).
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cpp FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cxx FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.m FORCE_DO_CMD
- @$(call do_cmd,objc,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.mm FORCE_DO_CMD
- @$(call do_cmd,objcxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.S FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.s FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-# Try building from generated source, too.
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cpp FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cxx FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.m FORCE_DO_CMD
- @$(call do_cmd,objc,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.mm FORCE_DO_CMD
- @$(call do_cmd,objcxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.S FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.s FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-$(obj).$(TOOLSET)/%.o: $(obj)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.cpp FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.cxx FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.m FORCE_DO_CMD
- @$(call do_cmd,objc,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.mm FORCE_DO_CMD
- @$(call do_cmd,objcxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.S FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.s FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-
-ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
- $(findstring $(join ^,$(prefix)),\
- $(join ^,test_exception.target.mk)))),)
- include test_exception.target.mk
-endif
-
-quiet_cmd_regen_makefile = ACTION Regenerating $@
-cmd_regen_makefile = cd $(srcdir); /Users/trott/io.js/deps/npm/node_modules/node-gyp/gyp/gyp_main.py -fmake --ignore-environment "--toplevel-dir=." -I/Users/trott/io.js/test/addons-napi/test_exception/build/config.gypi -I/Users/trott/io.js/deps/npm/node_modules/node-gyp/addon.gypi -I/Users/trott/io.js/common.gypi "--depth=." "-Goutput_dir=." "--generator-output=build" "-Dlibrary=shared_library" "-Dvisibility=default" "-Dnode_root_dir=/Users/trott/io.js" "-Dnode_gyp_dir=/Users/trott/io.js/deps/npm/node_modules/node-gyp" "-Dnode_lib_file=/Users/trott/io.js/$(Configuration)/node.lib" "-Dmodule_root_dir=/Users/trott/io.js/test/addons-napi/test_exception" "-Dnode_engine=v8" binding.gyp
-Makefile: $(srcdir)/../../../deps/npm/node_modules/node-gyp/addon.gypi $(srcdir)/build/config.gypi $(srcdir)/binding.gyp $(srcdir)/../../../common.gypi
- $(call do_cmd,regen_makefile)
-
-# "all" is a concatenation of the "all" targets from all the included
-# sub-makefiles. This is just here to clarify.
-all:
-
-# Add in dependency-tracking rules. $(all_deps) is the list of every single
-# target in our tree. Only consider the ones with .d (dependency) info:
-d_files := $(wildcard $(foreach f,$(all_deps),$(depsdir)/$(f).d))
-ifneq ($(d_files),)
- include $(d_files)
-endif
diff --git a/test/addons-napi/test_exception/build/binding.Makefile b/test/addons-napi/test_exception/build/binding.Makefile
deleted file mode 100644
index 276e036c1017ca..00000000000000
--- a/test/addons-napi/test_exception/build/binding.Makefile
+++ /dev/null
@@ -1,6 +0,0 @@
-# This file is generated by gyp; do not edit.
-
-export builddir_name ?= ./build/.
-.PHONY: all
-all:
- $(MAKE) test_exception
diff --git a/test/addons-napi/test_exception/build/config.gypi b/test/addons-napi/test_exception/build/config.gypi
deleted file mode 100644
index 6a35cfccbb772b..00000000000000
--- a/test/addons-napi/test_exception/build/config.gypi
+++ /dev/null
@@ -1,70 +0,0 @@
-# Do not edit. File was generated by node-gyp's "configure" step
-{
- "target_defaults": {
- "cflags": [],
- "default_configuration": "Release",
- "defines": [],
- "include_dirs": [],
- "libraries": []
- },
- "variables": {
- "asan": 0,
- "coverage": "false",
- "debug_devtools": "node",
- "debug_http2": "false",
- "debug_nghttp2": "false",
- "force_dynamic_crt": 0,
- "host_arch": "x64",
- "icu_data_file": "icudt59l.dat",
- "icu_data_in": "../../deps/icu-small/source/data/in/icudt59l.dat",
- "icu_endianness": "l",
- "icu_gyp_path": "tools/icu/icu-generic.gyp",
- "icu_locales": "en,root",
- "icu_path": "deps/icu-small",
- "icu_small": "true",
- "icu_ver_major": "59",
- "llvm_version": 0,
- "node_byteorder": "little",
- "node_enable_d8": "false",
- "node_enable_v8_vtunejit": "false",
- "node_install_npm": "true",
- "node_module_version": 58,
- "node_no_browser_globals": "false",
- "node_prefix": "/usr/local",
- "node_release_urlbase": "",
- "node_shared": "false",
- "node_shared_cares": "false",
- "node_shared_http_parser": "false",
- "node_shared_libuv": "false",
- "node_shared_openssl": "false",
- "node_shared_zlib": "false",
- "node_tag": "",
- "node_use_bundled_v8": "true",
- "node_use_dtrace": "true",
- "node_use_etw": "false",
- "node_use_lttng": "false",
- "node_use_openssl": "true",
- "node_use_perfctr": "false",
- "node_use_v8_platform": "true",
- "node_without_node_options": "false",
- "openssl_fips": "",
- "openssl_no_asm": 0,
- "shlib_suffix": "58.dylib",
- "target_arch": "x64",
- "uv_parent_path": "/deps/uv/",
- "uv_use_dtrace": "true",
- "v8_enable_gdbjit": 0,
- "v8_enable_i18n_support": 1,
- "v8_enable_inspector": 1,
- "v8_no_strict_aliasing": 1,
- "v8_optimized_debug": 0,
- "v8_promise_internal_field_count": 1,
- "v8_random_seed": 0,
- "v8_trace_maps": 0,
- "v8_use_snapshot": "true",
- "want_separate_host_toolset": 0,
- "xcode_version": "8.0",
- "nodedir": "/Users/trott/io.js",
- "standalone_static_library": 1
- }
-}
diff --git a/test/addons-napi/test_exception/build/gyp-mac-tool b/test/addons-napi/test_exception/build/gyp-mac-tool
deleted file mode 100755
index 8ef02b0493a003..00000000000000
--- a/test/addons-napi/test_exception/build/gyp-mac-tool
+++ /dev/null
@@ -1,611 +0,0 @@
-#!/usr/bin/env python
-# Generated by gyp. Do not edit.
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Utility functions to perform Xcode-style build steps.
-
-These functions are executed via gyp-mac-tool when using the Makefile generator.
-"""
-
-import fcntl
-import fnmatch
-import glob
-import json
-import os
-import plistlib
-import re
-import shutil
-import string
-import subprocess
-import sys
-import tempfile
-
-
-def main(args):
- executor = MacTool()
- exit_code = executor.Dispatch(args)
- if exit_code is not None:
- sys.exit(exit_code)
-
-
-class MacTool(object):
- """This class performs all the Mac tooling steps. The methods can either be
- executed directly, or dispatched from an argument list."""
-
- def Dispatch(self, args):
- """Dispatches a string command to a method."""
- if len(args) < 1:
- raise Exception("Not enough arguments")
-
- method = "Exec%s" % self._CommandifyName(args[0])
- return getattr(self, method)(*args[1:])
-
- def _CommandifyName(self, name_string):
- """Transforms a tool name like copy-info-plist to CopyInfoPlist"""
- return name_string.title().replace('-', '')
-
- def ExecCopyBundleResource(self, source, dest, convert_to_binary):
- """Copies a resource file to the bundle/Resources directory, performing any
- necessary compilation on each resource."""
- extension = os.path.splitext(source)[1].lower()
- if os.path.isdir(source):
- # Copy tree.
- # TODO(thakis): This copies file attributes like mtime, while the
- # single-file branch below doesn't. This should probably be changed to
- # be consistent with the single-file branch.
- if os.path.exists(dest):
- shutil.rmtree(dest)
- shutil.copytree(source, dest)
- elif extension == '.xib':
- return self._CopyXIBFile(source, dest)
- elif extension == '.storyboard':
- return self._CopyXIBFile(source, dest)
- elif extension == '.strings':
- self._CopyStringsFile(source, dest, convert_to_binary)
- else:
- shutil.copy(source, dest)
-
- def _CopyXIBFile(self, source, dest):
- """Compiles a XIB file with ibtool into a binary plist in the bundle."""
-
- # ibtool sometimes crashes with relative paths. See crbug.com/314728.
- base = os.path.dirname(os.path.realpath(__file__))
- if os.path.relpath(source):
- source = os.path.join(base, source)
- if os.path.relpath(dest):
- dest = os.path.join(base, dest)
-
- args = ['xcrun', 'ibtool', '--errors', '--warnings', '--notices',
- '--output-format', 'human-readable-text', '--compile', dest, source]
- ibtool_section_re = re.compile(r'/\*.*\*/')
- ibtool_re = re.compile(r'.*note:.*is clipping its content')
- ibtoolout = subprocess.Popen(args, stdout=subprocess.PIPE)
- current_section_header = None
- for line in ibtoolout.stdout:
- if ibtool_section_re.match(line):
- current_section_header = line
- elif not ibtool_re.match(line):
- if current_section_header:
- sys.stdout.write(current_section_header)
- current_section_header = None
- sys.stdout.write(line)
- return ibtoolout.returncode
-
- def _ConvertToBinary(self, dest):
- subprocess.check_call([
- 'xcrun', 'plutil', '-convert', 'binary1', '-o', dest, dest])
-
- def _CopyStringsFile(self, source, dest, convert_to_binary):
- """Copies a .strings file using iconv to reconvert the input into UTF-16."""
- input_code = self._DetectInputEncoding(source) or "UTF-8"
-
- # Xcode's CpyCopyStringsFile / builtin-copyStrings seems to call
- # CFPropertyListCreateFromXMLData() behind the scenes; at least it prints
- # CFPropertyListCreateFromXMLData(): Old-style plist parser: missing
- # semicolon in dictionary.
- # on invalid files. Do the same kind of validation.
- import CoreFoundation
- s = open(source, 'rb').read()
- d = CoreFoundation.CFDataCreate(None, s, len(s))
- _, error = CoreFoundation.CFPropertyListCreateFromXMLData(None, d, 0, None)
- if error:
- return
-
- fp = open(dest, 'wb')
- fp.write(s.decode(input_code).encode('UTF-16'))
- fp.close()
-
- if convert_to_binary == 'True':
- self._ConvertToBinary(dest)
-
- def _DetectInputEncoding(self, file_name):
- """Reads the first few bytes from file_name and tries to guess the text
- encoding. Returns None as a guess if it can't detect it."""
- fp = open(file_name, 'rb')
- try:
- header = fp.read(3)
- except e:
- fp.close()
- return None
- fp.close()
- if header.startswith("\xFE\xFF"):
- return "UTF-16"
- elif header.startswith("\xFF\xFE"):
- return "UTF-16"
- elif header.startswith("\xEF\xBB\xBF"):
- return "UTF-8"
- else:
- return None
-
- def ExecCopyInfoPlist(self, source, dest, convert_to_binary, *keys):
- """Copies the |source| Info.plist to the destination directory |dest|."""
- # Read the source Info.plist into memory.
- fd = open(source, 'r')
- lines = fd.read()
- fd.close()
-
- # Insert synthesized key/value pairs (e.g. BuildMachineOSBuild).
- plist = plistlib.readPlistFromString(lines)
- if keys:
- plist = dict(plist.items() + json.loads(keys[0]).items())
- lines = plistlib.writePlistToString(plist)
-
- # Go through all the environment variables and replace them as variables in
- # the file.
- IDENT_RE = re.compile(r'[/\s]')
- for key in os.environ:
- if key.startswith('_'):
- continue
- evar = '${%s}' % key
- evalue = os.environ[key]
- lines = string.replace(lines, evar, evalue)
-
- # Xcode supports various suffices on environment variables, which are
- # all undocumented. :rfc1034identifier is used in the standard project
- # template these days, and :identifier was used earlier. They are used to
- # convert non-url characters into things that look like valid urls --
- # except that the replacement character for :identifier, '_' isn't valid
- # in a URL either -- oops, hence :rfc1034identifier was born.
- evar = '${%s:identifier}' % key
- evalue = IDENT_RE.sub('_', os.environ[key])
- lines = string.replace(lines, evar, evalue)
-
- evar = '${%s:rfc1034identifier}' % key
- evalue = IDENT_RE.sub('-', os.environ[key])
- lines = string.replace(lines, evar, evalue)
-
- # Remove any keys with values that haven't been replaced.
- lines = lines.split('\n')
- for i in range(len(lines)):
- if lines[i].strip().startswith("${"):
- lines[i] = None
- lines[i - 1] = None
- lines = '\n'.join(filter(lambda x: x is not None, lines))
-
- # Write out the file with variables replaced.
- fd = open(dest, 'w')
- fd.write(lines)
- fd.close()
-
- # Now write out PkgInfo file now that the Info.plist file has been
- # "compiled".
- self._WritePkgInfo(dest)
-
- if convert_to_binary == 'True':
- self._ConvertToBinary(dest)
-
- def _WritePkgInfo(self, info_plist):
- """This writes the PkgInfo file from the data stored in Info.plist."""
- plist = plistlib.readPlist(info_plist)
- if not plist:
- return
-
- # Only create PkgInfo for executable types.
- package_type = plist['CFBundlePackageType']
- if package_type != 'APPL':
- return
-
- # The format of PkgInfo is eight characters, representing the bundle type
- # and bundle signature, each four characters. If that is missing, four
- # '?' characters are used instead.
- signature_code = plist.get('CFBundleSignature', '????')
- if len(signature_code) != 4: # Wrong length resets everything, too.
- signature_code = '?' * 4
-
- dest = os.path.join(os.path.dirname(info_plist), 'PkgInfo')
- fp = open(dest, 'w')
- fp.write('%s%s' % (package_type, signature_code))
- fp.close()
-
- def ExecFlock(self, lockfile, *cmd_list):
- """Emulates the most basic behavior of Linux's flock(1)."""
- # Rely on exception handling to report errors.
- fd = os.open(lockfile, os.O_RDONLY|os.O_NOCTTY|os.O_CREAT, 0o666)
- fcntl.flock(fd, fcntl.LOCK_EX)
- return subprocess.call(cmd_list)
-
- def ExecFilterLibtool(self, *cmd_list):
- """Calls libtool and filters out '/path/to/libtool: file: foo.o has no
- symbols'."""
- libtool_re = re.compile(r'^.*libtool: file: .* has no symbols$')
- libtool_re5 = re.compile(
- r'^.*libtool: warning for library: ' +
- r'.* the table of contents is empty ' +
- r'\(no object file members in the library define global symbols\)$')
- env = os.environ.copy()
- # Ref:
- # http://www.opensource.apple.com/source/cctools/cctools-809/misc/libtool.c
- # The problem with this flag is that it resets the file mtime on the file to
- # epoch=0, e.g. 1970-1-1 or 1969-12-31 depending on timezone.
- env['ZERO_AR_DATE'] = '1'
- libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env)
- _, err = libtoolout.communicate()
- for line in err.splitlines():
- if not libtool_re.match(line) and not libtool_re5.match(line):
- print >>sys.stderr, line
- # Unconditionally touch the output .a file on the command line if present
- # and the command succeeded. A bit hacky.
- if not libtoolout.returncode:
- for i in range(len(cmd_list) - 1):
- if cmd_list[i] == "-o" and cmd_list[i+1].endswith('.a'):
- os.utime(cmd_list[i+1], None)
- break
- return libtoolout.returncode
-
- def ExecPackageFramework(self, framework, version):
- """Takes a path to Something.framework and the Current version of that and
- sets up all the symlinks."""
- # Find the name of the binary based on the part before the ".framework".
- binary = os.path.basename(framework).split('.')[0]
-
- CURRENT = 'Current'
- RESOURCES = 'Resources'
- VERSIONS = 'Versions'
-
- if not os.path.exists(os.path.join(framework, VERSIONS, version, binary)):
- # Binary-less frameworks don't seem to contain symlinks (see e.g.
- # chromium's out/Debug/org.chromium.Chromium.manifest/ bundle).
- return
-
- # Move into the framework directory to set the symlinks correctly.
- pwd = os.getcwd()
- os.chdir(framework)
-
- # Set up the Current version.
- self._Relink(version, os.path.join(VERSIONS, CURRENT))
-
- # Set up the root symlinks.
- self._Relink(os.path.join(VERSIONS, CURRENT, binary), binary)
- self._Relink(os.path.join(VERSIONS, CURRENT, RESOURCES), RESOURCES)
-
- # Back to where we were before!
- os.chdir(pwd)
-
- def _Relink(self, dest, link):
- """Creates a symlink to |dest| named |link|. If |link| already exists,
- it is overwritten."""
- if os.path.lexists(link):
- os.remove(link)
- os.symlink(dest, link)
-
- def ExecCompileXcassets(self, keys, *inputs):
- """Compiles multiple .xcassets files into a single .car file.
-
- This invokes 'actool' to compile all the inputs .xcassets files. The
- |keys| arguments is a json-encoded dictionary of extra arguments to
- pass to 'actool' when the asset catalogs contains an application icon
- or a launch image.
-
- Note that 'actool' does not create the Assets.car file if the asset
- catalogs does not contains imageset.
- """
- command_line = [
- 'xcrun', 'actool', '--output-format', 'human-readable-text',
- '--compress-pngs', '--notices', '--warnings', '--errors',
- ]
- is_iphone_target = 'IPHONEOS_DEPLOYMENT_TARGET' in os.environ
- if is_iphone_target:
- platform = os.environ['CONFIGURATION'].split('-')[-1]
- if platform not in ('iphoneos', 'iphonesimulator'):
- platform = 'iphonesimulator'
- command_line.extend([
- '--platform', platform, '--target-device', 'iphone',
- '--target-device', 'ipad', '--minimum-deployment-target',
- os.environ['IPHONEOS_DEPLOYMENT_TARGET'], '--compile',
- os.path.abspath(os.environ['CONTENTS_FOLDER_PATH']),
- ])
- else:
- command_line.extend([
- '--platform', 'macosx', '--target-device', 'mac',
- '--minimum-deployment-target', os.environ['MACOSX_DEPLOYMENT_TARGET'],
- '--compile',
- os.path.abspath(os.environ['UNLOCALIZED_RESOURCES_FOLDER_PATH']),
- ])
- if keys:
- keys = json.loads(keys)
- for key, value in keys.iteritems():
- arg_name = '--' + key
- if isinstance(value, bool):
- if value:
- command_line.append(arg_name)
- elif isinstance(value, list):
- for v in value:
- command_line.append(arg_name)
- command_line.append(str(v))
- else:
- command_line.append(arg_name)
- command_line.append(str(value))
- # Note: actool crashes if inputs path are relative, so use os.path.abspath
- # to get absolute path name for inputs.
- command_line.extend(map(os.path.abspath, inputs))
- subprocess.check_call(command_line)
-
- def ExecMergeInfoPlist(self, output, *inputs):
- """Merge multiple .plist files into a single .plist file."""
- merged_plist = {}
- for path in inputs:
- plist = self._LoadPlistMaybeBinary(path)
- self._MergePlist(merged_plist, plist)
- plistlib.writePlist(merged_plist, output)
-
- def ExecCodeSignBundle(self, key, resource_rules, entitlements, provisioning):
- """Code sign a bundle.
-
- This function tries to code sign an iOS bundle, following the same
- algorithm as Xcode:
- 1. copy ResourceRules.plist from the user or the SDK into the bundle,
- 2. pick the provisioning profile that best match the bundle identifier,
- and copy it into the bundle as embedded.mobileprovision,
- 3. copy Entitlements.plist from user or SDK next to the bundle,
- 4. code sign the bundle.
- """
- resource_rules_path = self._InstallResourceRules(resource_rules)
- substitutions, overrides = self._InstallProvisioningProfile(
- provisioning, self._GetCFBundleIdentifier())
- entitlements_path = self._InstallEntitlements(
- entitlements, substitutions, overrides)
- subprocess.check_call([
- 'codesign', '--force', '--sign', key, '--resource-rules',
- resource_rules_path, '--entitlements', entitlements_path,
- os.path.join(
- os.environ['TARGET_BUILD_DIR'],
- os.environ['FULL_PRODUCT_NAME'])])
-
- def _InstallResourceRules(self, resource_rules):
- """Installs ResourceRules.plist from user or SDK into the bundle.
-
- Args:
- resource_rules: string, optional, path to the ResourceRules.plist file
- to use, default to "${SDKROOT}/ResourceRules.plist"
-
- Returns:
- Path to the copy of ResourceRules.plist into the bundle.
- """
- source_path = resource_rules
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['CONTENTS_FOLDER_PATH'],
- 'ResourceRules.plist')
- if not source_path:
- source_path = os.path.join(
- os.environ['SDKROOT'], 'ResourceRules.plist')
- shutil.copy2(source_path, target_path)
- return target_path
-
- def _InstallProvisioningProfile(self, profile, bundle_identifier):
- """Installs embedded.mobileprovision into the bundle.
-
- Args:
- profile: string, optional, short name of the .mobileprovision file
- to use, if empty or the file is missing, the best file installed
- will be used
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
-
- Returns:
- A tuple containing two dictionary: variables substitutions and values
- to overrides when generating the entitlements file.
- """
- source_path, provisioning_data, team_id = self._FindProvisioningProfile(
- profile, bundle_identifier)
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['CONTENTS_FOLDER_PATH'],
- 'embedded.mobileprovision')
- shutil.copy2(source_path, target_path)
- substitutions = self._GetSubstitutions(bundle_identifier, team_id + '.')
- return substitutions, provisioning_data['Entitlements']
-
- def _FindProvisioningProfile(self, profile, bundle_identifier):
- """Finds the .mobileprovision file to use for signing the bundle.
-
- Checks all the installed provisioning profiles (or if the user specified
- the PROVISIONING_PROFILE variable, only consult it) and select the most
- specific that correspond to the bundle identifier.
-
- Args:
- profile: string, optional, short name of the .mobileprovision file
- to use, if empty or the file is missing, the best file installed
- will be used
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
-
- Returns:
- A tuple of the path to the selected provisioning profile, the data of
- the embedded plist in the provisioning profile and the team identifier
- to use for code signing.
-
- Raises:
- SystemExit: if no .mobileprovision can be used to sign the bundle.
- """
- profiles_dir = os.path.join(
- os.environ['HOME'], 'Library', 'MobileDevice', 'Provisioning Profiles')
- if not os.path.isdir(profiles_dir):
- print >>sys.stderr, (
- 'cannot find mobile provisioning for %s' % bundle_identifier)
- sys.exit(1)
- provisioning_profiles = None
- if profile:
- profile_path = os.path.join(profiles_dir, profile + '.mobileprovision')
- if os.path.exists(profile_path):
- provisioning_profiles = [profile_path]
- if not provisioning_profiles:
- provisioning_profiles = glob.glob(
- os.path.join(profiles_dir, '*.mobileprovision'))
- valid_provisioning_profiles = {}
- for profile_path in provisioning_profiles:
- profile_data = self._LoadProvisioningProfile(profile_path)
- app_id_pattern = profile_data.get(
- 'Entitlements', {}).get('application-identifier', '')
- for team_identifier in profile_data.get('TeamIdentifier', []):
- app_id = '%s.%s' % (team_identifier, bundle_identifier)
- if fnmatch.fnmatch(app_id, app_id_pattern):
- valid_provisioning_profiles[app_id_pattern] = (
- profile_path, profile_data, team_identifier)
- if not valid_provisioning_profiles:
- print >>sys.stderr, (
- 'cannot find mobile provisioning for %s' % bundle_identifier)
- sys.exit(1)
- # If the user has multiple provisioning profiles installed that can be
- # used for ${bundle_identifier}, pick the most specific one (ie. the
- # provisioning profile whose pattern is the longest).
- selected_key = max(valid_provisioning_profiles, key=lambda v: len(v))
- return valid_provisioning_profiles[selected_key]
-
- def _LoadProvisioningProfile(self, profile_path):
- """Extracts the plist embedded in a provisioning profile.
-
- Args:
- profile_path: string, path to the .mobileprovision file
-
- Returns:
- Content of the plist embedded in the provisioning profile as a dictionary.
- """
- with tempfile.NamedTemporaryFile() as temp:
- subprocess.check_call([
- 'security', 'cms', '-D', '-i', profile_path, '-o', temp.name])
- return self._LoadPlistMaybeBinary(temp.name)
-
- def _MergePlist(self, merged_plist, plist):
- """Merge |plist| into |merged_plist|."""
- for key, value in plist.iteritems():
- if isinstance(value, dict):
- merged_value = merged_plist.get(key, {})
- if isinstance(merged_value, dict):
- self._MergePlist(merged_value, value)
- merged_plist[key] = merged_value
- else:
- merged_plist[key] = value
- else:
- merged_plist[key] = value
-
- def _LoadPlistMaybeBinary(self, plist_path):
- """Loads into a memory a plist possibly encoded in binary format.
-
- This is a wrapper around plistlib.readPlist that tries to convert the
- plist to the XML format if it can't be parsed (assuming that it is in
- the binary format).
-
- Args:
- plist_path: string, path to a plist file, in XML or binary format
-
- Returns:
- Content of the plist as a dictionary.
- """
- try:
- # First, try to read the file using plistlib that only supports XML,
- # and if an exception is raised, convert a temporary copy to XML and
- # load that copy.
- return plistlib.readPlist(plist_path)
- except:
- pass
- with tempfile.NamedTemporaryFile() as temp:
- shutil.copy2(plist_path, temp.name)
- subprocess.check_call(['plutil', '-convert', 'xml1', temp.name])
- return plistlib.readPlist(temp.name)
-
- def _GetSubstitutions(self, bundle_identifier, app_identifier_prefix):
- """Constructs a dictionary of variable substitutions for Entitlements.plist.
-
- Args:
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
- app_identifier_prefix: string, value for AppIdentifierPrefix
-
- Returns:
- Dictionary of substitutions to apply when generating Entitlements.plist.
- """
- return {
- 'CFBundleIdentifier': bundle_identifier,
- 'AppIdentifierPrefix': app_identifier_prefix,
- }
-
- def _GetCFBundleIdentifier(self):
- """Extracts CFBundleIdentifier value from Info.plist in the bundle.
-
- Returns:
- Value of CFBundleIdentifier in the Info.plist located in the bundle.
- """
- info_plist_path = os.path.join(
- os.environ['TARGET_BUILD_DIR'],
- os.environ['INFOPLIST_PATH'])
- info_plist_data = self._LoadPlistMaybeBinary(info_plist_path)
- return info_plist_data['CFBundleIdentifier']
-
- def _InstallEntitlements(self, entitlements, substitutions, overrides):
- """Generates and install the ${BundleName}.xcent entitlements file.
-
- Expands variables "$(variable)" pattern in the source entitlements file,
- add extra entitlements defined in the .mobileprovision file and the copy
- the generated plist to "${BundlePath}.xcent".
-
- Args:
- entitlements: string, optional, path to the Entitlements.plist template
- to use, defaults to "${SDKROOT}/Entitlements.plist"
- substitutions: dictionary, variable substitutions
- overrides: dictionary, values to add to the entitlements
-
- Returns:
- Path to the generated entitlements file.
- """
- source_path = entitlements
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['PRODUCT_NAME'] + '.xcent')
- if not source_path:
- source_path = os.path.join(
- os.environ['SDKROOT'],
- 'Entitlements.plist')
- shutil.copy2(source_path, target_path)
- data = self._LoadPlistMaybeBinary(target_path)
- data = self._ExpandVariables(data, substitutions)
- if overrides:
- for key in overrides:
- if key not in data:
- data[key] = overrides[key]
- plistlib.writePlist(data, target_path)
- return target_path
-
- def _ExpandVariables(self, data, substitutions):
- """Expands variables "$(variable)" in data.
-
- Args:
- data: object, can be either string, list or dictionary
- substitutions: dictionary, variable substitutions to perform
-
- Returns:
- Copy of data where each references to "$(variable)" has been replaced
- by the corresponding value found in substitutions, or left intact if
- the key was not found.
- """
- if isinstance(data, str):
- for key, value in substitutions.iteritems():
- data = data.replace('$(%s)' % key, value)
- return data
- if isinstance(data, list):
- return [self._ExpandVariables(v, substitutions) for v in data]
- if isinstance(data, dict):
- return {k: self._ExpandVariables(data[k], substitutions) for k in data}
- return data
-
-if __name__ == '__main__':
- sys.exit(main(sys.argv[1:]))
diff --git a/test/addons-napi/test_exception/build/test_exception.target.mk b/test/addons-napi/test_exception/build/test_exception.target.mk
deleted file mode 100644
index f332afc2b79d26..00000000000000
--- a/test/addons-napi/test_exception/build/test_exception.target.mk
+++ /dev/null
@@ -1,178 +0,0 @@
-# This file is generated by gyp; do not edit.
-
-TOOLSET := target
-TARGET := test_exception
-DEFS_Debug := \
- '-DNODE_GYP_MODULE_NAME=test_exception' \
- '-DUSING_UV_SHARED=1' \
- '-DUSING_V8_SHARED=1' \
- '-DV8_DEPRECATION_WARNINGS=1' \
- '-D_DARWIN_USE_64_BIT_INODE=1' \
- '-D_LARGEFILE_SOURCE' \
- '-D_FILE_OFFSET_BITS=64' \
- '-DBUILDING_NODE_EXTENSION' \
- '-DDEBUG' \
- '-D_DEBUG' \
- '-DV8_ENABLE_CHECKS'
-
-# Flags passed to all source files.
-CFLAGS_Debug := \
- -O0 \
- -gdwarf-2 \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -Wall \
- -Wendif-labels \
- -W \
- -Wno-unused-parameter
-
-# Flags passed to only C files.
-CFLAGS_C_Debug := \
- -fno-strict-aliasing
-
-# Flags passed to only C++ files.
-CFLAGS_CC_Debug := \
- -std=gnu++0x \
- -stdlib=libc++ \
- -fno-rtti \
- -fno-exceptions \
- -fno-threadsafe-statics \
- -fno-strict-aliasing
-
-# Flags passed to only ObjC files.
-CFLAGS_OBJC_Debug :=
-
-# Flags passed to only ObjC++ files.
-CFLAGS_OBJCC_Debug :=
-
-INCS_Debug := \
- -I/Users/trott/io.js/include/node \
- -I/Users/trott/io.js/src \
- -I/Users/trott/io.js/deps/uv/include \
- -I/Users/trott/io.js/deps/v8/include
-
-DEFS_Release := \
- '-DNODE_GYP_MODULE_NAME=test_exception' \
- '-DUSING_UV_SHARED=1' \
- '-DUSING_V8_SHARED=1' \
- '-DV8_DEPRECATION_WARNINGS=1' \
- '-D_DARWIN_USE_64_BIT_INODE=1' \
- '-D_LARGEFILE_SOURCE' \
- '-D_FILE_OFFSET_BITS=64' \
- '-DBUILDING_NODE_EXTENSION'
-
-# Flags passed to all source files.
-CFLAGS_Release := \
- -Os \
- -gdwarf-2 \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -Wall \
- -Wendif-labels \
- -W \
- -Wno-unused-parameter
-
-# Flags passed to only C files.
-CFLAGS_C_Release := \
- -fno-strict-aliasing
-
-# Flags passed to only C++ files.
-CFLAGS_CC_Release := \
- -std=gnu++0x \
- -stdlib=libc++ \
- -fno-rtti \
- -fno-exceptions \
- -fno-threadsafe-statics \
- -fno-strict-aliasing
-
-# Flags passed to only ObjC files.
-CFLAGS_OBJC_Release :=
-
-# Flags passed to only ObjC++ files.
-CFLAGS_OBJCC_Release :=
-
-INCS_Release := \
- -I/Users/trott/io.js/include/node \
- -I/Users/trott/io.js/src \
- -I/Users/trott/io.js/deps/uv/include \
- -I/Users/trott/io.js/deps/v8/include
-
-OBJS := \
- $(obj).target/$(TARGET)/test_exception.o
-
-# Add to the list of files we specially track dependencies for.
-all_deps += $(OBJS)
-
-# CFLAGS et al overrides must be target-local.
-# See "Target-specific Variable Values" in the GNU Make manual.
-$(OBJS): TOOLSET := $(TOOLSET)
-$(OBJS): GYP_CFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE))
-$(OBJS): GYP_CXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE))
-$(OBJS): GYP_OBJCFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) $(CFLAGS_OBJC_$(BUILDTYPE))
-$(OBJS): GYP_OBJCXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) $(CFLAGS_OBJCC_$(BUILDTYPE))
-
-# Suffix rules, putting all outputs into $(obj).
-
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-# Try building from generated source, too.
-
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-# End of this set of suffix rules
-### Rules for final target.
-LDFLAGS_Debug := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -L$(builddir) \
- -stdlib=libc++
-
-LIBTOOLFLAGS_Debug := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first
-
-LDFLAGS_Release := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -L$(builddir) \
- -stdlib=libc++
-
-LIBTOOLFLAGS_Release := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first
-
-LIBS :=
-
-$(builddir)/test_exception.node: GYP_LDFLAGS := $(LDFLAGS_$(BUILDTYPE))
-$(builddir)/test_exception.node: LIBS := $(LIBS)
-$(builddir)/test_exception.node: GYP_LIBTOOLFLAGS := $(LIBTOOLFLAGS_$(BUILDTYPE))
-$(builddir)/test_exception.node: TOOLSET := $(TOOLSET)
-$(builddir)/test_exception.node: $(OBJS) FORCE_DO_CMD
- $(call do_cmd,solink_module)
-
-all_deps += $(builddir)/test_exception.node
-# Add target alias
-.PHONY: test_exception
-test_exception: $(builddir)/test_exception.node
-
-# Short alias for building this executable.
-.PHONY: test_exception.node
-test_exception.node: $(builddir)/test_exception.node
-
-# Add executable to "all" target.
-.PHONY: all
-all: $(builddir)/test_exception.node
-
diff --git a/test/addons-napi/test_fatal/build/Makefile b/test/addons-napi/test_fatal/build/Makefile
deleted file mode 100644
index f59a23016fa8df..00000000000000
--- a/test/addons-napi/test_fatal/build/Makefile
+++ /dev/null
@@ -1,342 +0,0 @@
-# We borrow heavily from the kernel build setup, though we are simpler since
-# we don't have Kconfig tweaking settings on us.
-
-# The implicit make rules have it looking for RCS files, among other things.
-# We instead explicitly write all the rules we care about.
-# It's even quicker (saves ~200ms) to pass -r on the command line.
-MAKEFLAGS=-r
-
-# The source directory tree.
-srcdir := ..
-abs_srcdir := $(abspath $(srcdir))
-
-# The name of the builddir.
-builddir_name ?= .
-
-# The V=1 flag on command line makes us verbosely print command lines.
-ifdef V
- quiet=
-else
- quiet=quiet_
-endif
-
-# Specify BUILDTYPE=Release on the command line for a release build.
-BUILDTYPE ?= Release
-
-# Directory all our build output goes into.
-# Note that this must be two directories beneath src/ for unit tests to pass,
-# as they reach into the src/ directory for data with relative paths.
-builddir ?= $(builddir_name)/$(BUILDTYPE)
-abs_builddir := $(abspath $(builddir))
-depsdir := $(builddir)/.deps
-
-# Object output directory.
-obj := $(builddir)/obj
-abs_obj := $(abspath $(obj))
-
-# We build up a list of every single one of the targets so we can slurp in the
-# generated dependency rule Makefiles in one pass.
-all_deps :=
-
-
-
-CC.target ?= $(CC)
-CFLAGS.target ?= $(CPPFLAGS) $(CFLAGS)
-CXX.target ?= $(CXX)
-CXXFLAGS.target ?= $(CPPFLAGS) $(CXXFLAGS)
-LINK.target ?= $(LINK)
-LDFLAGS.target ?= $(LDFLAGS)
-AR.target ?= $(AR)
-
-# C++ apps need to be linked with g++.
-LINK ?= $(CXX.target)
-
-# TODO(evan): move all cross-compilation logic to gyp-time so we don't need
-# to replicate this environment fallback in make as well.
-CC.host ?= gcc
-CFLAGS.host ?= $(CPPFLAGS_host) $(CFLAGS_host)
-CXX.host ?= g++
-CXXFLAGS.host ?= $(CPPFLAGS_host) $(CXXFLAGS_host)
-LINK.host ?= $(CXX.host)
-LDFLAGS.host ?=
-AR.host ?= ar
-
-# Define a dir function that can handle spaces.
-# http://www.gnu.org/software/make/manual/make.html#Syntax-of-Functions
-# "leading spaces cannot appear in the text of the first argument as written.
-# These characters can be put into the argument value by variable substitution."
-empty :=
-space := $(empty) $(empty)
-
-# http://stackoverflow.com/questions/1189781/using-make-dir-or-notdir-on-a-path-with-spaces
-replace_spaces = $(subst $(space),?,$1)
-unreplace_spaces = $(subst ?,$(space),$1)
-dirx = $(call unreplace_spaces,$(dir $(call replace_spaces,$1)))
-
-# Flags to make gcc output dependency info. Note that you need to be
-# careful here to use the flags that ccache and distcc can understand.
-# We write to a dep file on the side first and then rename at the end
-# so we can't end up with a broken dep file.
-depfile = $(depsdir)/$(call replace_spaces,$@).d
-DEPFLAGS = -MMD -MF $(depfile).raw
-
-# We have to fixup the deps output in a few ways.
-# (1) the file output should mention the proper .o file.
-# ccache or distcc lose the path to the target, so we convert a rule of
-# the form:
-# foobar.o: DEP1 DEP2
-# into
-# path/to/foobar.o: DEP1 DEP2
-# (2) we want missing files not to cause us to fail to build.
-# We want to rewrite
-# foobar.o: DEP1 DEP2 \
-# DEP3
-# to
-# DEP1:
-# DEP2:
-# DEP3:
-# so if the files are missing, they're just considered phony rules.
-# We have to do some pretty insane escaping to get those backslashes
-# and dollar signs past make, the shell, and sed at the same time.
-# Doesn't work with spaces, but that's fine: .d files have spaces in
-# their names replaced with other characters.
-define fixup_dep
-# The depfile may not exist if the input file didn't have any #includes.
-touch $(depfile).raw
-# Fixup path as in (1).
-sed -e "s|^$(notdir $@)|$@|" $(depfile).raw >> $(depfile)
-# Add extra rules as in (2).
-# We remove slashes and replace spaces with new lines;
-# remove blank lines;
-# delete the first line and append a colon to the remaining lines.
-sed -e 's|\\||' -e 'y| |\n|' $(depfile).raw |\
- grep -v '^$$' |\
- sed -e 1d -e 's|$$|:|' \
- >> $(depfile)
-rm $(depfile).raw
-endef
-
-# Command definitions:
-# - cmd_foo is the actual command to run;
-# - quiet_cmd_foo is the brief-output summary of the command.
-
-quiet_cmd_cc = CC($(TOOLSET)) $@
-cmd_cc = $(CC.$(TOOLSET)) $(GYP_CFLAGS) $(DEPFLAGS) $(CFLAGS.$(TOOLSET)) -c -o $@ $<
-
-quiet_cmd_cxx = CXX($(TOOLSET)) $@
-cmd_cxx = $(CXX.$(TOOLSET)) $(GYP_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-
-quiet_cmd_objc = CXX($(TOOLSET)) $@
-cmd_objc = $(CC.$(TOOLSET)) $(GYP_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-quiet_cmd_objcxx = CXX($(TOOLSET)) $@
-cmd_objcxx = $(CXX.$(TOOLSET)) $(GYP_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-# Commands for precompiled header files.
-quiet_cmd_pch_c = CXX($(TOOLSET)) $@
-cmd_pch_c = $(CC.$(TOOLSET)) $(GYP_PCH_CFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-quiet_cmd_pch_cc = CXX($(TOOLSET)) $@
-cmd_pch_cc = $(CC.$(TOOLSET)) $(GYP_PCH_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-quiet_cmd_pch_m = CXX($(TOOLSET)) $@
-cmd_pch_m = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $<
-quiet_cmd_pch_mm = CXX($(TOOLSET)) $@
-cmd_pch_mm = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-# gyp-mac-tool is written next to the root Makefile by gyp.
-# Use $(4) for the command, since $(2) and $(3) are used as flag by do_cmd
-# already.
-quiet_cmd_mac_tool = MACTOOL $(4) $<
-cmd_mac_tool = ./gyp-mac-tool $(4) $< "$@"
-
-quiet_cmd_mac_package_framework = PACKAGE FRAMEWORK $@
-cmd_mac_package_framework = ./gyp-mac-tool package-framework "$@" $(4)
-
-quiet_cmd_infoplist = INFOPLIST $@
-cmd_infoplist = $(CC.$(TOOLSET)) -E -P -Wno-trigraphs -x c $(INFOPLIST_DEFINES) "$<" -o "$@"
-
-quiet_cmd_touch = TOUCH $@
-cmd_touch = touch $@
-
-quiet_cmd_copy = COPY $@
-# send stderr to /dev/null to ignore messages when linking directories.
-cmd_copy = rm -rf "$@" && cp -af "$<" "$@"
-
-quiet_cmd_alink = LIBTOOL-STATIC $@
-cmd_alink = rm -f $@ && ./gyp-mac-tool filter-libtool libtool $(GYP_LIBTOOLFLAGS) -static -o $@ $(filter %.o,$^)
-
-quiet_cmd_link = LINK($(TOOLSET)) $@
-cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS)
-
-quiet_cmd_solink = SOLINK($(TOOLSET)) $@
-cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS)
-
-quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@
-cmd_solink_module = $(LINK.$(TOOLSET)) -bundle $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS)
-
-
-# Define an escape_quotes function to escape single quotes.
-# This allows us to handle quotes properly as long as we always use
-# use single quotes and escape_quotes.
-escape_quotes = $(subst ','\'',$(1))
-# This comment is here just to include a ' to unconfuse syntax highlighting.
-# Define an escape_vars function to escape '$' variable syntax.
-# This allows us to read/write command lines with shell variables (e.g.
-# $LD_LIBRARY_PATH), without triggering make substitution.
-escape_vars = $(subst $$,$$$$,$(1))
-# Helper that expands to a shell command to echo a string exactly as it is in
-# make. This uses printf instead of echo because printf's behaviour with respect
-# to escape sequences is more portable than echo's across different shells
-# (e.g., dash, bash).
-exact_echo = printf '%s\n' '$(call escape_quotes,$(1))'
-
-# Helper to compare the command we're about to run against the command
-# we logged the last time we ran the command. Produces an empty
-# string (false) when the commands match.
-# Tricky point: Make has no string-equality test function.
-# The kernel uses the following, but it seems like it would have false
-# positives, where one string reordered its arguments.
-# arg_check = $(strip $(filter-out $(cmd_$(1)), $(cmd_$@)) \
-# $(filter-out $(cmd_$@), $(cmd_$(1))))
-# We instead substitute each for the empty string into the other, and
-# say they're equal if both substitutions produce the empty string.
-# .d files contain ? instead of spaces, take that into account.
-command_changed = $(or $(subst $(cmd_$(1)),,$(cmd_$(call replace_spaces,$@))),\
- $(subst $(cmd_$(call replace_spaces,$@)),,$(cmd_$(1))))
-
-# Helper that is non-empty when a prerequisite changes.
-# Normally make does this implicitly, but we force rules to always run
-# so we can check their command lines.
-# $? -- new prerequisites
-# $| -- order-only dependencies
-prereq_changed = $(filter-out FORCE_DO_CMD,$(filter-out $|,$?))
-
-# Helper that executes all postbuilds until one fails.
-define do_postbuilds
- @E=0;\
- for p in $(POSTBUILDS); do\
- eval $$p;\
- E=$$?;\
- if [ $$E -ne 0 ]; then\
- break;\
- fi;\
- done;\
- if [ $$E -ne 0 ]; then\
- rm -rf "$@";\
- exit $$E;\
- fi
-endef
-
-# do_cmd: run a command via the above cmd_foo names, if necessary.
-# Should always run for a given target to handle command-line changes.
-# Second argument, if non-zero, makes it do asm/C/C++ dependency munging.
-# Third argument, if non-zero, makes it do POSTBUILDS processing.
-# Note: We intentionally do NOT call dirx for depfile, since it contains ? for
-# spaces already and dirx strips the ? characters.
-define do_cmd
-$(if $(or $(command_changed),$(prereq_changed)),
- @$(call exact_echo, $($(quiet)cmd_$(1)))
- @mkdir -p "$(call dirx,$@)" "$(dir $(depfile))"
- $(if $(findstring flock,$(word 2,$(cmd_$1))),
- @$(cmd_$(1))
- @echo " $(quiet_cmd_$(1)): Finished",
- @$(cmd_$(1))
- )
- @$(call exact_echo,$(call escape_vars,cmd_$(call replace_spaces,$@) := $(cmd_$(1)))) > $(depfile)
- @$(if $(2),$(fixup_dep))
- $(if $(and $(3), $(POSTBUILDS)),
- $(call do_postbuilds)
- )
-)
-endef
-
-# Declare the "all" target first so it is the default,
-# even though we don't have the deps yet.
-.PHONY: all
-all:
-
-# make looks for ways to re-generate included makefiles, but in our case, we
-# don't have a direct way. Explicitly telling make that it has nothing to do
-# for them makes it go faster.
-%.d: ;
-
-# Use FORCE_DO_CMD to force a target to run. Should be coupled with
-# do_cmd.
-.PHONY: FORCE_DO_CMD
-FORCE_DO_CMD:
-
-TOOLSET := target
-# Suffix rules, putting all outputs into $(obj).
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cpp FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cxx FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.m FORCE_DO_CMD
- @$(call do_cmd,objc,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.mm FORCE_DO_CMD
- @$(call do_cmd,objcxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.S FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.s FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-# Try building from generated source, too.
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cpp FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cxx FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.m FORCE_DO_CMD
- @$(call do_cmd,objc,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.mm FORCE_DO_CMD
- @$(call do_cmd,objcxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.S FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.s FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-$(obj).$(TOOLSET)/%.o: $(obj)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.cpp FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.cxx FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.m FORCE_DO_CMD
- @$(call do_cmd,objc,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.mm FORCE_DO_CMD
- @$(call do_cmd,objcxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.S FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.s FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-
-ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
- $(findstring $(join ^,$(prefix)),\
- $(join ^,test_fatal.target.mk)))),)
- include test_fatal.target.mk
-endif
-
-quiet_cmd_regen_makefile = ACTION Regenerating $@
-cmd_regen_makefile = cd $(srcdir); /Users/trott/io.js/deps/npm/node_modules/node-gyp/gyp/gyp_main.py -fmake --ignore-environment "--toplevel-dir=." -I/Users/trott/io.js/test/addons-napi/test_fatal/build/config.gypi -I/Users/trott/io.js/deps/npm/node_modules/node-gyp/addon.gypi -I/Users/trott/io.js/common.gypi "--depth=." "-Goutput_dir=." "--generator-output=build" "-Dlibrary=shared_library" "-Dvisibility=default" "-Dnode_root_dir=/Users/trott/io.js" "-Dnode_gyp_dir=/Users/trott/io.js/deps/npm/node_modules/node-gyp" "-Dnode_lib_file=/Users/trott/io.js/$(Configuration)/node.lib" "-Dmodule_root_dir=/Users/trott/io.js/test/addons-napi/test_fatal" "-Dnode_engine=v8" binding.gyp
-Makefile: $(srcdir)/../../../deps/npm/node_modules/node-gyp/addon.gypi $(srcdir)/build/config.gypi $(srcdir)/binding.gyp $(srcdir)/../../../common.gypi
- $(call do_cmd,regen_makefile)
-
-# "all" is a concatenation of the "all" targets from all the included
-# sub-makefiles. This is just here to clarify.
-all:
-
-# Add in dependency-tracking rules. $(all_deps) is the list of every single
-# target in our tree. Only consider the ones with .d (dependency) info:
-d_files := $(wildcard $(foreach f,$(all_deps),$(depsdir)/$(f).d))
-ifneq ($(d_files),)
- include $(d_files)
-endif
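
Editorial aside on the Makefile just removed: its command_changed helper works around make's lack of a string-equality function by substituting each command string into the other and treating the pair as equal only when both substitutions come back empty. A minimal Python sketch of the same idea, purely illustrative (the names commands_match, old_cmd and new_cmd are not part of the generated Makefile):

def commands_match(old_cmd, new_cmd):
    # Mirror of the make trick: substitute each string into the other; for
    # non-empty command lines both results are empty only when the strings
    # are identical, so a non-empty result means "command changed".
    return old_cmd.replace(new_cmd, '') == '' and new_cmd.replace(old_cmd, '') == ''

# do_cmd re-runs a rule whenever the logged command differs from the new one.
assert commands_match('cc -O2 -c foo.c', 'cc -O2 -c foo.c')
assert not commands_match('cc -O2 -c foo.c', 'cc -O0 -c foo.c')
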
diff --git a/test/addons-napi/test_fatal/build/binding.Makefile b/test/addons-napi/test_fatal/build/binding.Makefile
deleted file mode 100644
index c692e2fed08f96..00000000000000
--- a/test/addons-napi/test_fatal/build/binding.Makefile
+++ /dev/null
@@ -1,6 +0,0 @@
-# This file is generated by gyp; do not edit.
-
-export builddir_name ?= ./build/.
-.PHONY: all
-all:
- $(MAKE) test_fatal
diff --git a/test/addons-napi/test_fatal/build/config.gypi b/test/addons-napi/test_fatal/build/config.gypi
deleted file mode 100644
index 6a35cfccbb772b..00000000000000
--- a/test/addons-napi/test_fatal/build/config.gypi
+++ /dev/null
@@ -1,70 +0,0 @@
-# Do not edit. File was generated by node-gyp's "configure" step
-{
- "target_defaults": {
- "cflags": [],
- "default_configuration": "Release",
- "defines": [],
- "include_dirs": [],
- "libraries": []
- },
- "variables": {
- "asan": 0,
- "coverage": "false",
- "debug_devtools": "node",
- "debug_http2": "false",
- "debug_nghttp2": "false",
- "force_dynamic_crt": 0,
- "host_arch": "x64",
- "icu_data_file": "icudt59l.dat",
- "icu_data_in": "../../deps/icu-small/source/data/in/icudt59l.dat",
- "icu_endianness": "l",
- "icu_gyp_path": "tools/icu/icu-generic.gyp",
- "icu_locales": "en,root",
- "icu_path": "deps/icu-small",
- "icu_small": "true",
- "icu_ver_major": "59",
- "llvm_version": 0,
- "node_byteorder": "little",
- "node_enable_d8": "false",
- "node_enable_v8_vtunejit": "false",
- "node_install_npm": "true",
- "node_module_version": 58,
- "node_no_browser_globals": "false",
- "node_prefix": "/usr/local",
- "node_release_urlbase": "",
- "node_shared": "false",
- "node_shared_cares": "false",
- "node_shared_http_parser": "false",
- "node_shared_libuv": "false",
- "node_shared_openssl": "false",
- "node_shared_zlib": "false",
- "node_tag": "",
- "node_use_bundled_v8": "true",
- "node_use_dtrace": "true",
- "node_use_etw": "false",
- "node_use_lttng": "false",
- "node_use_openssl": "true",
- "node_use_perfctr": "false",
- "node_use_v8_platform": "true",
- "node_without_node_options": "false",
- "openssl_fips": "",
- "openssl_no_asm": 0,
- "shlib_suffix": "58.dylib",
- "target_arch": "x64",
- "uv_parent_path": "/deps/uv/",
- "uv_use_dtrace": "true",
- "v8_enable_gdbjit": 0,
- "v8_enable_i18n_support": 1,
- "v8_enable_inspector": 1,
- "v8_no_strict_aliasing": 1,
- "v8_optimized_debug": 0,
- "v8_promise_internal_field_count": 1,
- "v8_random_seed": 0,
- "v8_trace_maps": 0,
- "v8_use_snapshot": "true",
- "want_separate_host_toolset": 0,
- "xcode_version": "8.0",
- "nodedir": "/Users/trott/io.js",
- "standalone_static_library": 1
- }
-}
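
The config.gypi removed above is not strict JSON: it starts with a '#' comment line and is otherwise a Python-style literal. One hedged way to read such a file is to strip comment lines and evaluate the rest as a literal; the helper name and path below are illustrative only:

import ast

def read_gypi(path):
    # Drop comment lines, then evaluate the remaining Python-style literal.
    with open(path) as f:
        text = ''.join(line for line in f if not line.lstrip().startswith('#'))
    return ast.literal_eval(text)

config = read_gypi('build/config.gypi')  # illustrative path
print(config['variables']['node_module_version'])  # 58 in the file above
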
diff --git a/test/addons-napi/test_fatal/build/gyp-mac-tool b/test/addons-napi/test_fatal/build/gyp-mac-tool
deleted file mode 100755
index 8ef02b0493a003..00000000000000
--- a/test/addons-napi/test_fatal/build/gyp-mac-tool
+++ /dev/null
@@ -1,611 +0,0 @@
-#!/usr/bin/env python
-# Generated by gyp. Do not edit.
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Utility functions to perform Xcode-style build steps.
-
-These functions are executed via gyp-mac-tool when using the Makefile generator.
-"""
-
-import fcntl
-import fnmatch
-import glob
-import json
-import os
-import plistlib
-import re
-import shutil
-import string
-import subprocess
-import sys
-import tempfile
-
-
-def main(args):
- executor = MacTool()
- exit_code = executor.Dispatch(args)
- if exit_code is not None:
- sys.exit(exit_code)
-
-
-class MacTool(object):
- """This class performs all the Mac tooling steps. The methods can either be
- executed directly, or dispatched from an argument list."""
-
- def Dispatch(self, args):
- """Dispatches a string command to a method."""
- if len(args) < 1:
- raise Exception("Not enough arguments")
-
- method = "Exec%s" % self._CommandifyName(args[0])
- return getattr(self, method)(*args[1:])
-
- def _CommandifyName(self, name_string):
- """Transforms a tool name like copy-info-plist to CopyInfoPlist"""
- return name_string.title().replace('-', '')
-
- def ExecCopyBundleResource(self, source, dest, convert_to_binary):
- """Copies a resource file to the bundle/Resources directory, performing any
- necessary compilation on each resource."""
- extension = os.path.splitext(source)[1].lower()
- if os.path.isdir(source):
- # Copy tree.
- # TODO(thakis): This copies file attributes like mtime, while the
- # single-file branch below doesn't. This should probably be changed to
- # be consistent with the single-file branch.
- if os.path.exists(dest):
- shutil.rmtree(dest)
- shutil.copytree(source, dest)
- elif extension == '.xib':
- return self._CopyXIBFile(source, dest)
- elif extension == '.storyboard':
- return self._CopyXIBFile(source, dest)
- elif extension == '.strings':
- self._CopyStringsFile(source, dest, convert_to_binary)
- else:
- shutil.copy(source, dest)
-
- def _CopyXIBFile(self, source, dest):
- """Compiles a XIB file with ibtool into a binary plist in the bundle."""
-
- # ibtool sometimes crashes with relative paths. See crbug.com/314728.
- base = os.path.dirname(os.path.realpath(__file__))
- if os.path.relpath(source):
- source = os.path.join(base, source)
- if os.path.relpath(dest):
- dest = os.path.join(base, dest)
-
- args = ['xcrun', 'ibtool', '--errors', '--warnings', '--notices',
- '--output-format', 'human-readable-text', '--compile', dest, source]
- ibtool_section_re = re.compile(r'/\*.*\*/')
- ibtool_re = re.compile(r'.*note:.*is clipping its content')
- ibtoolout = subprocess.Popen(args, stdout=subprocess.PIPE)
- current_section_header = None
- for line in ibtoolout.stdout:
- if ibtool_section_re.match(line):
- current_section_header = line
- elif not ibtool_re.match(line):
- if current_section_header:
- sys.stdout.write(current_section_header)
- current_section_header = None
- sys.stdout.write(line)
- return ibtoolout.returncode
-
- def _ConvertToBinary(self, dest):
- subprocess.check_call([
- 'xcrun', 'plutil', '-convert', 'binary1', '-o', dest, dest])
-
- def _CopyStringsFile(self, source, dest, convert_to_binary):
- """Copies a .strings file using iconv to reconvert the input into UTF-16."""
- input_code = self._DetectInputEncoding(source) or "UTF-8"
-
- # Xcode's CpyCopyStringsFile / builtin-copyStrings seems to call
- # CFPropertyListCreateFromXMLData() behind the scenes; at least it prints
- # CFPropertyListCreateFromXMLData(): Old-style plist parser: missing
- # semicolon in dictionary.
- # on invalid files. Do the same kind of validation.
- import CoreFoundation
- s = open(source, 'rb').read()
- d = CoreFoundation.CFDataCreate(None, s, len(s))
- _, error = CoreFoundation.CFPropertyListCreateFromXMLData(None, d, 0, None)
- if error:
- return
-
- fp = open(dest, 'wb')
- fp.write(s.decode(input_code).encode('UTF-16'))
- fp.close()
-
- if convert_to_binary == 'True':
- self._ConvertToBinary(dest)
-
- def _DetectInputEncoding(self, file_name):
- """Reads the first few bytes from file_name and tries to guess the text
- encoding. Returns None as a guess if it can't detect it."""
- fp = open(file_name, 'rb')
- try:
- header = fp.read(3)
- except e:
- fp.close()
- return None
- fp.close()
- if header.startswith("\xFE\xFF"):
- return "UTF-16"
- elif header.startswith("\xFF\xFE"):
- return "UTF-16"
- elif header.startswith("\xEF\xBB\xBF"):
- return "UTF-8"
- else:
- return None
-
- def ExecCopyInfoPlist(self, source, dest, convert_to_binary, *keys):
- """Copies the |source| Info.plist to the destination directory |dest|."""
- # Read the source Info.plist into memory.
- fd = open(source, 'r')
- lines = fd.read()
- fd.close()
-
- # Insert synthesized key/value pairs (e.g. BuildMachineOSBuild).
- plist = plistlib.readPlistFromString(lines)
- if keys:
- plist = dict(plist.items() + json.loads(keys[0]).items())
- lines = plistlib.writePlistToString(plist)
-
- # Go through all the environment variables and replace them as variables in
- # the file.
- IDENT_RE = re.compile(r'[/\s]')
- for key in os.environ:
- if key.startswith('_'):
- continue
- evar = '${%s}' % key
- evalue = os.environ[key]
- lines = string.replace(lines, evar, evalue)
-
- # Xcode supports various suffices on environment variables, which are
- # all undocumented. :rfc1034identifier is used in the standard project
- # template these days, and :identifier was used earlier. They are used to
- # convert non-url characters into things that look like valid urls --
- # except that the replacement character for :identifier, '_' isn't valid
- # in a URL either -- oops, hence :rfc1034identifier was born.
- evar = '${%s:identifier}' % key
- evalue = IDENT_RE.sub('_', os.environ[key])
- lines = string.replace(lines, evar, evalue)
-
- evar = '${%s:rfc1034identifier}' % key
- evalue = IDENT_RE.sub('-', os.environ[key])
- lines = string.replace(lines, evar, evalue)
-
- # Remove any keys with values that haven't been replaced.
- lines = lines.split('\n')
- for i in range(len(lines)):
- if lines[i].strip().startswith("${"):
- lines[i] = None
- lines[i - 1] = None
- lines = '\n'.join(filter(lambda x: x is not None, lines))
-
- # Write out the file with variables replaced.
- fd = open(dest, 'w')
- fd.write(lines)
- fd.close()
-
- # Now write out PkgInfo file now that the Info.plist file has been
- # "compiled".
- self._WritePkgInfo(dest)
-
- if convert_to_binary == 'True':
- self._ConvertToBinary(dest)
-
- def _WritePkgInfo(self, info_plist):
- """This writes the PkgInfo file from the data stored in Info.plist."""
- plist = plistlib.readPlist(info_plist)
- if not plist:
- return
-
- # Only create PkgInfo for executable types.
- package_type = plist['CFBundlePackageType']
- if package_type != 'APPL':
- return
-
- # The format of PkgInfo is eight characters, representing the bundle type
- # and bundle signature, each four characters. If that is missing, four
- # '?' characters are used instead.
- signature_code = plist.get('CFBundleSignature', '????')
- if len(signature_code) != 4: # Wrong length resets everything, too.
- signature_code = '?' * 4
-
- dest = os.path.join(os.path.dirname(info_plist), 'PkgInfo')
- fp = open(dest, 'w')
- fp.write('%s%s' % (package_type, signature_code))
- fp.close()
-
- def ExecFlock(self, lockfile, *cmd_list):
- """Emulates the most basic behavior of Linux's flock(1)."""
- # Rely on exception handling to report errors.
- fd = os.open(lockfile, os.O_RDONLY|os.O_NOCTTY|os.O_CREAT, 0o666)
- fcntl.flock(fd, fcntl.LOCK_EX)
- return subprocess.call(cmd_list)
-
- def ExecFilterLibtool(self, *cmd_list):
- """Calls libtool and filters out '/path/to/libtool: file: foo.o has no
- symbols'."""
- libtool_re = re.compile(r'^.*libtool: file: .* has no symbols$')
- libtool_re5 = re.compile(
- r'^.*libtool: warning for library: ' +
- r'.* the table of contents is empty ' +
- r'\(no object file members in the library define global symbols\)$')
- env = os.environ.copy()
- # Ref:
- # http://www.opensource.apple.com/source/cctools/cctools-809/misc/libtool.c
- # The problem with this flag is that it resets the file mtime on the file to
- # epoch=0, e.g. 1970-1-1 or 1969-12-31 depending on timezone.
- env['ZERO_AR_DATE'] = '1'
- libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env)
- _, err = libtoolout.communicate()
- for line in err.splitlines():
- if not libtool_re.match(line) and not libtool_re5.match(line):
- print >>sys.stderr, line
- # Unconditionally touch the output .a file on the command line if present
- # and the command succeeded. A bit hacky.
- if not libtoolout.returncode:
- for i in range(len(cmd_list) - 1):
- if cmd_list[i] == "-o" and cmd_list[i+1].endswith('.a'):
- os.utime(cmd_list[i+1], None)
- break
- return libtoolout.returncode
-
- def ExecPackageFramework(self, framework, version):
- """Takes a path to Something.framework and the Current version of that and
- sets up all the symlinks."""
- # Find the name of the binary based on the part before the ".framework".
- binary = os.path.basename(framework).split('.')[0]
-
- CURRENT = 'Current'
- RESOURCES = 'Resources'
- VERSIONS = 'Versions'
-
- if not os.path.exists(os.path.join(framework, VERSIONS, version, binary)):
- # Binary-less frameworks don't seem to contain symlinks (see e.g.
- # chromium's out/Debug/org.chromium.Chromium.manifest/ bundle).
- return
-
- # Move into the framework directory to set the symlinks correctly.
- pwd = os.getcwd()
- os.chdir(framework)
-
- # Set up the Current version.
- self._Relink(version, os.path.join(VERSIONS, CURRENT))
-
- # Set up the root symlinks.
- self._Relink(os.path.join(VERSIONS, CURRENT, binary), binary)
- self._Relink(os.path.join(VERSIONS, CURRENT, RESOURCES), RESOURCES)
-
- # Back to where we were before!
- os.chdir(pwd)
-
- def _Relink(self, dest, link):
- """Creates a symlink to |dest| named |link|. If |link| already exists,
- it is overwritten."""
- if os.path.lexists(link):
- os.remove(link)
- os.symlink(dest, link)
-
- def ExecCompileXcassets(self, keys, *inputs):
- """Compiles multiple .xcassets files into a single .car file.
-
- This invokes 'actool' to compile all the inputs .xcassets files. The
- |keys| arguments is a json-encoded dictionary of extra arguments to
- pass to 'actool' when the asset catalogs contains an application icon
- or a launch image.
-
- Note that 'actool' does not create the Assets.car file if the asset
- catalogs does not contains imageset.
- """
- command_line = [
- 'xcrun', 'actool', '--output-format', 'human-readable-text',
- '--compress-pngs', '--notices', '--warnings', '--errors',
- ]
- is_iphone_target = 'IPHONEOS_DEPLOYMENT_TARGET' in os.environ
- if is_iphone_target:
- platform = os.environ['CONFIGURATION'].split('-')[-1]
- if platform not in ('iphoneos', 'iphonesimulator'):
- platform = 'iphonesimulator'
- command_line.extend([
- '--platform', platform, '--target-device', 'iphone',
- '--target-device', 'ipad', '--minimum-deployment-target',
- os.environ['IPHONEOS_DEPLOYMENT_TARGET'], '--compile',
- os.path.abspath(os.environ['CONTENTS_FOLDER_PATH']),
- ])
- else:
- command_line.extend([
- '--platform', 'macosx', '--target-device', 'mac',
- '--minimum-deployment-target', os.environ['MACOSX_DEPLOYMENT_TARGET'],
- '--compile',
- os.path.abspath(os.environ['UNLOCALIZED_RESOURCES_FOLDER_PATH']),
- ])
- if keys:
- keys = json.loads(keys)
- for key, value in keys.iteritems():
- arg_name = '--' + key
- if isinstance(value, bool):
- if value:
- command_line.append(arg_name)
- elif isinstance(value, list):
- for v in value:
- command_line.append(arg_name)
- command_line.append(str(v))
- else:
- command_line.append(arg_name)
- command_line.append(str(value))
- # Note: actool crashes if inputs path are relative, so use os.path.abspath
- # to get absolute path name for inputs.
- command_line.extend(map(os.path.abspath, inputs))
- subprocess.check_call(command_line)
-
- def ExecMergeInfoPlist(self, output, *inputs):
- """Merge multiple .plist files into a single .plist file."""
- merged_plist = {}
- for path in inputs:
- plist = self._LoadPlistMaybeBinary(path)
- self._MergePlist(merged_plist, plist)
- plistlib.writePlist(merged_plist, output)
-
- def ExecCodeSignBundle(self, key, resource_rules, entitlements, provisioning):
- """Code sign a bundle.
-
- This function tries to code sign an iOS bundle, following the same
- algorithm as Xcode:
- 1. copy ResourceRules.plist from the user or the SDK into the bundle,
- 2. pick the provisioning profile that best match the bundle identifier,
- and copy it into the bundle as embedded.mobileprovision,
- 3. copy Entitlements.plist from user or SDK next to the bundle,
- 4. code sign the bundle.
- """
- resource_rules_path = self._InstallResourceRules(resource_rules)
- substitutions, overrides = self._InstallProvisioningProfile(
- provisioning, self._GetCFBundleIdentifier())
- entitlements_path = self._InstallEntitlements(
- entitlements, substitutions, overrides)
- subprocess.check_call([
- 'codesign', '--force', '--sign', key, '--resource-rules',
- resource_rules_path, '--entitlements', entitlements_path,
- os.path.join(
- os.environ['TARGET_BUILD_DIR'],
- os.environ['FULL_PRODUCT_NAME'])])
-
- def _InstallResourceRules(self, resource_rules):
- """Installs ResourceRules.plist from user or SDK into the bundle.
-
- Args:
- resource_rules: string, optional, path to the ResourceRules.plist file
- to use, default to "${SDKROOT}/ResourceRules.plist"
-
- Returns:
- Path to the copy of ResourceRules.plist into the bundle.
- """
- source_path = resource_rules
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['CONTENTS_FOLDER_PATH'],
- 'ResourceRules.plist')
- if not source_path:
- source_path = os.path.join(
- os.environ['SDKROOT'], 'ResourceRules.plist')
- shutil.copy2(source_path, target_path)
- return target_path
-
- def _InstallProvisioningProfile(self, profile, bundle_identifier):
- """Installs embedded.mobileprovision into the bundle.
-
- Args:
- profile: string, optional, short name of the .mobileprovision file
- to use, if empty or the file is missing, the best file installed
- will be used
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
-
- Returns:
- A tuple containing two dictionary: variables substitutions and values
- to overrides when generating the entitlements file.
- """
- source_path, provisioning_data, team_id = self._FindProvisioningProfile(
- profile, bundle_identifier)
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['CONTENTS_FOLDER_PATH'],
- 'embedded.mobileprovision')
- shutil.copy2(source_path, target_path)
- substitutions = self._GetSubstitutions(bundle_identifier, team_id + '.')
- return substitutions, provisioning_data['Entitlements']
-
- def _FindProvisioningProfile(self, profile, bundle_identifier):
- """Finds the .mobileprovision file to use for signing the bundle.
-
- Checks all the installed provisioning profiles (or if the user specified
- the PROVISIONING_PROFILE variable, only consult it) and select the most
- specific that correspond to the bundle identifier.
-
- Args:
- profile: string, optional, short name of the .mobileprovision file
- to use, if empty or the file is missing, the best file installed
- will be used
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
-
- Returns:
- A tuple of the path to the selected provisioning profile, the data of
- the embedded plist in the provisioning profile and the team identifier
- to use for code signing.
-
- Raises:
- SystemExit: if no .mobileprovision can be used to sign the bundle.
- """
- profiles_dir = os.path.join(
- os.environ['HOME'], 'Library', 'MobileDevice', 'Provisioning Profiles')
- if not os.path.isdir(profiles_dir):
- print >>sys.stderr, (
- 'cannot find mobile provisioning for %s' % bundle_identifier)
- sys.exit(1)
- provisioning_profiles = None
- if profile:
- profile_path = os.path.join(profiles_dir, profile + '.mobileprovision')
- if os.path.exists(profile_path):
- provisioning_profiles = [profile_path]
- if not provisioning_profiles:
- provisioning_profiles = glob.glob(
- os.path.join(profiles_dir, '*.mobileprovision'))
- valid_provisioning_profiles = {}
- for profile_path in provisioning_profiles:
- profile_data = self._LoadProvisioningProfile(profile_path)
- app_id_pattern = profile_data.get(
- 'Entitlements', {}).get('application-identifier', '')
- for team_identifier in profile_data.get('TeamIdentifier', []):
- app_id = '%s.%s' % (team_identifier, bundle_identifier)
- if fnmatch.fnmatch(app_id, app_id_pattern):
- valid_provisioning_profiles[app_id_pattern] = (
- profile_path, profile_data, team_identifier)
- if not valid_provisioning_profiles:
- print >>sys.stderr, (
- 'cannot find mobile provisioning for %s' % bundle_identifier)
- sys.exit(1)
- # If the user has multiple provisioning profiles installed that can be
- # used for ${bundle_identifier}, pick the most specific one (ie. the
- # provisioning profile whose pattern is the longest).
- selected_key = max(valid_provisioning_profiles, key=lambda v: len(v))
- return valid_provisioning_profiles[selected_key]
-
- def _LoadProvisioningProfile(self, profile_path):
- """Extracts the plist embedded in a provisioning profile.
-
- Args:
- profile_path: string, path to the .mobileprovision file
-
- Returns:
- Content of the plist embedded in the provisioning profile as a dictionary.
- """
- with tempfile.NamedTemporaryFile() as temp:
- subprocess.check_call([
- 'security', 'cms', '-D', '-i', profile_path, '-o', temp.name])
- return self._LoadPlistMaybeBinary(temp.name)
-
- def _MergePlist(self, merged_plist, plist):
- """Merge |plist| into |merged_plist|."""
- for key, value in plist.iteritems():
- if isinstance(value, dict):
- merged_value = merged_plist.get(key, {})
- if isinstance(merged_value, dict):
- self._MergePlist(merged_value, value)
- merged_plist[key] = merged_value
- else:
- merged_plist[key] = value
- else:
- merged_plist[key] = value
-
- def _LoadPlistMaybeBinary(self, plist_path):
- """Loads into a memory a plist possibly encoded in binary format.
-
- This is a wrapper around plistlib.readPlist that tries to convert the
- plist to the XML format if it can't be parsed (assuming that it is in
- the binary format).
-
- Args:
- plist_path: string, path to a plist file, in XML or binary format
-
- Returns:
- Content of the plist as a dictionary.
- """
- try:
- # First, try to read the file using plistlib that only supports XML,
- # and if an exception is raised, convert a temporary copy to XML and
- # load that copy.
- return plistlib.readPlist(plist_path)
- except:
- pass
- with tempfile.NamedTemporaryFile() as temp:
- shutil.copy2(plist_path, temp.name)
- subprocess.check_call(['plutil', '-convert', 'xml1', temp.name])
- return plistlib.readPlist(temp.name)
-
- def _GetSubstitutions(self, bundle_identifier, app_identifier_prefix):
- """Constructs a dictionary of variable substitutions for Entitlements.plist.
-
- Args:
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
- app_identifier_prefix: string, value for AppIdentifierPrefix
-
- Returns:
- Dictionary of substitutions to apply when generating Entitlements.plist.
- """
- return {
- 'CFBundleIdentifier': bundle_identifier,
- 'AppIdentifierPrefix': app_identifier_prefix,
- }
-
- def _GetCFBundleIdentifier(self):
- """Extracts CFBundleIdentifier value from Info.plist in the bundle.
-
- Returns:
- Value of CFBundleIdentifier in the Info.plist located in the bundle.
- """
- info_plist_path = os.path.join(
- os.environ['TARGET_BUILD_DIR'],
- os.environ['INFOPLIST_PATH'])
- info_plist_data = self._LoadPlistMaybeBinary(info_plist_path)
- return info_plist_data['CFBundleIdentifier']
-
- def _InstallEntitlements(self, entitlements, substitutions, overrides):
- """Generates and install the ${BundleName}.xcent entitlements file.
-
- Expands variables "$(variable)" pattern in the source entitlements file,
- add extra entitlements defined in the .mobileprovision file and the copy
- the generated plist to "${BundlePath}.xcent".
-
- Args:
- entitlements: string, optional, path to the Entitlements.plist template
- to use, defaults to "${SDKROOT}/Entitlements.plist"
- substitutions: dictionary, variable substitutions
- overrides: dictionary, values to add to the entitlements
-
- Returns:
- Path to the generated entitlements file.
- """
- source_path = entitlements
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['PRODUCT_NAME'] + '.xcent')
- if not source_path:
- source_path = os.path.join(
- os.environ['SDKROOT'],
- 'Entitlements.plist')
- shutil.copy2(source_path, target_path)
- data = self._LoadPlistMaybeBinary(target_path)
- data = self._ExpandVariables(data, substitutions)
- if overrides:
- for key in overrides:
- if key not in data:
- data[key] = overrides[key]
- plistlib.writePlist(data, target_path)
- return target_path
-
- def _ExpandVariables(self, data, substitutions):
- """Expands variables "$(variable)" in data.
-
- Args:
- data: object, can be either string, list or dictionary
- substitutions: dictionary, variable substitutions to perform
-
- Returns:
- Copy of data where each references to "$(variable)" has been replaced
- by the corresponding value found in substitutions, or left intact if
- the key was not found.
- """
- if isinstance(data, str):
- for key, value in substitutions.iteritems():
- data = data.replace('$(%s)' % key, value)
- return data
- if isinstance(data, list):
- return [self._ExpandVariables(v, substitutions) for v in data]
- if isinstance(data, dict):
- return {k: self._ExpandVariables(data[k], substitutions) for k in data}
- return data
-
-if __name__ == '__main__':
- sys.exit(main(sys.argv[1:]))
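
The gyp-mac-tool script deleted above is driven entirely through MacTool.Dispatch: the Makefile invokes ./gyp-mac-tool with a hyphenated command name, _CommandifyName maps that name to an Exec* method (e.g. 'copy-bundle-resource' becomes ExecCopyBundleResource), and the remaining arguments are passed through. A short usage sketch, with illustrative file paths:

# 'copy-bundle-resource' -> ExecCopyBundleResource, 'flock' -> ExecFlock, etc.
tool = MacTool()
tool.Dispatch(['copy-bundle-resource',
               'Resources/en.lproj/InfoPlist.strings',       # source (illustrative)
               'build/Release/app.bundle/InfoPlist.strings',  # dest (illustrative)
               'False'])                                      # convert_to_binary
tool.Dispatch(['flock', 'build/linker.lock',                  # lockfile (illustrative)
               'libtool', '-static', '-o', 'out.a'])          # command to run under the lock
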
diff --git a/test/addons-napi/test_fatal/build/test_fatal.target.mk b/test/addons-napi/test_fatal/build/test_fatal.target.mk
deleted file mode 100644
index e050fd9d808389..00000000000000
--- a/test/addons-napi/test_fatal/build/test_fatal.target.mk
+++ /dev/null
@@ -1,178 +0,0 @@
-# This file is generated by gyp; do not edit.
-
-TOOLSET := target
-TARGET := test_fatal
-DEFS_Debug := \
- '-DNODE_GYP_MODULE_NAME=test_fatal' \
- '-DUSING_UV_SHARED=1' \
- '-DUSING_V8_SHARED=1' \
- '-DV8_DEPRECATION_WARNINGS=1' \
- '-D_DARWIN_USE_64_BIT_INODE=1' \
- '-D_LARGEFILE_SOURCE' \
- '-D_FILE_OFFSET_BITS=64' \
- '-DBUILDING_NODE_EXTENSION' \
- '-DDEBUG' \
- '-D_DEBUG' \
- '-DV8_ENABLE_CHECKS'
-
-# Flags passed to all source files.
-CFLAGS_Debug := \
- -O0 \
- -gdwarf-2 \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -Wall \
- -Wendif-labels \
- -W \
- -Wno-unused-parameter
-
-# Flags passed to only C files.
-CFLAGS_C_Debug := \
- -fno-strict-aliasing
-
-# Flags passed to only C++ files.
-CFLAGS_CC_Debug := \
- -std=gnu++0x \
- -stdlib=libc++ \
- -fno-rtti \
- -fno-exceptions \
- -fno-threadsafe-statics \
- -fno-strict-aliasing
-
-# Flags passed to only ObjC files.
-CFLAGS_OBJC_Debug :=
-
-# Flags passed to only ObjC++ files.
-CFLAGS_OBJCC_Debug :=
-
-INCS_Debug := \
- -I/Users/trott/io.js/include/node \
- -I/Users/trott/io.js/src \
- -I/Users/trott/io.js/deps/uv/include \
- -I/Users/trott/io.js/deps/v8/include
-
-DEFS_Release := \
- '-DNODE_GYP_MODULE_NAME=test_fatal' \
- '-DUSING_UV_SHARED=1' \
- '-DUSING_V8_SHARED=1' \
- '-DV8_DEPRECATION_WARNINGS=1' \
- '-D_DARWIN_USE_64_BIT_INODE=1' \
- '-D_LARGEFILE_SOURCE' \
- '-D_FILE_OFFSET_BITS=64' \
- '-DBUILDING_NODE_EXTENSION'
-
-# Flags passed to all source files.
-CFLAGS_Release := \
- -Os \
- -gdwarf-2 \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -Wall \
- -Wendif-labels \
- -W \
- -Wno-unused-parameter
-
-# Flags passed to only C files.
-CFLAGS_C_Release := \
- -fno-strict-aliasing
-
-# Flags passed to only C++ files.
-CFLAGS_CC_Release := \
- -std=gnu++0x \
- -stdlib=libc++ \
- -fno-rtti \
- -fno-exceptions \
- -fno-threadsafe-statics \
- -fno-strict-aliasing
-
-# Flags passed to only ObjC files.
-CFLAGS_OBJC_Release :=
-
-# Flags passed to only ObjC++ files.
-CFLAGS_OBJCC_Release :=
-
-INCS_Release := \
- -I/Users/trott/io.js/include/node \
- -I/Users/trott/io.js/src \
- -I/Users/trott/io.js/deps/uv/include \
- -I/Users/trott/io.js/deps/v8/include
-
-OBJS := \
- $(obj).target/$(TARGET)/test_fatal.o
-
-# Add to the list of files we specially track dependencies for.
-all_deps += $(OBJS)
-
-# CFLAGS et al overrides must be target-local.
-# See "Target-specific Variable Values" in the GNU Make manual.
-$(OBJS): TOOLSET := $(TOOLSET)
-$(OBJS): GYP_CFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE))
-$(OBJS): GYP_CXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE))
-$(OBJS): GYP_OBJCFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) $(CFLAGS_OBJC_$(BUILDTYPE))
-$(OBJS): GYP_OBJCXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) $(CFLAGS_OBJCC_$(BUILDTYPE))
-
-# Suffix rules, putting all outputs into $(obj).
-
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-# Try building from generated source, too.
-
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-# End of this set of suffix rules
-### Rules for final target.
-LDFLAGS_Debug := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -L$(builddir) \
- -stdlib=libc++
-
-LIBTOOLFLAGS_Debug := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first
-
-LDFLAGS_Release := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -L$(builddir) \
- -stdlib=libc++
-
-LIBTOOLFLAGS_Release := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first
-
-LIBS :=
-
-$(builddir)/test_fatal.node: GYP_LDFLAGS := $(LDFLAGS_$(BUILDTYPE))
-$(builddir)/test_fatal.node: LIBS := $(LIBS)
-$(builddir)/test_fatal.node: GYP_LIBTOOLFLAGS := $(LIBTOOLFLAGS_$(BUILDTYPE))
-$(builddir)/test_fatal.node: TOOLSET := $(TOOLSET)
-$(builddir)/test_fatal.node: $(OBJS) FORCE_DO_CMD
- $(call do_cmd,solink_module)
-
-all_deps += $(builddir)/test_fatal.node
-# Add target alias
-.PHONY: test_fatal
-test_fatal: $(builddir)/test_fatal.node
-
-# Short alias for building this executable.
-.PHONY: test_fatal.node
-test_fatal.node: $(builddir)/test_fatal.node
-
-# Add executable to "all" target.
-.PHONY: all
-all: $(builddir)/test_fatal.node
-
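
The test_fatal.target.mk removed above keeps one flag set per configuration (DEFS_Debug/DEFS_Release, CFLAGS_Debug/CFLAGS_Release, and so on) and selects between them with computed variable names such as $(DEFS_$(BUILDTYPE)). A rough Python analogue of that selection, using a small illustrative subset of the values from the file:

# One entry per configuration, mirroring the *_Debug / *_Release variables.
flags = {
    'Debug':   {'defs': ['-DDEBUG', '-D_DEBUG', '-DV8_ENABLE_CHECKS'],
                'cflags': ['-O0', '-gdwarf-2', '-arch', 'x86_64']},
    'Release': {'defs': ['-DBUILDING_NODE_EXTENSION'],
                'cflags': ['-Os', '-gdwarf-2', '-arch', 'x86_64']},
}
buildtype = 'Release'  # BUILDTYPE ?= Release in the top-level Makefile
gyp_cflags = flags[buildtype]['defs'] + flags[buildtype]['cflags']
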
diff --git a/test/addons-napi/test_function/build/Makefile b/test/addons-napi/test_function/build/Makefile
deleted file mode 100644
index d2a27e8fa59336..00000000000000
--- a/test/addons-napi/test_function/build/Makefile
+++ /dev/null
@@ -1,342 +0,0 @@
-# We borrow heavily from the kernel build setup, though we are simpler since
-# we don't have Kconfig tweaking settings on us.
-
-# The implicit make rules have it looking for RCS files, among other things.
-# We instead explicitly write all the rules we care about.
-# It's even quicker (saves ~200ms) to pass -r on the command line.
-MAKEFLAGS=-r
-
-# The source directory tree.
-srcdir := ..
-abs_srcdir := $(abspath $(srcdir))
-
-# The name of the builddir.
-builddir_name ?= .
-
-# The V=1 flag on command line makes us verbosely print command lines.
-ifdef V
- quiet=
-else
- quiet=quiet_
-endif
-
-# Specify BUILDTYPE=Release on the command line for a release build.
-BUILDTYPE ?= Release
-
-# Directory all our build output goes into.
-# Note that this must be two directories beneath src/ for unit tests to pass,
-# as they reach into the src/ directory for data with relative paths.
-builddir ?= $(builddir_name)/$(BUILDTYPE)
-abs_builddir := $(abspath $(builddir))
-depsdir := $(builddir)/.deps
-
-# Object output directory.
-obj := $(builddir)/obj
-abs_obj := $(abspath $(obj))
-
-# We build up a list of every single one of the targets so we can slurp in the
-# generated dependency rule Makefiles in one pass.
-all_deps :=
-
-
-
-CC.target ?= $(CC)
-CFLAGS.target ?= $(CPPFLAGS) $(CFLAGS)
-CXX.target ?= $(CXX)
-CXXFLAGS.target ?= $(CPPFLAGS) $(CXXFLAGS)
-LINK.target ?= $(LINK)
-LDFLAGS.target ?= $(LDFLAGS)
-AR.target ?= $(AR)
-
-# C++ apps need to be linked with g++.
-LINK ?= $(CXX.target)
-
-# TODO(evan): move all cross-compilation logic to gyp-time so we don't need
-# to replicate this environment fallback in make as well.
-CC.host ?= gcc
-CFLAGS.host ?= $(CPPFLAGS_host) $(CFLAGS_host)
-CXX.host ?= g++
-CXXFLAGS.host ?= $(CPPFLAGS_host) $(CXXFLAGS_host)
-LINK.host ?= $(CXX.host)
-LDFLAGS.host ?=
-AR.host ?= ar
-
-# Define a dir function that can handle spaces.
-# http://www.gnu.org/software/make/manual/make.html#Syntax-of-Functions
-# "leading spaces cannot appear in the text of the first argument as written.
-# These characters can be put into the argument value by variable substitution."
-empty :=
-space := $(empty) $(empty)
-
-# http://stackoverflow.com/questions/1189781/using-make-dir-or-notdir-on-a-path-with-spaces
-replace_spaces = $(subst $(space),?,$1)
-unreplace_spaces = $(subst ?,$(space),$1)
-dirx = $(call unreplace_spaces,$(dir $(call replace_spaces,$1)))
-
-# Flags to make gcc output dependency info. Note that you need to be
-# careful here to use the flags that ccache and distcc can understand.
-# We write to a dep file on the side first and then rename at the end
-# so we can't end up with a broken dep file.
-depfile = $(depsdir)/$(call replace_spaces,$@).d
-DEPFLAGS = -MMD -MF $(depfile).raw
-
-# We have to fixup the deps output in a few ways.
-# (1) the file output should mention the proper .o file.
-# ccache or distcc lose the path to the target, so we convert a rule of
-# the form:
-# foobar.o: DEP1 DEP2
-# into
-# path/to/foobar.o: DEP1 DEP2
-# (2) we want missing files not to cause us to fail to build.
-# We want to rewrite
-# foobar.o: DEP1 DEP2 \
-# DEP3
-# to
-# DEP1:
-# DEP2:
-# DEP3:
-# so if the files are missing, they're just considered phony rules.
-# We have to do some pretty insane escaping to get those backslashes
-# and dollar signs past make, the shell, and sed at the same time.
-# Doesn't work with spaces, but that's fine: .d files have spaces in
-# their names replaced with other characters.
-define fixup_dep
-# The depfile may not exist if the input file didn't have any #includes.
-touch $(depfile).raw
-# Fixup path as in (1).
-sed -e "s|^$(notdir $@)|$@|" $(depfile).raw >> $(depfile)
-# Add extra rules as in (2).
-# We remove slashes and replace spaces with new lines;
-# remove blank lines;
-# delete the first line and append a colon to the remaining lines.
-sed -e 's|\\||' -e 'y| |\n|' $(depfile).raw |\
- grep -v '^$$' |\
- sed -e 1d -e 's|$$|:|' \
- >> $(depfile)
-rm $(depfile).raw
-endef
-
-# Command definitions:
-# - cmd_foo is the actual command to run;
-# - quiet_cmd_foo is the brief-output summary of the command.
-
-quiet_cmd_cc = CC($(TOOLSET)) $@
-cmd_cc = $(CC.$(TOOLSET)) $(GYP_CFLAGS) $(DEPFLAGS) $(CFLAGS.$(TOOLSET)) -c -o $@ $<
-
-quiet_cmd_cxx = CXX($(TOOLSET)) $@
-cmd_cxx = $(CXX.$(TOOLSET)) $(GYP_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-
-quiet_cmd_objc = CXX($(TOOLSET)) $@
-cmd_objc = $(CC.$(TOOLSET)) $(GYP_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-quiet_cmd_objcxx = CXX($(TOOLSET)) $@
-cmd_objcxx = $(CXX.$(TOOLSET)) $(GYP_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-# Commands for precompiled header files.
-quiet_cmd_pch_c = CXX($(TOOLSET)) $@
-cmd_pch_c = $(CC.$(TOOLSET)) $(GYP_PCH_CFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-quiet_cmd_pch_cc = CXX($(TOOLSET)) $@
-cmd_pch_cc = $(CC.$(TOOLSET)) $(GYP_PCH_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-quiet_cmd_pch_m = CXX($(TOOLSET)) $@
-cmd_pch_m = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $<
-quiet_cmd_pch_mm = CXX($(TOOLSET)) $@
-cmd_pch_mm = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-# gyp-mac-tool is written next to the root Makefile by gyp.
-# Use $(4) for the command, since $(2) and $(3) are used as flag by do_cmd
-# already.
-quiet_cmd_mac_tool = MACTOOL $(4) $<
-cmd_mac_tool = ./gyp-mac-tool $(4) $< "$@"
-
-quiet_cmd_mac_package_framework = PACKAGE FRAMEWORK $@
-cmd_mac_package_framework = ./gyp-mac-tool package-framework "$@" $(4)
-
-quiet_cmd_infoplist = INFOPLIST $@
-cmd_infoplist = $(CC.$(TOOLSET)) -E -P -Wno-trigraphs -x c $(INFOPLIST_DEFINES) "$<" -o "$@"
-
-quiet_cmd_touch = TOUCH $@
-cmd_touch = touch $@
-
-quiet_cmd_copy = COPY $@
-# send stderr to /dev/null to ignore messages when linking directories.
-cmd_copy = rm -rf "$@" && cp -af "$<" "$@"
-
-quiet_cmd_alink = LIBTOOL-STATIC $@
-cmd_alink = rm -f $@ && ./gyp-mac-tool filter-libtool libtool $(GYP_LIBTOOLFLAGS) -static -o $@ $(filter %.o,$^)
-
-quiet_cmd_link = LINK($(TOOLSET)) $@
-cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS)
-
-quiet_cmd_solink = SOLINK($(TOOLSET)) $@
-cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS)
-
-quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@
-cmd_solink_module = $(LINK.$(TOOLSET)) -bundle $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS)
-
-
-# Define an escape_quotes function to escape single quotes.
-# This allows us to handle quotes properly as long as we always use
-# use single quotes and escape_quotes.
-escape_quotes = $(subst ','\'',$(1))
-# This comment is here just to include a ' to unconfuse syntax highlighting.
-# Define an escape_vars function to escape '$' variable syntax.
-# This allows us to read/write command lines with shell variables (e.g.
-# $LD_LIBRARY_PATH), without triggering make substitution.
-escape_vars = $(subst $$,$$$$,$(1))
-# Helper that expands to a shell command to echo a string exactly as it is in
-# make. This uses printf instead of echo because printf's behaviour with respect
-# to escape sequences is more portable than echo's across different shells
-# (e.g., dash, bash).
-exact_echo = printf '%s\n' '$(call escape_quotes,$(1))'
-
-# Helper to compare the command we're about to run against the command
-# we logged the last time we ran the command. Produces an empty
-# string (false) when the commands match.
-# Tricky point: Make has no string-equality test function.
-# The kernel uses the following, but it seems like it would have false
-# positives, where one string reordered its arguments.
-# arg_check = $(strip $(filter-out $(cmd_$(1)), $(cmd_$@)) \
-# $(filter-out $(cmd_$@), $(cmd_$(1))))
-# We instead substitute each for the empty string into the other, and
-# say they're equal if both substitutions produce the empty string.
-# .d files contain ? instead of spaces, take that into account.
-command_changed = $(or $(subst $(cmd_$(1)),,$(cmd_$(call replace_spaces,$@))),\
- $(subst $(cmd_$(call replace_spaces,$@)),,$(cmd_$(1))))
-
-# Helper that is non-empty when a prerequisite changes.
-# Normally make does this implicitly, but we force rules to always run
-# so we can check their command lines.
-# $? -- new prerequisites
-# $| -- order-only dependencies
-prereq_changed = $(filter-out FORCE_DO_CMD,$(filter-out $|,$?))
-
-# Helper that executes all postbuilds until one fails.
-define do_postbuilds
- @E=0;\
- for p in $(POSTBUILDS); do\
- eval $$p;\
- E=$$?;\
- if [ $$E -ne 0 ]; then\
- break;\
- fi;\
- done;\
- if [ $$E -ne 0 ]; then\
- rm -rf "$@";\
- exit $$E;\
- fi
-endef
-
-# do_cmd: run a command via the above cmd_foo names, if necessary.
-# Should always run for a given target to handle command-line changes.
-# Second argument, if non-zero, makes it do asm/C/C++ dependency munging.
-# Third argument, if non-zero, makes it do POSTBUILDS processing.
-# Note: We intentionally do NOT call dirx for depfile, since it contains ? for
-# spaces already and dirx strips the ? characters.
-define do_cmd
-$(if $(or $(command_changed),$(prereq_changed)),
- @$(call exact_echo, $($(quiet)cmd_$(1)))
- @mkdir -p "$(call dirx,$@)" "$(dir $(depfile))"
- $(if $(findstring flock,$(word 2,$(cmd_$1))),
- @$(cmd_$(1))
- @echo " $(quiet_cmd_$(1)): Finished",
- @$(cmd_$(1))
- )
- @$(call exact_echo,$(call escape_vars,cmd_$(call replace_spaces,$@) := $(cmd_$(1)))) > $(depfile)
- @$(if $(2),$(fixup_dep))
- $(if $(and $(3), $(POSTBUILDS)),
- $(call do_postbuilds)
- )
-)
-endef
-
-# Declare the "all" target first so it is the default,
-# even though we don't have the deps yet.
-.PHONY: all
-all:
-
-# make looks for ways to re-generate included makefiles, but in our case, we
-# don't have a direct way. Explicitly telling make that it has nothing to do
-# for them makes it go faster.
-%.d: ;
-
-# Use FORCE_DO_CMD to force a target to run. Should be coupled with
-# do_cmd.
-.PHONY: FORCE_DO_CMD
-FORCE_DO_CMD:
-
-TOOLSET := target
-# Suffix rules, putting all outputs into $(obj).
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cpp FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cxx FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.m FORCE_DO_CMD
- @$(call do_cmd,objc,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.mm FORCE_DO_CMD
- @$(call do_cmd,objcxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.S FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.s FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-# Try building from generated source, too.
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cpp FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cxx FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.m FORCE_DO_CMD
- @$(call do_cmd,objc,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.mm FORCE_DO_CMD
- @$(call do_cmd,objcxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.S FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.s FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-$(obj).$(TOOLSET)/%.o: $(obj)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.cpp FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.cxx FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.m FORCE_DO_CMD
- @$(call do_cmd,objc,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.mm FORCE_DO_CMD
- @$(call do_cmd,objcxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.S FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.s FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-
-ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
- $(findstring $(join ^,$(prefix)),\
- $(join ^,test_function.target.mk)))),)
- include test_function.target.mk
-endif
-
-quiet_cmd_regen_makefile = ACTION Regenerating $@
-cmd_regen_makefile = cd $(srcdir); /Users/trott/io.js/deps/npm/node_modules/node-gyp/gyp/gyp_main.py -fmake --ignore-environment "--toplevel-dir=." -I/Users/trott/io.js/test/addons-napi/test_function/build/config.gypi -I/Users/trott/io.js/deps/npm/node_modules/node-gyp/addon.gypi -I/Users/trott/io.js/common.gypi "--depth=." "-Goutput_dir=." "--generator-output=build" "-Dlibrary=shared_library" "-Dvisibility=default" "-Dnode_root_dir=/Users/trott/io.js" "-Dnode_gyp_dir=/Users/trott/io.js/deps/npm/node_modules/node-gyp" "-Dnode_lib_file=/Users/trott/io.js/$(Configuration)/node.lib" "-Dmodule_root_dir=/Users/trott/io.js/test/addons-napi/test_function" "-Dnode_engine=v8" binding.gyp
-Makefile: $(srcdir)/../../../deps/npm/node_modules/node-gyp/addon.gypi $(srcdir)/build/config.gypi $(srcdir)/binding.gyp $(srcdir)/../../../common.gypi
- $(call do_cmd,regen_makefile)
-
-# "all" is a concatenation of the "all" targets from all the included
-# sub-makefiles. This is just here to clarify.
-all:
-
-# Add in dependency-tracking rules. $(all_deps) is the list of every single
-# target in our tree. Only consider the ones with .d (dependency) info:
-d_files := $(wildcard $(foreach f,$(all_deps),$(depsdir)/$(f).d))
-ifneq ($(d_files),)
- include $(d_files)
-endif
diff --git a/test/addons-napi/test_function/build/binding.Makefile b/test/addons-napi/test_function/build/binding.Makefile
deleted file mode 100644
index 27c9b820226cc4..00000000000000
--- a/test/addons-napi/test_function/build/binding.Makefile
+++ /dev/null
@@ -1,6 +0,0 @@
-# This file is generated by gyp; do not edit.
-
-export builddir_name ?= ./build/.
-.PHONY: all
-all:
- $(MAKE) test_function
diff --git a/test/addons-napi/test_function/build/config.gypi b/test/addons-napi/test_function/build/config.gypi
deleted file mode 100644
index 6a35cfccbb772b..00000000000000
--- a/test/addons-napi/test_function/build/config.gypi
+++ /dev/null
@@ -1,70 +0,0 @@
-# Do not edit. File was generated by node-gyp's "configure" step
-{
- "target_defaults": {
- "cflags": [],
- "default_configuration": "Release",
- "defines": [],
- "include_dirs": [],
- "libraries": []
- },
- "variables": {
- "asan": 0,
- "coverage": "false",
- "debug_devtools": "node",
- "debug_http2": "false",
- "debug_nghttp2": "false",
- "force_dynamic_crt": 0,
- "host_arch": "x64",
- "icu_data_file": "icudt59l.dat",
- "icu_data_in": "../../deps/icu-small/source/data/in/icudt59l.dat",
- "icu_endianness": "l",
- "icu_gyp_path": "tools/icu/icu-generic.gyp",
- "icu_locales": "en,root",
- "icu_path": "deps/icu-small",
- "icu_small": "true",
- "icu_ver_major": "59",
- "llvm_version": 0,
- "node_byteorder": "little",
- "node_enable_d8": "false",
- "node_enable_v8_vtunejit": "false",
- "node_install_npm": "true",
- "node_module_version": 58,
- "node_no_browser_globals": "false",
- "node_prefix": "/usr/local",
- "node_release_urlbase": "",
- "node_shared": "false",
- "node_shared_cares": "false",
- "node_shared_http_parser": "false",
- "node_shared_libuv": "false",
- "node_shared_openssl": "false",
- "node_shared_zlib": "false",
- "node_tag": "",
- "node_use_bundled_v8": "true",
- "node_use_dtrace": "true",
- "node_use_etw": "false",
- "node_use_lttng": "false",
- "node_use_openssl": "true",
- "node_use_perfctr": "false",
- "node_use_v8_platform": "true",
- "node_without_node_options": "false",
- "openssl_fips": "",
- "openssl_no_asm": 0,
- "shlib_suffix": "58.dylib",
- "target_arch": "x64",
- "uv_parent_path": "/deps/uv/",
- "uv_use_dtrace": "true",
- "v8_enable_gdbjit": 0,
- "v8_enable_i18n_support": 1,
- "v8_enable_inspector": 1,
- "v8_no_strict_aliasing": 1,
- "v8_optimized_debug": 0,
- "v8_promise_internal_field_count": 1,
- "v8_random_seed": 0,
- "v8_trace_maps": 0,
- "v8_use_snapshot": "true",
- "want_separate_host_toolset": 0,
- "xcode_version": "8.0",
- "nodedir": "/Users/trott/io.js",
- "standalone_static_library": 1
- }
-}
diff --git a/test/addons-napi/test_function/build/gyp-mac-tool b/test/addons-napi/test_function/build/gyp-mac-tool
deleted file mode 100755
index 8ef02b0493a003..00000000000000
--- a/test/addons-napi/test_function/build/gyp-mac-tool
+++ /dev/null
@@ -1,611 +0,0 @@
-#!/usr/bin/env python
-# Generated by gyp. Do not edit.
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Utility functions to perform Xcode-style build steps.
-
-These functions are executed via gyp-mac-tool when using the Makefile generator.
-"""
-
-import fcntl
-import fnmatch
-import glob
-import json
-import os
-import plistlib
-import re
-import shutil
-import string
-import subprocess
-import sys
-import tempfile
-
-
-def main(args):
- executor = MacTool()
- exit_code = executor.Dispatch(args)
- if exit_code is not None:
- sys.exit(exit_code)
-
-
-class MacTool(object):
- """This class performs all the Mac tooling steps. The methods can either be
- executed directly, or dispatched from an argument list."""
-
- def Dispatch(self, args):
- """Dispatches a string command to a method."""
- if len(args) < 1:
- raise Exception("Not enough arguments")
-
- method = "Exec%s" % self._CommandifyName(args[0])
- return getattr(self, method)(*args[1:])
-
- def _CommandifyName(self, name_string):
- """Transforms a tool name like copy-info-plist to CopyInfoPlist"""
- return name_string.title().replace('-', '')
-
- def ExecCopyBundleResource(self, source, dest, convert_to_binary):
- """Copies a resource file to the bundle/Resources directory, performing any
- necessary compilation on each resource."""
- extension = os.path.splitext(source)[1].lower()
- if os.path.isdir(source):
- # Copy tree.
- # TODO(thakis): This copies file attributes like mtime, while the
- # single-file branch below doesn't. This should probably be changed to
- # be consistent with the single-file branch.
- if os.path.exists(dest):
- shutil.rmtree(dest)
- shutil.copytree(source, dest)
- elif extension == '.xib':
- return self._CopyXIBFile(source, dest)
- elif extension == '.storyboard':
- return self._CopyXIBFile(source, dest)
- elif extension == '.strings':
- self._CopyStringsFile(source, dest, convert_to_binary)
- else:
- shutil.copy(source, dest)
-
- def _CopyXIBFile(self, source, dest):
- """Compiles a XIB file with ibtool into a binary plist in the bundle."""
-
- # ibtool sometimes crashes with relative paths. See crbug.com/314728.
- base = os.path.dirname(os.path.realpath(__file__))
- if os.path.relpath(source):
- source = os.path.join(base, source)
- if os.path.relpath(dest):
- dest = os.path.join(base, dest)
-
- args = ['xcrun', 'ibtool', '--errors', '--warnings', '--notices',
- '--output-format', 'human-readable-text', '--compile', dest, source]
- ibtool_section_re = re.compile(r'/\*.*\*/')
- ibtool_re = re.compile(r'.*note:.*is clipping its content')
- ibtoolout = subprocess.Popen(args, stdout=subprocess.PIPE)
- current_section_header = None
- for line in ibtoolout.stdout:
- if ibtool_section_re.match(line):
- current_section_header = line
- elif not ibtool_re.match(line):
- if current_section_header:
- sys.stdout.write(current_section_header)
- current_section_header = None
- sys.stdout.write(line)
- return ibtoolout.returncode
-
- def _ConvertToBinary(self, dest):
- subprocess.check_call([
- 'xcrun', 'plutil', '-convert', 'binary1', '-o', dest, dest])
-
- def _CopyStringsFile(self, source, dest, convert_to_binary):
- """Copies a .strings file using iconv to reconvert the input into UTF-16."""
- input_code = self._DetectInputEncoding(source) or "UTF-8"
-
- # Xcode's CpyCopyStringsFile / builtin-copyStrings seems to call
- # CFPropertyListCreateFromXMLData() behind the scenes; at least it prints
- # CFPropertyListCreateFromXMLData(): Old-style plist parser: missing
- # semicolon in dictionary.
- # on invalid files. Do the same kind of validation.
- import CoreFoundation
- s = open(source, 'rb').read()
- d = CoreFoundation.CFDataCreate(None, s, len(s))
- _, error = CoreFoundation.CFPropertyListCreateFromXMLData(None, d, 0, None)
- if error:
- return
-
- fp = open(dest, 'wb')
- fp.write(s.decode(input_code).encode('UTF-16'))
- fp.close()
-
- if convert_to_binary == 'True':
- self._ConvertToBinary(dest)
-
- def _DetectInputEncoding(self, file_name):
- """Reads the first few bytes from file_name and tries to guess the text
- encoding. Returns None as a guess if it can't detect it."""
- fp = open(file_name, 'rb')
- try:
- header = fp.read(3)
- except e:
- fp.close()
- return None
- fp.close()
- if header.startswith("\xFE\xFF"):
- return "UTF-16"
- elif header.startswith("\xFF\xFE"):
- return "UTF-16"
- elif header.startswith("\xEF\xBB\xBF"):
- return "UTF-8"
- else:
- return None
-
- def ExecCopyInfoPlist(self, source, dest, convert_to_binary, *keys):
- """Copies the |source| Info.plist to the destination directory |dest|."""
- # Read the source Info.plist into memory.
- fd = open(source, 'r')
- lines = fd.read()
- fd.close()
-
- # Insert synthesized key/value pairs (e.g. BuildMachineOSBuild).
- plist = plistlib.readPlistFromString(lines)
- if keys:
- plist = dict(plist.items() + json.loads(keys[0]).items())
- lines = plistlib.writePlistToString(plist)
-
- # Go through all the environment variables and replace them as variables in
- # the file.
- IDENT_RE = re.compile(r'[/\s]')
- for key in os.environ:
- if key.startswith('_'):
- continue
- evar = '${%s}' % key
- evalue = os.environ[key]
- lines = string.replace(lines, evar, evalue)
-
- # Xcode supports various suffices on environment variables, which are
- # all undocumented. :rfc1034identifier is used in the standard project
- # template these days, and :identifier was used earlier. They are used to
- # convert non-url characters into things that look like valid urls --
- # except that the replacement character for :identifier, '_' isn't valid
- # in a URL either -- oops, hence :rfc1034identifier was born.
- evar = '${%s:identifier}' % key
- evalue = IDENT_RE.sub('_', os.environ[key])
- lines = string.replace(lines, evar, evalue)
-
- evar = '${%s:rfc1034identifier}' % key
- evalue = IDENT_RE.sub('-', os.environ[key])
- lines = string.replace(lines, evar, evalue)
-
- # Remove any keys with values that haven't been replaced.
- lines = lines.split('\n')
- for i in range(len(lines)):
- if lines[i].strip().startswith("${"):
- lines[i] = None
- lines[i - 1] = None
- lines = '\n'.join(filter(lambda x: x is not None, lines))
-
- # Write out the file with variables replaced.
- fd = open(dest, 'w')
- fd.write(lines)
- fd.close()
-
- # Now write out PkgInfo file now that the Info.plist file has been
- # "compiled".
- self._WritePkgInfo(dest)
-
- if convert_to_binary == 'True':
- self._ConvertToBinary(dest)
-
- def _WritePkgInfo(self, info_plist):
- """This writes the PkgInfo file from the data stored in Info.plist."""
- plist = plistlib.readPlist(info_plist)
- if not plist:
- return
-
- # Only create PkgInfo for executable types.
- package_type = plist['CFBundlePackageType']
- if package_type != 'APPL':
- return
-
- # The format of PkgInfo is eight characters, representing the bundle type
- # and bundle signature, each four characters. If that is missing, four
- # '?' characters are used instead.
- signature_code = plist.get('CFBundleSignature', '????')
- if len(signature_code) != 4: # Wrong length resets everything, too.
- signature_code = '?' * 4
-
- dest = os.path.join(os.path.dirname(info_plist), 'PkgInfo')
- fp = open(dest, 'w')
- fp.write('%s%s' % (package_type, signature_code))
- fp.close()
-
- def ExecFlock(self, lockfile, *cmd_list):
- """Emulates the most basic behavior of Linux's flock(1)."""
- # Rely on exception handling to report errors.
- fd = os.open(lockfile, os.O_RDONLY|os.O_NOCTTY|os.O_CREAT, 0o666)
- fcntl.flock(fd, fcntl.LOCK_EX)
- return subprocess.call(cmd_list)
-
- def ExecFilterLibtool(self, *cmd_list):
- """Calls libtool and filters out '/path/to/libtool: file: foo.o has no
- symbols'."""
- libtool_re = re.compile(r'^.*libtool: file: .* has no symbols$')
- libtool_re5 = re.compile(
- r'^.*libtool: warning for library: ' +
- r'.* the table of contents is empty ' +
- r'\(no object file members in the library define global symbols\)$')
- env = os.environ.copy()
- # Ref:
- # http://www.opensource.apple.com/source/cctools/cctools-809/misc/libtool.c
- # The problem with this flag is that it resets the file mtime on the file to
- # epoch=0, e.g. 1970-1-1 or 1969-12-31 depending on timezone.
- env['ZERO_AR_DATE'] = '1'
- libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env)
- _, err = libtoolout.communicate()
- for line in err.splitlines():
- if not libtool_re.match(line) and not libtool_re5.match(line):
- print >>sys.stderr, line
- # Unconditionally touch the output .a file on the command line if present
- # and the command succeeded. A bit hacky.
- if not libtoolout.returncode:
- for i in range(len(cmd_list) - 1):
- if cmd_list[i] == "-o" and cmd_list[i+1].endswith('.a'):
- os.utime(cmd_list[i+1], None)
- break
- return libtoolout.returncode
-
- def ExecPackageFramework(self, framework, version):
- """Takes a path to Something.framework and the Current version of that and
- sets up all the symlinks."""
- # Find the name of the binary based on the part before the ".framework".
- binary = os.path.basename(framework).split('.')[0]
-
- CURRENT = 'Current'
- RESOURCES = 'Resources'
- VERSIONS = 'Versions'
-
- if not os.path.exists(os.path.join(framework, VERSIONS, version, binary)):
- # Binary-less frameworks don't seem to contain symlinks (see e.g.
- # chromium's out/Debug/org.chromium.Chromium.manifest/ bundle).
- return
-
- # Move into the framework directory to set the symlinks correctly.
- pwd = os.getcwd()
- os.chdir(framework)
-
- # Set up the Current version.
- self._Relink(version, os.path.join(VERSIONS, CURRENT))
-
- # Set up the root symlinks.
- self._Relink(os.path.join(VERSIONS, CURRENT, binary), binary)
- self._Relink(os.path.join(VERSIONS, CURRENT, RESOURCES), RESOURCES)
-
- # Back to where we were before!
- os.chdir(pwd)
-
- def _Relink(self, dest, link):
- """Creates a symlink to |dest| named |link|. If |link| already exists,
- it is overwritten."""
- if os.path.lexists(link):
- os.remove(link)
- os.symlink(dest, link)
-
- def ExecCompileXcassets(self, keys, *inputs):
- """Compiles multiple .xcassets files into a single .car file.
-
- This invokes 'actool' to compile all the inputs .xcassets files. The
- |keys| arguments is a json-encoded dictionary of extra arguments to
- pass to 'actool' when the asset catalogs contains an application icon
- or a launch image.
-
- Note that 'actool' does not create the Assets.car file if the asset
- catalogs does not contains imageset.
- """
- command_line = [
- 'xcrun', 'actool', '--output-format', 'human-readable-text',
- '--compress-pngs', '--notices', '--warnings', '--errors',
- ]
- is_iphone_target = 'IPHONEOS_DEPLOYMENT_TARGET' in os.environ
- if is_iphone_target:
- platform = os.environ['CONFIGURATION'].split('-')[-1]
- if platform not in ('iphoneos', 'iphonesimulator'):
- platform = 'iphonesimulator'
- command_line.extend([
- '--platform', platform, '--target-device', 'iphone',
- '--target-device', 'ipad', '--minimum-deployment-target',
- os.environ['IPHONEOS_DEPLOYMENT_TARGET'], '--compile',
- os.path.abspath(os.environ['CONTENTS_FOLDER_PATH']),
- ])
- else:
- command_line.extend([
- '--platform', 'macosx', '--target-device', 'mac',
- '--minimum-deployment-target', os.environ['MACOSX_DEPLOYMENT_TARGET'],
- '--compile',
- os.path.abspath(os.environ['UNLOCALIZED_RESOURCES_FOLDER_PATH']),
- ])
- if keys:
- keys = json.loads(keys)
- for key, value in keys.iteritems():
- arg_name = '--' + key
- if isinstance(value, bool):
- if value:
- command_line.append(arg_name)
- elif isinstance(value, list):
- for v in value:
- command_line.append(arg_name)
- command_line.append(str(v))
- else:
- command_line.append(arg_name)
- command_line.append(str(value))
- # Note: actool crashes if inputs path are relative, so use os.path.abspath
- # to get absolute path name for inputs.
- command_line.extend(map(os.path.abspath, inputs))
- subprocess.check_call(command_line)
-
- def ExecMergeInfoPlist(self, output, *inputs):
- """Merge multiple .plist files into a single .plist file."""
- merged_plist = {}
- for path in inputs:
- plist = self._LoadPlistMaybeBinary(path)
- self._MergePlist(merged_plist, plist)
- plistlib.writePlist(merged_plist, output)
-
- def ExecCodeSignBundle(self, key, resource_rules, entitlements, provisioning):
- """Code sign a bundle.
-
- This function tries to code sign an iOS bundle, following the same
- algorithm as Xcode:
- 1. copy ResourceRules.plist from the user or the SDK into the bundle,
- 2. pick the provisioning profile that best match the bundle identifier,
- and copy it into the bundle as embedded.mobileprovision,
- 3. copy Entitlements.plist from user or SDK next to the bundle,
- 4. code sign the bundle.
- """
- resource_rules_path = self._InstallResourceRules(resource_rules)
- substitutions, overrides = self._InstallProvisioningProfile(
- provisioning, self._GetCFBundleIdentifier())
- entitlements_path = self._InstallEntitlements(
- entitlements, substitutions, overrides)
- subprocess.check_call([
- 'codesign', '--force', '--sign', key, '--resource-rules',
- resource_rules_path, '--entitlements', entitlements_path,
- os.path.join(
- os.environ['TARGET_BUILD_DIR'],
- os.environ['FULL_PRODUCT_NAME'])])
-
- def _InstallResourceRules(self, resource_rules):
- """Installs ResourceRules.plist from user or SDK into the bundle.
-
- Args:
- resource_rules: string, optional, path to the ResourceRules.plist file
- to use, default to "${SDKROOT}/ResourceRules.plist"
-
- Returns:
- Path to the copy of ResourceRules.plist into the bundle.
- """
- source_path = resource_rules
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['CONTENTS_FOLDER_PATH'],
- 'ResourceRules.plist')
- if not source_path:
- source_path = os.path.join(
- os.environ['SDKROOT'], 'ResourceRules.plist')
- shutil.copy2(source_path, target_path)
- return target_path
-
- def _InstallProvisioningProfile(self, profile, bundle_identifier):
- """Installs embedded.mobileprovision into the bundle.
-
- Args:
- profile: string, optional, short name of the .mobileprovision file
- to use, if empty or the file is missing, the best file installed
- will be used
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
-
- Returns:
- A tuple containing two dictionary: variables substitutions and values
- to overrides when generating the entitlements file.
- """
- source_path, provisioning_data, team_id = self._FindProvisioningProfile(
- profile, bundle_identifier)
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['CONTENTS_FOLDER_PATH'],
- 'embedded.mobileprovision')
- shutil.copy2(source_path, target_path)
- substitutions = self._GetSubstitutions(bundle_identifier, team_id + '.')
- return substitutions, provisioning_data['Entitlements']
-
- def _FindProvisioningProfile(self, profile, bundle_identifier):
- """Finds the .mobileprovision file to use for signing the bundle.
-
- Checks all the installed provisioning profiles (or if the user specified
- the PROVISIONING_PROFILE variable, only consult it) and select the most
- specific that correspond to the bundle identifier.
-
- Args:
- profile: string, optional, short name of the .mobileprovision file
- to use, if empty or the file is missing, the best file installed
- will be used
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
-
- Returns:
- A tuple of the path to the selected provisioning profile, the data of
- the embedded plist in the provisioning profile and the team identifier
- to use for code signing.
-
- Raises:
- SystemExit: if no .mobileprovision can be used to sign the bundle.
- """
- profiles_dir = os.path.join(
- os.environ['HOME'], 'Library', 'MobileDevice', 'Provisioning Profiles')
- if not os.path.isdir(profiles_dir):
- print >>sys.stderr, (
- 'cannot find mobile provisioning for %s' % bundle_identifier)
- sys.exit(1)
- provisioning_profiles = None
- if profile:
- profile_path = os.path.join(profiles_dir, profile + '.mobileprovision')
- if os.path.exists(profile_path):
- provisioning_profiles = [profile_path]
- if not provisioning_profiles:
- provisioning_profiles = glob.glob(
- os.path.join(profiles_dir, '*.mobileprovision'))
- valid_provisioning_profiles = {}
- for profile_path in provisioning_profiles:
- profile_data = self._LoadProvisioningProfile(profile_path)
- app_id_pattern = profile_data.get(
- 'Entitlements', {}).get('application-identifier', '')
- for team_identifier in profile_data.get('TeamIdentifier', []):
- app_id = '%s.%s' % (team_identifier, bundle_identifier)
- if fnmatch.fnmatch(app_id, app_id_pattern):
- valid_provisioning_profiles[app_id_pattern] = (
- profile_path, profile_data, team_identifier)
- if not valid_provisioning_profiles:
- print >>sys.stderr, (
- 'cannot find mobile provisioning for %s' % bundle_identifier)
- sys.exit(1)
- # If the user has multiple provisioning profiles installed that can be
- # used for ${bundle_identifier}, pick the most specific one (ie. the
- # provisioning profile whose pattern is the longest).
- selected_key = max(valid_provisioning_profiles, key=lambda v: len(v))
- return valid_provisioning_profiles[selected_key]
-
- def _LoadProvisioningProfile(self, profile_path):
- """Extracts the plist embedded in a provisioning profile.
-
- Args:
- profile_path: string, path to the .mobileprovision file
-
- Returns:
- Content of the plist embedded in the provisioning profile as a dictionary.
- """
- with tempfile.NamedTemporaryFile() as temp:
- subprocess.check_call([
- 'security', 'cms', '-D', '-i', profile_path, '-o', temp.name])
- return self._LoadPlistMaybeBinary(temp.name)
-
- def _MergePlist(self, merged_plist, plist):
- """Merge |plist| into |merged_plist|."""
- for key, value in plist.iteritems():
- if isinstance(value, dict):
- merged_value = merged_plist.get(key, {})
- if isinstance(merged_value, dict):
- self._MergePlist(merged_value, value)
- merged_plist[key] = merged_value
- else:
- merged_plist[key] = value
- else:
- merged_plist[key] = value
-
- def _LoadPlistMaybeBinary(self, plist_path):
- """Loads into a memory a plist possibly encoded in binary format.
-
- This is a wrapper around plistlib.readPlist that tries to convert the
- plist to the XML format if it can't be parsed (assuming that it is in
- the binary format).
-
- Args:
- plist_path: string, path to a plist file, in XML or binary format
-
- Returns:
- Content of the plist as a dictionary.
- """
- try:
- # First, try to read the file using plistlib that only supports XML,
- # and if an exception is raised, convert a temporary copy to XML and
- # load that copy.
- return plistlib.readPlist(plist_path)
- except:
- pass
- with tempfile.NamedTemporaryFile() as temp:
- shutil.copy2(plist_path, temp.name)
- subprocess.check_call(['plutil', '-convert', 'xml1', temp.name])
- return plistlib.readPlist(temp.name)
-
- def _GetSubstitutions(self, bundle_identifier, app_identifier_prefix):
- """Constructs a dictionary of variable substitutions for Entitlements.plist.
-
- Args:
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
- app_identifier_prefix: string, value for AppIdentifierPrefix
-
- Returns:
- Dictionary of substitutions to apply when generating Entitlements.plist.
- """
- return {
- 'CFBundleIdentifier': bundle_identifier,
- 'AppIdentifierPrefix': app_identifier_prefix,
- }
-
- def _GetCFBundleIdentifier(self):
- """Extracts CFBundleIdentifier value from Info.plist in the bundle.
-
- Returns:
- Value of CFBundleIdentifier in the Info.plist located in the bundle.
- """
- info_plist_path = os.path.join(
- os.environ['TARGET_BUILD_DIR'],
- os.environ['INFOPLIST_PATH'])
- info_plist_data = self._LoadPlistMaybeBinary(info_plist_path)
- return info_plist_data['CFBundleIdentifier']
-
- def _InstallEntitlements(self, entitlements, substitutions, overrides):
- """Generates and install the ${BundleName}.xcent entitlements file.
-
- Expands variables "$(variable)" pattern in the source entitlements file,
- add extra entitlements defined in the .mobileprovision file and the copy
- the generated plist to "${BundlePath}.xcent".
-
- Args:
- entitlements: string, optional, path to the Entitlements.plist template
- to use, defaults to "${SDKROOT}/Entitlements.plist"
- substitutions: dictionary, variable substitutions
- overrides: dictionary, values to add to the entitlements
-
- Returns:
- Path to the generated entitlements file.
- """
- source_path = entitlements
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['PRODUCT_NAME'] + '.xcent')
- if not source_path:
- source_path = os.path.join(
- os.environ['SDKROOT'],
- 'Entitlements.plist')
- shutil.copy2(source_path, target_path)
- data = self._LoadPlistMaybeBinary(target_path)
- data = self._ExpandVariables(data, substitutions)
- if overrides:
- for key in overrides:
- if key not in data:
- data[key] = overrides[key]
- plistlib.writePlist(data, target_path)
- return target_path
-
- def _ExpandVariables(self, data, substitutions):
- """Expands variables "$(variable)" in data.
-
- Args:
- data: object, can be either string, list or dictionary
- substitutions: dictionary, variable substitutions to perform
-
- Returns:
- Copy of data where each references to "$(variable)" has been replaced
- by the corresponding value found in substitutions, or left intact if
- the key was not found.
- """
- if isinstance(data, str):
- for key, value in substitutions.iteritems():
- data = data.replace('$(%s)' % key, value)
- return data
- if isinstance(data, list):
- return [self._ExpandVariables(v, substitutions) for v in data]
- if isinstance(data, dict):
- return {k: self._ExpandVariables(data[k], substitutions) for k in data}
- return data
-
-if __name__ == '__main__':
- sys.exit(main(sys.argv[1:]))
diff --git a/test/addons-napi/test_function/build/test_function.target.mk b/test/addons-napi/test_function/build/test_function.target.mk
deleted file mode 100644
index 5b17ff2409e0de..00000000000000
--- a/test/addons-napi/test_function/build/test_function.target.mk
+++ /dev/null
@@ -1,178 +0,0 @@
-# This file is generated by gyp; do not edit.
-
-TOOLSET := target
-TARGET := test_function
-DEFS_Debug := \
- '-DNODE_GYP_MODULE_NAME=test_function' \
- '-DUSING_UV_SHARED=1' \
- '-DUSING_V8_SHARED=1' \
- '-DV8_DEPRECATION_WARNINGS=1' \
- '-D_DARWIN_USE_64_BIT_INODE=1' \
- '-D_LARGEFILE_SOURCE' \
- '-D_FILE_OFFSET_BITS=64' \
- '-DBUILDING_NODE_EXTENSION' \
- '-DDEBUG' \
- '-D_DEBUG' \
- '-DV8_ENABLE_CHECKS'
-
-# Flags passed to all source files.
-CFLAGS_Debug := \
- -O0 \
- -gdwarf-2 \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -Wall \
- -Wendif-labels \
- -W \
- -Wno-unused-parameter
-
-# Flags passed to only C files.
-CFLAGS_C_Debug := \
- -fno-strict-aliasing
-
-# Flags passed to only C++ files.
-CFLAGS_CC_Debug := \
- -std=gnu++0x \
- -stdlib=libc++ \
- -fno-rtti \
- -fno-exceptions \
- -fno-threadsafe-statics \
- -fno-strict-aliasing
-
-# Flags passed to only ObjC files.
-CFLAGS_OBJC_Debug :=
-
-# Flags passed to only ObjC++ files.
-CFLAGS_OBJCC_Debug :=
-
-INCS_Debug := \
- -I/Users/trott/io.js/include/node \
- -I/Users/trott/io.js/src \
- -I/Users/trott/io.js/deps/uv/include \
- -I/Users/trott/io.js/deps/v8/include
-
-DEFS_Release := \
- '-DNODE_GYP_MODULE_NAME=test_function' \
- '-DUSING_UV_SHARED=1' \
- '-DUSING_V8_SHARED=1' \
- '-DV8_DEPRECATION_WARNINGS=1' \
- '-D_DARWIN_USE_64_BIT_INODE=1' \
- '-D_LARGEFILE_SOURCE' \
- '-D_FILE_OFFSET_BITS=64' \
- '-DBUILDING_NODE_EXTENSION'
-
-# Flags passed to all source files.
-CFLAGS_Release := \
- -Os \
- -gdwarf-2 \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -Wall \
- -Wendif-labels \
- -W \
- -Wno-unused-parameter
-
-# Flags passed to only C files.
-CFLAGS_C_Release := \
- -fno-strict-aliasing
-
-# Flags passed to only C++ files.
-CFLAGS_CC_Release := \
- -std=gnu++0x \
- -stdlib=libc++ \
- -fno-rtti \
- -fno-exceptions \
- -fno-threadsafe-statics \
- -fno-strict-aliasing
-
-# Flags passed to only ObjC files.
-CFLAGS_OBJC_Release :=
-
-# Flags passed to only ObjC++ files.
-CFLAGS_OBJCC_Release :=
-
-INCS_Release := \
- -I/Users/trott/io.js/include/node \
- -I/Users/trott/io.js/src \
- -I/Users/trott/io.js/deps/uv/include \
- -I/Users/trott/io.js/deps/v8/include
-
-OBJS := \
- $(obj).target/$(TARGET)/test_function.o
-
-# Add to the list of files we specially track dependencies for.
-all_deps += $(OBJS)
-
-# CFLAGS et al overrides must be target-local.
-# See "Target-specific Variable Values" in the GNU Make manual.
-$(OBJS): TOOLSET := $(TOOLSET)
-$(OBJS): GYP_CFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE))
-$(OBJS): GYP_CXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE))
-$(OBJS): GYP_OBJCFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) $(CFLAGS_OBJC_$(BUILDTYPE))
-$(OBJS): GYP_OBJCXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) $(CFLAGS_OBJCC_$(BUILDTYPE))
-
-# Suffix rules, putting all outputs into $(obj).
-
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-# Try building from generated source, too.
-
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-# End of this set of suffix rules
-### Rules for final target.
-LDFLAGS_Debug := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -L$(builddir) \
- -stdlib=libc++
-
-LIBTOOLFLAGS_Debug := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first
-
-LDFLAGS_Release := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -L$(builddir) \
- -stdlib=libc++
-
-LIBTOOLFLAGS_Release := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first
-
-LIBS :=
-
-$(builddir)/test_function.node: GYP_LDFLAGS := $(LDFLAGS_$(BUILDTYPE))
-$(builddir)/test_function.node: LIBS := $(LIBS)
-$(builddir)/test_function.node: GYP_LIBTOOLFLAGS := $(LIBTOOLFLAGS_$(BUILDTYPE))
-$(builddir)/test_function.node: TOOLSET := $(TOOLSET)
-$(builddir)/test_function.node: $(OBJS) FORCE_DO_CMD
- $(call do_cmd,solink_module)
-
-all_deps += $(builddir)/test_function.node
-# Add target alias
-.PHONY: test_function
-test_function: $(builddir)/test_function.node
-
-# Short alias for building this executable.
-.PHONY: test_function.node
-test_function.node: $(builddir)/test_function.node
-
-# Add executable to "all" target.
-.PHONY: all
-all: $(builddir)/test_function.node
-
diff --git a/test/addons-napi/test_general/build/Makefile b/test/addons-napi/test_general/build/Makefile
deleted file mode 100644
index d95e90cd924ca9..00000000000000
--- a/test/addons-napi/test_general/build/Makefile
+++ /dev/null
@@ -1,342 +0,0 @@
-# We borrow heavily from the kernel build setup, though we are simpler since
-# we don't have Kconfig tweaking settings on us.
-
-# The implicit make rules have it looking for RCS files, among other things.
-# We instead explicitly write all the rules we care about.
-# It's even quicker (saves ~200ms) to pass -r on the command line.
-MAKEFLAGS=-r
-
-# The source directory tree.
-srcdir := ..
-abs_srcdir := $(abspath $(srcdir))
-
-# The name of the builddir.
-builddir_name ?= .
-
-# The V=1 flag on command line makes us verbosely print command lines.
-ifdef V
- quiet=
-else
- quiet=quiet_
-endif
-
-# Specify BUILDTYPE=Release on the command line for a release build.
-BUILDTYPE ?= Release
-
-# Directory all our build output goes into.
-# Note that this must be two directories beneath src/ for unit tests to pass,
-# as they reach into the src/ directory for data with relative paths.
-builddir ?= $(builddir_name)/$(BUILDTYPE)
-abs_builddir := $(abspath $(builddir))
-depsdir := $(builddir)/.deps
-
-# Object output directory.
-obj := $(builddir)/obj
-abs_obj := $(abspath $(obj))
-
-# We build up a list of every single one of the targets so we can slurp in the
-# generated dependency rule Makefiles in one pass.
-all_deps :=
-
-
-
-CC.target ?= $(CC)
-CFLAGS.target ?= $(CPPFLAGS) $(CFLAGS)
-CXX.target ?= $(CXX)
-CXXFLAGS.target ?= $(CPPFLAGS) $(CXXFLAGS)
-LINK.target ?= $(LINK)
-LDFLAGS.target ?= $(LDFLAGS)
-AR.target ?= $(AR)
-
-# C++ apps need to be linked with g++.
-LINK ?= $(CXX.target)
-
-# TODO(evan): move all cross-compilation logic to gyp-time so we don't need
-# to replicate this environment fallback in make as well.
-CC.host ?= gcc
-CFLAGS.host ?= $(CPPFLAGS_host) $(CFLAGS_host)
-CXX.host ?= g++
-CXXFLAGS.host ?= $(CPPFLAGS_host) $(CXXFLAGS_host)
-LINK.host ?= $(CXX.host)
-LDFLAGS.host ?=
-AR.host ?= ar
-
-# Define a dir function that can handle spaces.
-# http://www.gnu.org/software/make/manual/make.html#Syntax-of-Functions
-# "leading spaces cannot appear in the text of the first argument as written.
-# These characters can be put into the argument value by variable substitution."
-empty :=
-space := $(empty) $(empty)
-
-# http://stackoverflow.com/questions/1189781/using-make-dir-or-notdir-on-a-path-with-spaces
-replace_spaces = $(subst $(space),?,$1)
-unreplace_spaces = $(subst ?,$(space),$1)
-dirx = $(call unreplace_spaces,$(dir $(call replace_spaces,$1)))
-
-# Flags to make gcc output dependency info. Note that you need to be
-# careful here to use the flags that ccache and distcc can understand.
-# We write to a dep file on the side first and then rename at the end
-# so we can't end up with a broken dep file.
-depfile = $(depsdir)/$(call replace_spaces,$@).d
-DEPFLAGS = -MMD -MF $(depfile).raw
-
-# We have to fixup the deps output in a few ways.
-# (1) the file output should mention the proper .o file.
-# ccache or distcc lose the path to the target, so we convert a rule of
-# the form:
-# foobar.o: DEP1 DEP2
-# into
-# path/to/foobar.o: DEP1 DEP2
-# (2) we want missing files not to cause us to fail to build.
-# We want to rewrite
-# foobar.o: DEP1 DEP2 \
-# DEP3
-# to
-# DEP1:
-# DEP2:
-# DEP3:
-# so if the files are missing, they're just considered phony rules.
-# We have to do some pretty insane escaping to get those backslashes
-# and dollar signs past make, the shell, and sed at the same time.
-# Doesn't work with spaces, but that's fine: .d files have spaces in
-# their names replaced with other characters.
-define fixup_dep
-# The depfile may not exist if the input file didn't have any #includes.
-touch $(depfile).raw
-# Fixup path as in (1).
-sed -e "s|^$(notdir $@)|$@|" $(depfile).raw >> $(depfile)
-# Add extra rules as in (2).
-# We remove slashes and replace spaces with new lines;
-# remove blank lines;
-# delete the first line and append a colon to the remaining lines.
-sed -e 's|\\||' -e 'y| |\n|' $(depfile).raw |\
- grep -v '^$$' |\
- sed -e 1d -e 's|$$|:|' \
- >> $(depfile)
-rm $(depfile).raw
-endef
-
-# Command definitions:
-# - cmd_foo is the actual command to run;
-# - quiet_cmd_foo is the brief-output summary of the command.
-
-quiet_cmd_cc = CC($(TOOLSET)) $@
-cmd_cc = $(CC.$(TOOLSET)) $(GYP_CFLAGS) $(DEPFLAGS) $(CFLAGS.$(TOOLSET)) -c -o $@ $<
-
-quiet_cmd_cxx = CXX($(TOOLSET)) $@
-cmd_cxx = $(CXX.$(TOOLSET)) $(GYP_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-
-quiet_cmd_objc = CXX($(TOOLSET)) $@
-cmd_objc = $(CC.$(TOOLSET)) $(GYP_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-quiet_cmd_objcxx = CXX($(TOOLSET)) $@
-cmd_objcxx = $(CXX.$(TOOLSET)) $(GYP_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-# Commands for precompiled header files.
-quiet_cmd_pch_c = CXX($(TOOLSET)) $@
-cmd_pch_c = $(CC.$(TOOLSET)) $(GYP_PCH_CFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-quiet_cmd_pch_cc = CXX($(TOOLSET)) $@
-cmd_pch_cc = $(CC.$(TOOLSET)) $(GYP_PCH_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-quiet_cmd_pch_m = CXX($(TOOLSET)) $@
-cmd_pch_m = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $<
-quiet_cmd_pch_mm = CXX($(TOOLSET)) $@
-cmd_pch_mm = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-# gyp-mac-tool is written next to the root Makefile by gyp.
-# Use $(4) for the command, since $(2) and $(3) are used as flag by do_cmd
-# already.
-quiet_cmd_mac_tool = MACTOOL $(4) $<
-cmd_mac_tool = ./gyp-mac-tool $(4) $< "$@"
-
-quiet_cmd_mac_package_framework = PACKAGE FRAMEWORK $@
-cmd_mac_package_framework = ./gyp-mac-tool package-framework "$@" $(4)
-
-quiet_cmd_infoplist = INFOPLIST $@
-cmd_infoplist = $(CC.$(TOOLSET)) -E -P -Wno-trigraphs -x c $(INFOPLIST_DEFINES) "$<" -o "$@"
-
-quiet_cmd_touch = TOUCH $@
-cmd_touch = touch $@
-
-quiet_cmd_copy = COPY $@
-# send stderr to /dev/null to ignore messages when linking directories.
-cmd_copy = rm -rf "$@" && cp -af "$<" "$@"
-
-quiet_cmd_alink = LIBTOOL-STATIC $@
-cmd_alink = rm -f $@ && ./gyp-mac-tool filter-libtool libtool $(GYP_LIBTOOLFLAGS) -static -o $@ $(filter %.o,$^)
-
-quiet_cmd_link = LINK($(TOOLSET)) $@
-cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS)
-
-quiet_cmd_solink = SOLINK($(TOOLSET)) $@
-cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS)
-
-quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@
-cmd_solink_module = $(LINK.$(TOOLSET)) -bundle $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS)
-
-
-# Define an escape_quotes function to escape single quotes.
-# This allows us to handle quotes properly as long as we always use
-# use single quotes and escape_quotes.
-escape_quotes = $(subst ','\'',$(1))
-# This comment is here just to include a ' to unconfuse syntax highlighting.
-# Define an escape_vars function to escape '$' variable syntax.
-# This allows us to read/write command lines with shell variables (e.g.
-# $LD_LIBRARY_PATH), without triggering make substitution.
-escape_vars = $(subst $$,$$$$,$(1))
-# Helper that expands to a shell command to echo a string exactly as it is in
-# make. This uses printf instead of echo because printf's behaviour with respect
-# to escape sequences is more portable than echo's across different shells
-# (e.g., dash, bash).
-exact_echo = printf '%s\n' '$(call escape_quotes,$(1))'
-
-# Helper to compare the command we're about to run against the command
-# we logged the last time we ran the command. Produces an empty
-# string (false) when the commands match.
-# Tricky point: Make has no string-equality test function.
-# The kernel uses the following, but it seems like it would have false
-# positives, where one string reordered its arguments.
-# arg_check = $(strip $(filter-out $(cmd_$(1)), $(cmd_$@)) \
-# $(filter-out $(cmd_$@), $(cmd_$(1))))
-# We instead substitute each for the empty string into the other, and
-# say they're equal if both substitutions produce the empty string.
-# .d files contain ? instead of spaces, take that into account.
-command_changed = $(or $(subst $(cmd_$(1)),,$(cmd_$(call replace_spaces,$@))),\
- $(subst $(cmd_$(call replace_spaces,$@)),,$(cmd_$(1))))
-
-# Helper that is non-empty when a prerequisite changes.
-# Normally make does this implicitly, but we force rules to always run
-# so we can check their command lines.
-# $? -- new prerequisites
-# $| -- order-only dependencies
-prereq_changed = $(filter-out FORCE_DO_CMD,$(filter-out $|,$?))
-
-# Helper that executes all postbuilds until one fails.
-define do_postbuilds
- @E=0;\
- for p in $(POSTBUILDS); do\
- eval $$p;\
- E=$$?;\
- if [ $$E -ne 0 ]; then\
- break;\
- fi;\
- done;\
- if [ $$E -ne 0 ]; then\
- rm -rf "$@";\
- exit $$E;\
- fi
-endef
-
-# do_cmd: run a command via the above cmd_foo names, if necessary.
-# Should always run for a given target to handle command-line changes.
-# Second argument, if non-zero, makes it do asm/C/C++ dependency munging.
-# Third argument, if non-zero, makes it do POSTBUILDS processing.
-# Note: We intentionally do NOT call dirx for depfile, since it contains ? for
-# spaces already and dirx strips the ? characters.
-define do_cmd
-$(if $(or $(command_changed),$(prereq_changed)),
- @$(call exact_echo, $($(quiet)cmd_$(1)))
- @mkdir -p "$(call dirx,$@)" "$(dir $(depfile))"
- $(if $(findstring flock,$(word 2,$(cmd_$1))),
- @$(cmd_$(1))
- @echo " $(quiet_cmd_$(1)): Finished",
- @$(cmd_$(1))
- )
- @$(call exact_echo,$(call escape_vars,cmd_$(call replace_spaces,$@) := $(cmd_$(1)))) > $(depfile)
- @$(if $(2),$(fixup_dep))
- $(if $(and $(3), $(POSTBUILDS)),
- $(call do_postbuilds)
- )
-)
-endef
-
-# Declare the "all" target first so it is the default,
-# even though we don't have the deps yet.
-.PHONY: all
-all:
-
-# make looks for ways to re-generate included makefiles, but in our case, we
-# don't have a direct way. Explicitly telling make that it has nothing to do
-# for them makes it go faster.
-%.d: ;
-
-# Use FORCE_DO_CMD to force a target to run. Should be coupled with
-# do_cmd.
-.PHONY: FORCE_DO_CMD
-FORCE_DO_CMD:
-
-TOOLSET := target
-# Suffix rules, putting all outputs into $(obj).
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cpp FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cxx FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.m FORCE_DO_CMD
- @$(call do_cmd,objc,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.mm FORCE_DO_CMD
- @$(call do_cmd,objcxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.S FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.s FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-# Try building from generated source, too.
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cpp FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cxx FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.m FORCE_DO_CMD
- @$(call do_cmd,objc,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.mm FORCE_DO_CMD
- @$(call do_cmd,objcxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.S FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.s FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-$(obj).$(TOOLSET)/%.o: $(obj)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.cpp FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.cxx FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.m FORCE_DO_CMD
- @$(call do_cmd,objc,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.mm FORCE_DO_CMD
- @$(call do_cmd,objcxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.S FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.s FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-
-ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
- $(findstring $(join ^,$(prefix)),\
- $(join ^,test_general.target.mk)))),)
- include test_general.target.mk
-endif
-
-quiet_cmd_regen_makefile = ACTION Regenerating $@
-cmd_regen_makefile = cd $(srcdir); /Users/trott/io.js/deps/npm/node_modules/node-gyp/gyp/gyp_main.py -fmake --ignore-environment "--toplevel-dir=." -I/Users/trott/io.js/test/addons-napi/test_general/build/config.gypi -I/Users/trott/io.js/deps/npm/node_modules/node-gyp/addon.gypi -I/Users/trott/io.js/common.gypi "--depth=." "-Goutput_dir=." "--generator-output=build" "-Dlibrary=shared_library" "-Dvisibility=default" "-Dnode_root_dir=/Users/trott/io.js" "-Dnode_gyp_dir=/Users/trott/io.js/deps/npm/node_modules/node-gyp" "-Dnode_lib_file=/Users/trott/io.js/$(Configuration)/node.lib" "-Dmodule_root_dir=/Users/trott/io.js/test/addons-napi/test_general" "-Dnode_engine=v8" binding.gyp
-Makefile: $(srcdir)/../../../deps/npm/node_modules/node-gyp/addon.gypi $(srcdir)/build/config.gypi $(srcdir)/binding.gyp $(srcdir)/../../../common.gypi
- $(call do_cmd,regen_makefile)
-
-# "all" is a concatenation of the "all" targets from all the included
-# sub-makefiles. This is just here to clarify.
-all:
-
-# Add in dependency-tracking rules. $(all_deps) is the list of every single
-# target in our tree. Only consider the ones with .d (dependency) info:
-d_files := $(wildcard $(foreach f,$(all_deps),$(depsdir)/$(f).d))
-ifneq ($(d_files),)
- include $(d_files)
-endif
diff --git a/test/addons-napi/test_general/build/binding.Makefile b/test/addons-napi/test_general/build/binding.Makefile
deleted file mode 100644
index 789cd04d5d342c..00000000000000
--- a/test/addons-napi/test_general/build/binding.Makefile
+++ /dev/null
@@ -1,6 +0,0 @@
-# This file is generated by gyp; do not edit.
-
-export builddir_name ?= ./build/.
-.PHONY: all
-all:
- $(MAKE) test_general
diff --git a/test/addons-napi/test_general/build/config.gypi b/test/addons-napi/test_general/build/config.gypi
deleted file mode 100644
index 6a35cfccbb772b..00000000000000
--- a/test/addons-napi/test_general/build/config.gypi
+++ /dev/null
@@ -1,70 +0,0 @@
-# Do not edit. File was generated by node-gyp's "configure" step
-{
- "target_defaults": {
- "cflags": [],
- "default_configuration": "Release",
- "defines": [],
- "include_dirs": [],
- "libraries": []
- },
- "variables": {
- "asan": 0,
- "coverage": "false",
- "debug_devtools": "node",
- "debug_http2": "false",
- "debug_nghttp2": "false",
- "force_dynamic_crt": 0,
- "host_arch": "x64",
- "icu_data_file": "icudt59l.dat",
- "icu_data_in": "../../deps/icu-small/source/data/in/icudt59l.dat",
- "icu_endianness": "l",
- "icu_gyp_path": "tools/icu/icu-generic.gyp",
- "icu_locales": "en,root",
- "icu_path": "deps/icu-small",
- "icu_small": "true",
- "icu_ver_major": "59",
- "llvm_version": 0,
- "node_byteorder": "little",
- "node_enable_d8": "false",
- "node_enable_v8_vtunejit": "false",
- "node_install_npm": "true",
- "node_module_version": 58,
- "node_no_browser_globals": "false",
- "node_prefix": "/usr/local",
- "node_release_urlbase": "",
- "node_shared": "false",
- "node_shared_cares": "false",
- "node_shared_http_parser": "false",
- "node_shared_libuv": "false",
- "node_shared_openssl": "false",
- "node_shared_zlib": "false",
- "node_tag": "",
- "node_use_bundled_v8": "true",
- "node_use_dtrace": "true",
- "node_use_etw": "false",
- "node_use_lttng": "false",
- "node_use_openssl": "true",
- "node_use_perfctr": "false",
- "node_use_v8_platform": "true",
- "node_without_node_options": "false",
- "openssl_fips": "",
- "openssl_no_asm": 0,
- "shlib_suffix": "58.dylib",
- "target_arch": "x64",
- "uv_parent_path": "/deps/uv/",
- "uv_use_dtrace": "true",
- "v8_enable_gdbjit": 0,
- "v8_enable_i18n_support": 1,
- "v8_enable_inspector": 1,
- "v8_no_strict_aliasing": 1,
- "v8_optimized_debug": 0,
- "v8_promise_internal_field_count": 1,
- "v8_random_seed": 0,
- "v8_trace_maps": 0,
- "v8_use_snapshot": "true",
- "want_separate_host_toolset": 0,
- "xcode_version": "8.0",
- "nodedir": "/Users/trott/io.js",
- "standalone_static_library": 1
- }
-}
diff --git a/test/addons-napi/test_general/build/gyp-mac-tool b/test/addons-napi/test_general/build/gyp-mac-tool
deleted file mode 100755
index 8ef02b0493a003..00000000000000
--- a/test/addons-napi/test_general/build/gyp-mac-tool
+++ /dev/null
@@ -1,611 +0,0 @@
-#!/usr/bin/env python
-# Generated by gyp. Do not edit.
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Utility functions to perform Xcode-style build steps.
-
-These functions are executed via gyp-mac-tool when using the Makefile generator.
-"""
-
-import fcntl
-import fnmatch
-import glob
-import json
-import os
-import plistlib
-import re
-import shutil
-import string
-import subprocess
-import sys
-import tempfile
-
-
-def main(args):
- executor = MacTool()
- exit_code = executor.Dispatch(args)
- if exit_code is not None:
- sys.exit(exit_code)
-
-
-class MacTool(object):
- """This class performs all the Mac tooling steps. The methods can either be
- executed directly, or dispatched from an argument list."""
-
- def Dispatch(self, args):
- """Dispatches a string command to a method."""
- if len(args) < 1:
- raise Exception("Not enough arguments")
-
- method = "Exec%s" % self._CommandifyName(args[0])
- return getattr(self, method)(*args[1:])
-
- def _CommandifyName(self, name_string):
- """Transforms a tool name like copy-info-plist to CopyInfoPlist"""
- return name_string.title().replace('-', '')
-
- def ExecCopyBundleResource(self, source, dest, convert_to_binary):
- """Copies a resource file to the bundle/Resources directory, performing any
- necessary compilation on each resource."""
- extension = os.path.splitext(source)[1].lower()
- if os.path.isdir(source):
- # Copy tree.
- # TODO(thakis): This copies file attributes like mtime, while the
- # single-file branch below doesn't. This should probably be changed to
- # be consistent with the single-file branch.
- if os.path.exists(dest):
- shutil.rmtree(dest)
- shutil.copytree(source, dest)
- elif extension == '.xib':
- return self._CopyXIBFile(source, dest)
- elif extension == '.storyboard':
- return self._CopyXIBFile(source, dest)
- elif extension == '.strings':
- self._CopyStringsFile(source, dest, convert_to_binary)
- else:
- shutil.copy(source, dest)
-
- def _CopyXIBFile(self, source, dest):
- """Compiles a XIB file with ibtool into a binary plist in the bundle."""
-
- # ibtool sometimes crashes with relative paths. See crbug.com/314728.
- base = os.path.dirname(os.path.realpath(__file__))
- if os.path.relpath(source):
- source = os.path.join(base, source)
- if os.path.relpath(dest):
- dest = os.path.join(base, dest)
-
- args = ['xcrun', 'ibtool', '--errors', '--warnings', '--notices',
- '--output-format', 'human-readable-text', '--compile', dest, source]
- ibtool_section_re = re.compile(r'/\*.*\*/')
- ibtool_re = re.compile(r'.*note:.*is clipping its content')
- ibtoolout = subprocess.Popen(args, stdout=subprocess.PIPE)
- current_section_header = None
- for line in ibtoolout.stdout:
- if ibtool_section_re.match(line):
- current_section_header = line
- elif not ibtool_re.match(line):
- if current_section_header:
- sys.stdout.write(current_section_header)
- current_section_header = None
- sys.stdout.write(line)
- return ibtoolout.returncode
-
- def _ConvertToBinary(self, dest):
- subprocess.check_call([
- 'xcrun', 'plutil', '-convert', 'binary1', '-o', dest, dest])
-
- def _CopyStringsFile(self, source, dest, convert_to_binary):
- """Copies a .strings file using iconv to reconvert the input into UTF-16."""
- input_code = self._DetectInputEncoding(source) or "UTF-8"
-
- # Xcode's CpyCopyStringsFile / builtin-copyStrings seems to call
- # CFPropertyListCreateFromXMLData() behind the scenes; at least it prints
- # CFPropertyListCreateFromXMLData(): Old-style plist parser: missing
- # semicolon in dictionary.
- # on invalid files. Do the same kind of validation.
- import CoreFoundation
- s = open(source, 'rb').read()
- d = CoreFoundation.CFDataCreate(None, s, len(s))
- _, error = CoreFoundation.CFPropertyListCreateFromXMLData(None, d, 0, None)
- if error:
- return
-
- fp = open(dest, 'wb')
- fp.write(s.decode(input_code).encode('UTF-16'))
- fp.close()
-
- if convert_to_binary == 'True':
- self._ConvertToBinary(dest)
-
- def _DetectInputEncoding(self, file_name):
- """Reads the first few bytes from file_name and tries to guess the text
- encoding. Returns None as a guess if it can't detect it."""
- fp = open(file_name, 'rb')
- try:
- header = fp.read(3)
- except e:
- fp.close()
- return None
- fp.close()
- if header.startswith("\xFE\xFF"):
- return "UTF-16"
- elif header.startswith("\xFF\xFE"):
- return "UTF-16"
- elif header.startswith("\xEF\xBB\xBF"):
- return "UTF-8"
- else:
- return None
-
- def ExecCopyInfoPlist(self, source, dest, convert_to_binary, *keys):
- """Copies the |source| Info.plist to the destination directory |dest|."""
- # Read the source Info.plist into memory.
- fd = open(source, 'r')
- lines = fd.read()
- fd.close()
-
- # Insert synthesized key/value pairs (e.g. BuildMachineOSBuild).
- plist = plistlib.readPlistFromString(lines)
- if keys:
- plist = dict(plist.items() + json.loads(keys[0]).items())
- lines = plistlib.writePlistToString(plist)
-
- # Go through all the environment variables and replace them as variables in
- # the file.
- IDENT_RE = re.compile(r'[/\s]')
- for key in os.environ:
- if key.startswith('_'):
- continue
- evar = '${%s}' % key
- evalue = os.environ[key]
- lines = string.replace(lines, evar, evalue)
-
- # Xcode supports various suffices on environment variables, which are
- # all undocumented. :rfc1034identifier is used in the standard project
- # template these days, and :identifier was used earlier. They are used to
- # convert non-url characters into things that look like valid urls --
- # except that the replacement character for :identifier, '_' isn't valid
- # in a URL either -- oops, hence :rfc1034identifier was born.
- evar = '${%s:identifier}' % key
- evalue = IDENT_RE.sub('_', os.environ[key])
- lines = string.replace(lines, evar, evalue)
-
- evar = '${%s:rfc1034identifier}' % key
- evalue = IDENT_RE.sub('-', os.environ[key])
- lines = string.replace(lines, evar, evalue)
-
- # Remove any keys with values that haven't been replaced.
- lines = lines.split('\n')
- for i in range(len(lines)):
- if lines[i].strip().startswith("${"):
- lines[i] = None
- lines[i - 1] = None
- lines = '\n'.join(filter(lambda x: x is not None, lines))
-
- # Write out the file with variables replaced.
- fd = open(dest, 'w')
- fd.write(lines)
- fd.close()
-
- # Now write out PkgInfo file now that the Info.plist file has been
- # "compiled".
- self._WritePkgInfo(dest)
-
- if convert_to_binary == 'True':
- self._ConvertToBinary(dest)
-
- def _WritePkgInfo(self, info_plist):
- """This writes the PkgInfo file from the data stored in Info.plist."""
- plist = plistlib.readPlist(info_plist)
- if not plist:
- return
-
- # Only create PkgInfo for executable types.
- package_type = plist['CFBundlePackageType']
- if package_type != 'APPL':
- return
-
- # The format of PkgInfo is eight characters, representing the bundle type
- # and bundle signature, each four characters. If that is missing, four
- # '?' characters are used instead.
- signature_code = plist.get('CFBundleSignature', '????')
- if len(signature_code) != 4: # Wrong length resets everything, too.
- signature_code = '?' * 4
-
- dest = os.path.join(os.path.dirname(info_plist), 'PkgInfo')
- fp = open(dest, 'w')
- fp.write('%s%s' % (package_type, signature_code))
- fp.close()
-
- def ExecFlock(self, lockfile, *cmd_list):
- """Emulates the most basic behavior of Linux's flock(1)."""
- # Rely on exception handling to report errors.
- fd = os.open(lockfile, os.O_RDONLY|os.O_NOCTTY|os.O_CREAT, 0o666)
- fcntl.flock(fd, fcntl.LOCK_EX)
- return subprocess.call(cmd_list)
-
- def ExecFilterLibtool(self, *cmd_list):
- """Calls libtool and filters out '/path/to/libtool: file: foo.o has no
- symbols'."""
- libtool_re = re.compile(r'^.*libtool: file: .* has no symbols$')
- libtool_re5 = re.compile(
- r'^.*libtool: warning for library: ' +
- r'.* the table of contents is empty ' +
- r'\(no object file members in the library define global symbols\)$')
- env = os.environ.copy()
- # Ref:
- # http://www.opensource.apple.com/source/cctools/cctools-809/misc/libtool.c
- # The problem with this flag is that it resets the file mtime on the file to
- # epoch=0, e.g. 1970-1-1 or 1969-12-31 depending on timezone.
- env['ZERO_AR_DATE'] = '1'
- libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env)
- _, err = libtoolout.communicate()
- for line in err.splitlines():
- if not libtool_re.match(line) and not libtool_re5.match(line):
- print >>sys.stderr, line
- # Unconditionally touch the output .a file on the command line if present
- # and the command succeeded. A bit hacky.
- if not libtoolout.returncode:
- for i in range(len(cmd_list) - 1):
- if cmd_list[i] == "-o" and cmd_list[i+1].endswith('.a'):
- os.utime(cmd_list[i+1], None)
- break
- return libtoolout.returncode
-
- def ExecPackageFramework(self, framework, version):
- """Takes a path to Something.framework and the Current version of that and
- sets up all the symlinks."""
- # Find the name of the binary based on the part before the ".framework".
- binary = os.path.basename(framework).split('.')[0]
-
- CURRENT = 'Current'
- RESOURCES = 'Resources'
- VERSIONS = 'Versions'
-
- if not os.path.exists(os.path.join(framework, VERSIONS, version, binary)):
- # Binary-less frameworks don't seem to contain symlinks (see e.g.
- # chromium's out/Debug/org.chromium.Chromium.manifest/ bundle).
- return
-
- # Move into the framework directory to set the symlinks correctly.
- pwd = os.getcwd()
- os.chdir(framework)
-
- # Set up the Current version.
- self._Relink(version, os.path.join(VERSIONS, CURRENT))
-
- # Set up the root symlinks.
- self._Relink(os.path.join(VERSIONS, CURRENT, binary), binary)
- self._Relink(os.path.join(VERSIONS, CURRENT, RESOURCES), RESOURCES)
-
- # Back to where we were before!
- os.chdir(pwd)
-
- def _Relink(self, dest, link):
- """Creates a symlink to |dest| named |link|. If |link| already exists,
- it is overwritten."""
- if os.path.lexists(link):
- os.remove(link)
- os.symlink(dest, link)
-
- def ExecCompileXcassets(self, keys, *inputs):
- """Compiles multiple .xcassets files into a single .car file.
-
- This invokes 'actool' to compile all the inputs .xcassets files. The
- |keys| arguments is a json-encoded dictionary of extra arguments to
- pass to 'actool' when the asset catalogs contains an application icon
- or a launch image.
-
- Note that 'actool' does not create the Assets.car file if the asset
- catalogs does not contains imageset.
- """
- command_line = [
- 'xcrun', 'actool', '--output-format', 'human-readable-text',
- '--compress-pngs', '--notices', '--warnings', '--errors',
- ]
- is_iphone_target = 'IPHONEOS_DEPLOYMENT_TARGET' in os.environ
- if is_iphone_target:
- platform = os.environ['CONFIGURATION'].split('-')[-1]
- if platform not in ('iphoneos', 'iphonesimulator'):
- platform = 'iphonesimulator'
- command_line.extend([
- '--platform', platform, '--target-device', 'iphone',
- '--target-device', 'ipad', '--minimum-deployment-target',
- os.environ['IPHONEOS_DEPLOYMENT_TARGET'], '--compile',
- os.path.abspath(os.environ['CONTENTS_FOLDER_PATH']),
- ])
- else:
- command_line.extend([
- '--platform', 'macosx', '--target-device', 'mac',
- '--minimum-deployment-target', os.environ['MACOSX_DEPLOYMENT_TARGET'],
- '--compile',
- os.path.abspath(os.environ['UNLOCALIZED_RESOURCES_FOLDER_PATH']),
- ])
- if keys:
- keys = json.loads(keys)
- for key, value in keys.iteritems():
- arg_name = '--' + key
- if isinstance(value, bool):
- if value:
- command_line.append(arg_name)
- elif isinstance(value, list):
- for v in value:
- command_line.append(arg_name)
- command_line.append(str(v))
- else:
- command_line.append(arg_name)
- command_line.append(str(value))
- # Note: actool crashes if inputs path are relative, so use os.path.abspath
- # to get absolute path name for inputs.
- command_line.extend(map(os.path.abspath, inputs))
- subprocess.check_call(command_line)
-
- def ExecMergeInfoPlist(self, output, *inputs):
- """Merge multiple .plist files into a single .plist file."""
- merged_plist = {}
- for path in inputs:
- plist = self._LoadPlistMaybeBinary(path)
- self._MergePlist(merged_plist, plist)
- plistlib.writePlist(merged_plist, output)
-
- def ExecCodeSignBundle(self, key, resource_rules, entitlements, provisioning):
- """Code sign a bundle.
-
- This function tries to code sign an iOS bundle, following the same
- algorithm as Xcode:
- 1. copy ResourceRules.plist from the user or the SDK into the bundle,
- 2. pick the provisioning profile that best match the bundle identifier,
- and copy it into the bundle as embedded.mobileprovision,
- 3. copy Entitlements.plist from user or SDK next to the bundle,
- 4. code sign the bundle.
- """
- resource_rules_path = self._InstallResourceRules(resource_rules)
- substitutions, overrides = self._InstallProvisioningProfile(
- provisioning, self._GetCFBundleIdentifier())
- entitlements_path = self._InstallEntitlements(
- entitlements, substitutions, overrides)
- subprocess.check_call([
- 'codesign', '--force', '--sign', key, '--resource-rules',
- resource_rules_path, '--entitlements', entitlements_path,
- os.path.join(
- os.environ['TARGET_BUILD_DIR'],
- os.environ['FULL_PRODUCT_NAME'])])
-
- def _InstallResourceRules(self, resource_rules):
- """Installs ResourceRules.plist from user or SDK into the bundle.
-
- Args:
- resource_rules: string, optional, path to the ResourceRules.plist file
- to use, default to "${SDKROOT}/ResourceRules.plist"
-
- Returns:
- Path to the copy of ResourceRules.plist into the bundle.
- """
- source_path = resource_rules
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['CONTENTS_FOLDER_PATH'],
- 'ResourceRules.plist')
- if not source_path:
- source_path = os.path.join(
- os.environ['SDKROOT'], 'ResourceRules.plist')
- shutil.copy2(source_path, target_path)
- return target_path
-
- def _InstallProvisioningProfile(self, profile, bundle_identifier):
- """Installs embedded.mobileprovision into the bundle.
-
- Args:
- profile: string, optional, short name of the .mobileprovision file
- to use, if empty or the file is missing, the best file installed
- will be used
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
-
- Returns:
-      A tuple containing two dictionaries: variable substitutions and values
-      to override when generating the entitlements file.
- """
- source_path, provisioning_data, team_id = self._FindProvisioningProfile(
- profile, bundle_identifier)
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['CONTENTS_FOLDER_PATH'],
- 'embedded.mobileprovision')
- shutil.copy2(source_path, target_path)
- substitutions = self._GetSubstitutions(bundle_identifier, team_id + '.')
- return substitutions, provisioning_data['Entitlements']
-
- def _FindProvisioningProfile(self, profile, bundle_identifier):
- """Finds the .mobileprovision file to use for signing the bundle.
-
-    Checks all the installed provisioning profiles (or, if the user specified
-    the PROVISIONING_PROFILE variable, only consults it) and selects the most
-    specific one that corresponds to the bundle identifier.
-
- Args:
- profile: string, optional, short name of the .mobileprovision file
- to use, if empty or the file is missing, the best file installed
- will be used
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
-
- Returns:
- A tuple of the path to the selected provisioning profile, the data of
- the embedded plist in the provisioning profile and the team identifier
- to use for code signing.
-
- Raises:
- SystemExit: if no .mobileprovision can be used to sign the bundle.
- """
- profiles_dir = os.path.join(
- os.environ['HOME'], 'Library', 'MobileDevice', 'Provisioning Profiles')
- if not os.path.isdir(profiles_dir):
- print >>sys.stderr, (
- 'cannot find mobile provisioning for %s' % bundle_identifier)
- sys.exit(1)
- provisioning_profiles = None
- if profile:
- profile_path = os.path.join(profiles_dir, profile + '.mobileprovision')
- if os.path.exists(profile_path):
- provisioning_profiles = [profile_path]
- if not provisioning_profiles:
- provisioning_profiles = glob.glob(
- os.path.join(profiles_dir, '*.mobileprovision'))
- valid_provisioning_profiles = {}
- for profile_path in provisioning_profiles:
- profile_data = self._LoadProvisioningProfile(profile_path)
- app_id_pattern = profile_data.get(
- 'Entitlements', {}).get('application-identifier', '')
- for team_identifier in profile_data.get('TeamIdentifier', []):
- app_id = '%s.%s' % (team_identifier, bundle_identifier)
- if fnmatch.fnmatch(app_id, app_id_pattern):
- valid_provisioning_profiles[app_id_pattern] = (
- profile_path, profile_data, team_identifier)
- if not valid_provisioning_profiles:
- print >>sys.stderr, (
- 'cannot find mobile provisioning for %s' % bundle_identifier)
- sys.exit(1)
- # If the user has multiple provisioning profiles installed that can be
- # used for ${bundle_identifier}, pick the most specific one (ie. the
- # provisioning profile whose pattern is the longest).
- selected_key = max(valid_provisioning_profiles, key=lambda v: len(v))
- return valid_provisioning_profiles[selected_key]
-
- def _LoadProvisioningProfile(self, profile_path):
- """Extracts the plist embedded in a provisioning profile.
-
- Args:
- profile_path: string, path to the .mobileprovision file
-
- Returns:
- Content of the plist embedded in the provisioning profile as a dictionary.
- """
- with tempfile.NamedTemporaryFile() as temp:
- subprocess.check_call([
- 'security', 'cms', '-D', '-i', profile_path, '-o', temp.name])
- return self._LoadPlistMaybeBinary(temp.name)
-
- def _MergePlist(self, merged_plist, plist):
- """Merge |plist| into |merged_plist|."""
- for key, value in plist.iteritems():
- if isinstance(value, dict):
- merged_value = merged_plist.get(key, {})
- if isinstance(merged_value, dict):
- self._MergePlist(merged_value, value)
- merged_plist[key] = merged_value
- else:
- merged_plist[key] = value
- else:
- merged_plist[key] = value
-
- def _LoadPlistMaybeBinary(self, plist_path):
-    """Loads into memory a plist possibly encoded in binary format.
-
- This is a wrapper around plistlib.readPlist that tries to convert the
- plist to the XML format if it can't be parsed (assuming that it is in
- the binary format).
-
- Args:
- plist_path: string, path to a plist file, in XML or binary format
-
- Returns:
- Content of the plist as a dictionary.
- """
- try:
- # First, try to read the file using plistlib that only supports XML,
- # and if an exception is raised, convert a temporary copy to XML and
- # load that copy.
- return plistlib.readPlist(plist_path)
- except:
- pass
- with tempfile.NamedTemporaryFile() as temp:
- shutil.copy2(plist_path, temp.name)
- subprocess.check_call(['plutil', '-convert', 'xml1', temp.name])
- return plistlib.readPlist(temp.name)
-
- def _GetSubstitutions(self, bundle_identifier, app_identifier_prefix):
- """Constructs a dictionary of variable substitutions for Entitlements.plist.
-
- Args:
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
- app_identifier_prefix: string, value for AppIdentifierPrefix
-
- Returns:
- Dictionary of substitutions to apply when generating Entitlements.plist.
- """
- return {
- 'CFBundleIdentifier': bundle_identifier,
- 'AppIdentifierPrefix': app_identifier_prefix,
- }
-
- def _GetCFBundleIdentifier(self):
- """Extracts CFBundleIdentifier value from Info.plist in the bundle.
-
- Returns:
- Value of CFBundleIdentifier in the Info.plist located in the bundle.
- """
- info_plist_path = os.path.join(
- os.environ['TARGET_BUILD_DIR'],
- os.environ['INFOPLIST_PATH'])
- info_plist_data = self._LoadPlistMaybeBinary(info_plist_path)
- return info_plist_data['CFBundleIdentifier']
-
- def _InstallEntitlements(self, entitlements, substitutions, overrides):
-    """Generates and installs the ${BundleName}.xcent entitlements file.
-
-    Expands the "$(variable)" patterns in the source entitlements file,
-    adds extra entitlements defined in the .mobileprovision file, and copies
-    the generated plist to "${BundlePath}.xcent".
-
- Args:
- entitlements: string, optional, path to the Entitlements.plist template
- to use, defaults to "${SDKROOT}/Entitlements.plist"
- substitutions: dictionary, variable substitutions
- overrides: dictionary, values to add to the entitlements
-
- Returns:
- Path to the generated entitlements file.
- """
- source_path = entitlements
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['PRODUCT_NAME'] + '.xcent')
- if not source_path:
- source_path = os.path.join(
- os.environ['SDKROOT'],
- 'Entitlements.plist')
- shutil.copy2(source_path, target_path)
- data = self._LoadPlistMaybeBinary(target_path)
- data = self._ExpandVariables(data, substitutions)
- if overrides:
- for key in overrides:
- if key not in data:
- data[key] = overrides[key]
- plistlib.writePlist(data, target_path)
- return target_path
-
- def _ExpandVariables(self, data, substitutions):
- """Expands variables "$(variable)" in data.
-
- Args:
- data: object, can be either string, list or dictionary
- substitutions: dictionary, variable substitutions to perform
-
- Returns:
-      Copy of data where each reference to "$(variable)" has been replaced
- by the corresponding value found in substitutions, or left intact if
- the key was not found.
- """
- if isinstance(data, str):
- for key, value in substitutions.iteritems():
- data = data.replace('$(%s)' % key, value)
- return data
- if isinstance(data, list):
- return [self._ExpandVariables(v, substitutions) for v in data]
- if isinstance(data, dict):
- return {k: self._ExpandVariables(data[k], substitutions) for k in data}
- return data
-
-if __name__ == '__main__':
- sys.exit(main(sys.argv[1:]))
diff --git a/test/addons-napi/test_general/build/test_general.target.mk b/test/addons-napi/test_general/build/test_general.target.mk
deleted file mode 100644
index 446938d67589e4..00000000000000
--- a/test/addons-napi/test_general/build/test_general.target.mk
+++ /dev/null
@@ -1,178 +0,0 @@
-# This file is generated by gyp; do not edit.
-
-TOOLSET := target
-TARGET := test_general
-DEFS_Debug := \
- '-DNODE_GYP_MODULE_NAME=test_general' \
- '-DUSING_UV_SHARED=1' \
- '-DUSING_V8_SHARED=1' \
- '-DV8_DEPRECATION_WARNINGS=1' \
- '-D_DARWIN_USE_64_BIT_INODE=1' \
- '-D_LARGEFILE_SOURCE' \
- '-D_FILE_OFFSET_BITS=64' \
- '-DBUILDING_NODE_EXTENSION' \
- '-DDEBUG' \
- '-D_DEBUG' \
- '-DV8_ENABLE_CHECKS'
-
-# Flags passed to all source files.
-CFLAGS_Debug := \
- -O0 \
- -gdwarf-2 \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -Wall \
- -Wendif-labels \
- -W \
- -Wno-unused-parameter
-
-# Flags passed to only C files.
-CFLAGS_C_Debug := \
- -fno-strict-aliasing
-
-# Flags passed to only C++ files.
-CFLAGS_CC_Debug := \
- -std=gnu++0x \
- -stdlib=libc++ \
- -fno-rtti \
- -fno-exceptions \
- -fno-threadsafe-statics \
- -fno-strict-aliasing
-
-# Flags passed to only ObjC files.
-CFLAGS_OBJC_Debug :=
-
-# Flags passed to only ObjC++ files.
-CFLAGS_OBJCC_Debug :=
-
-INCS_Debug := \
- -I/Users/trott/io.js/include/node \
- -I/Users/trott/io.js/src \
- -I/Users/trott/io.js/deps/uv/include \
- -I/Users/trott/io.js/deps/v8/include
-
-DEFS_Release := \
- '-DNODE_GYP_MODULE_NAME=test_general' \
- '-DUSING_UV_SHARED=1' \
- '-DUSING_V8_SHARED=1' \
- '-DV8_DEPRECATION_WARNINGS=1' \
- '-D_DARWIN_USE_64_BIT_INODE=1' \
- '-D_LARGEFILE_SOURCE' \
- '-D_FILE_OFFSET_BITS=64' \
- '-DBUILDING_NODE_EXTENSION'
-
-# Flags passed to all source files.
-CFLAGS_Release := \
- -Os \
- -gdwarf-2 \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -Wall \
- -Wendif-labels \
- -W \
- -Wno-unused-parameter
-
-# Flags passed to only C files.
-CFLAGS_C_Release := \
- -fno-strict-aliasing
-
-# Flags passed to only C++ files.
-CFLAGS_CC_Release := \
- -std=gnu++0x \
- -stdlib=libc++ \
- -fno-rtti \
- -fno-exceptions \
- -fno-threadsafe-statics \
- -fno-strict-aliasing
-
-# Flags passed to only ObjC files.
-CFLAGS_OBJC_Release :=
-
-# Flags passed to only ObjC++ files.
-CFLAGS_OBJCC_Release :=
-
-INCS_Release := \
- -I/Users/trott/io.js/include/node \
- -I/Users/trott/io.js/src \
- -I/Users/trott/io.js/deps/uv/include \
- -I/Users/trott/io.js/deps/v8/include
-
-OBJS := \
- $(obj).target/$(TARGET)/test_general.o
-
-# Add to the list of files we specially track dependencies for.
-all_deps += $(OBJS)
-
-# CFLAGS et al overrides must be target-local.
-# See "Target-specific Variable Values" in the GNU Make manual.
-$(OBJS): TOOLSET := $(TOOLSET)
-$(OBJS): GYP_CFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE))
-$(OBJS): GYP_CXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE))
-$(OBJS): GYP_OBJCFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) $(CFLAGS_OBJC_$(BUILDTYPE))
-$(OBJS): GYP_OBJCXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) $(CFLAGS_OBJCC_$(BUILDTYPE))
-
-# Suffix rules, putting all outputs into $(obj).
-
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-# Try building from generated source, too.
-
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-# End of this set of suffix rules
-### Rules for final target.
-LDFLAGS_Debug := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -L$(builddir) \
- -stdlib=libc++
-
-LIBTOOLFLAGS_Debug := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first
-
-LDFLAGS_Release := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -L$(builddir) \
- -stdlib=libc++
-
-LIBTOOLFLAGS_Release := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first
-
-LIBS :=
-
-$(builddir)/test_general.node: GYP_LDFLAGS := $(LDFLAGS_$(BUILDTYPE))
-$(builddir)/test_general.node: LIBS := $(LIBS)
-$(builddir)/test_general.node: GYP_LIBTOOLFLAGS := $(LIBTOOLFLAGS_$(BUILDTYPE))
-$(builddir)/test_general.node: TOOLSET := $(TOOLSET)
-$(builddir)/test_general.node: $(OBJS) FORCE_DO_CMD
- $(call do_cmd,solink_module)
-
-all_deps += $(builddir)/test_general.node
-# Add target alias
-.PHONY: test_general
-test_general: $(builddir)/test_general.node
-
-# Short alias for building this executable.
-.PHONY: test_general.node
-test_general.node: $(builddir)/test_general.node
-
-# Add executable to "all" target.
-.PHONY: all
-all: $(builddir)/test_general.node
-
diff --git a/test/addons-napi/test_handle_scope/build/Makefile b/test/addons-napi/test_handle_scope/build/Makefile
deleted file mode 100644
index c8a1d43e08f8e8..00000000000000
--- a/test/addons-napi/test_handle_scope/build/Makefile
+++ /dev/null
@@ -1,342 +0,0 @@
-# We borrow heavily from the kernel build setup, though we are simpler since
-# we don't have Kconfig tweaking settings on us.
-
-# The implicit make rules have it looking for RCS files, among other things.
-# We instead explicitly write all the rules we care about.
-# It's even quicker (saves ~200ms) to pass -r on the command line.
-MAKEFLAGS=-r
-
-# The source directory tree.
-srcdir := ..
-abs_srcdir := $(abspath $(srcdir))
-
-# The name of the builddir.
-builddir_name ?= .
-
-# The V=1 flag on command line makes us verbosely print command lines.
-ifdef V
- quiet=
-else
- quiet=quiet_
-endif
-
-# Specify BUILDTYPE=Release on the command line for a release build.
-BUILDTYPE ?= Release
-
-# Directory all our build output goes into.
-# Note that this must be two directories beneath src/ for unit tests to pass,
-# as they reach into the src/ directory for data with relative paths.
-builddir ?= $(builddir_name)/$(BUILDTYPE)
-abs_builddir := $(abspath $(builddir))
-depsdir := $(builddir)/.deps
-
-# Object output directory.
-obj := $(builddir)/obj
-abs_obj := $(abspath $(obj))
-
-# We build up a list of every single one of the targets so we can slurp in the
-# generated dependency rule Makefiles in one pass.
-all_deps :=
-
-
-
-CC.target ?= $(CC)
-CFLAGS.target ?= $(CPPFLAGS) $(CFLAGS)
-CXX.target ?= $(CXX)
-CXXFLAGS.target ?= $(CPPFLAGS) $(CXXFLAGS)
-LINK.target ?= $(LINK)
-LDFLAGS.target ?= $(LDFLAGS)
-AR.target ?= $(AR)
-
-# C++ apps need to be linked with g++.
-LINK ?= $(CXX.target)
-
-# TODO(evan): move all cross-compilation logic to gyp-time so we don't need
-# to replicate this environment fallback in make as well.
-CC.host ?= gcc
-CFLAGS.host ?= $(CPPFLAGS_host) $(CFLAGS_host)
-CXX.host ?= g++
-CXXFLAGS.host ?= $(CPPFLAGS_host) $(CXXFLAGS_host)
-LINK.host ?= $(CXX.host)
-LDFLAGS.host ?=
-AR.host ?= ar
-
-# Define a dir function that can handle spaces.
-# http://www.gnu.org/software/make/manual/make.html#Syntax-of-Functions
-# "leading spaces cannot appear in the text of the first argument as written.
-# These characters can be put into the argument value by variable substitution."
-empty :=
-space := $(empty) $(empty)
-
-# http://stackoverflow.com/questions/1189781/using-make-dir-or-notdir-on-a-path-with-spaces
-replace_spaces = $(subst $(space),?,$1)
-unreplace_spaces = $(subst ?,$(space),$1)
-dirx = $(call unreplace_spaces,$(dir $(call replace_spaces,$1)))
-
-# Flags to make gcc output dependency info. Note that you need to be
-# careful here to use the flags that ccache and distcc can understand.
-# We write to a dep file on the side first and then rename at the end
-# so we can't end up with a broken dep file.
-depfile = $(depsdir)/$(call replace_spaces,$@).d
-DEPFLAGS = -MMD -MF $(depfile).raw
-
-# We have to fixup the deps output in a few ways.
-# (1) the file output should mention the proper .o file.
-# ccache or distcc lose the path to the target, so we convert a rule of
-# the form:
-# foobar.o: DEP1 DEP2
-# into
-# path/to/foobar.o: DEP1 DEP2
-# (2) we want missing files not to cause us to fail to build.
-# We want to rewrite
-# foobar.o: DEP1 DEP2 \
-# DEP3
-# to
-# DEP1:
-# DEP2:
-# DEP3:
-# so if the files are missing, they're just considered phony rules.
-# We have to do some pretty insane escaping to get those backslashes
-# and dollar signs past make, the shell, and sed at the same time.
-# Doesn't work with spaces, but that's fine: .d files have spaces in
-# their names replaced with other characters.
-define fixup_dep
-# The depfile may not exist if the input file didn't have any #includes.
-touch $(depfile).raw
-# Fixup path as in (1).
-sed -e "s|^$(notdir $@)|$@|" $(depfile).raw >> $(depfile)
-# Add extra rules as in (2).
-# We remove slashes and replace spaces with new lines;
-# remove blank lines;
-# delete the first line and append a colon to the remaining lines.
-sed -e 's|\\||' -e 'y| |\n|' $(depfile).raw |\
- grep -v '^$$' |\
- sed -e 1d -e 's|$$|:|' \
- >> $(depfile)
-rm $(depfile).raw
-endef
-
-# Command definitions:
-# - cmd_foo is the actual command to run;
-# - quiet_cmd_foo is the brief-output summary of the command.
-
-quiet_cmd_cc = CC($(TOOLSET)) $@
-cmd_cc = $(CC.$(TOOLSET)) $(GYP_CFLAGS) $(DEPFLAGS) $(CFLAGS.$(TOOLSET)) -c -o $@ $<
-
-quiet_cmd_cxx = CXX($(TOOLSET)) $@
-cmd_cxx = $(CXX.$(TOOLSET)) $(GYP_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-
-quiet_cmd_objc = CXX($(TOOLSET)) $@
-cmd_objc = $(CC.$(TOOLSET)) $(GYP_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-quiet_cmd_objcxx = CXX($(TOOLSET)) $@
-cmd_objcxx = $(CXX.$(TOOLSET)) $(GYP_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-# Commands for precompiled header files.
-quiet_cmd_pch_c = CXX($(TOOLSET)) $@
-cmd_pch_c = $(CC.$(TOOLSET)) $(GYP_PCH_CFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-quiet_cmd_pch_cc = CXX($(TOOLSET)) $@
-cmd_pch_cc = $(CC.$(TOOLSET)) $(GYP_PCH_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-quiet_cmd_pch_m = CXX($(TOOLSET)) $@
-cmd_pch_m = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $<
-quiet_cmd_pch_mm = CXX($(TOOLSET)) $@
-cmd_pch_mm = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-# gyp-mac-tool is written next to the root Makefile by gyp.
-# Use $(4) for the command, since $(2) and $(3) are used as flag by do_cmd
-# already.
-quiet_cmd_mac_tool = MACTOOL $(4) $<
-cmd_mac_tool = ./gyp-mac-tool $(4) $< "$@"
-
-quiet_cmd_mac_package_framework = PACKAGE FRAMEWORK $@
-cmd_mac_package_framework = ./gyp-mac-tool package-framework "$@" $(4)
-
-quiet_cmd_infoplist = INFOPLIST $@
-cmd_infoplist = $(CC.$(TOOLSET)) -E -P -Wno-trigraphs -x c $(INFOPLIST_DEFINES) "$<" -o "$@"
-
-quiet_cmd_touch = TOUCH $@
-cmd_touch = touch $@
-
-quiet_cmd_copy = COPY $@
-# send stderr to /dev/null to ignore messages when linking directories.
-cmd_copy = rm -rf "$@" && cp -af "$<" "$@"
-
-quiet_cmd_alink = LIBTOOL-STATIC $@
-cmd_alink = rm -f $@ && ./gyp-mac-tool filter-libtool libtool $(GYP_LIBTOOLFLAGS) -static -o $@ $(filter %.o,$^)
-
-quiet_cmd_link = LINK($(TOOLSET)) $@
-cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS)
-
-quiet_cmd_solink = SOLINK($(TOOLSET)) $@
-cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS)
-
-quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@
-cmd_solink_module = $(LINK.$(TOOLSET)) -bundle $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS)
-
-
-# Define an escape_quotes function to escape single quotes.
-# This allows us to handle quotes properly as long as we always
-# use single quotes and escape_quotes.
-escape_quotes = $(subst ','\'',$(1))
-# This comment is here just to include a ' to unconfuse syntax highlighting.
-# Define an escape_vars function to escape '$' variable syntax.
-# This allows us to read/write command lines with shell variables (e.g.
-# $LD_LIBRARY_PATH), without triggering make substitution.
-escape_vars = $(subst $$,$$$$,$(1))
-# Helper that expands to a shell command to echo a string exactly as it is in
-# make. This uses printf instead of echo because printf's behaviour with respect
-# to escape sequences is more portable than echo's across different shells
-# (e.g., dash, bash).
-exact_echo = printf '%s\n' '$(call escape_quotes,$(1))'
-
-# Helper to compare the command we're about to run against the command
-# we logged the last time we ran the command. Produces an empty
-# string (false) when the commands match.
-# Tricky point: Make has no string-equality test function.
-# The kernel uses the following, but it seems like it would have false
-# positives, where one string reordered its arguments.
-# arg_check = $(strip $(filter-out $(cmd_$(1)), $(cmd_$@)) \
-# $(filter-out $(cmd_$@), $(cmd_$(1))))
-# We instead substitute each for the empty string into the other, and
-# say they're equal if both substitutions produce the empty string.
-# .d files contain ? instead of spaces, take that into account.
-command_changed = $(or $(subst $(cmd_$(1)),,$(cmd_$(call replace_spaces,$@))),\
- $(subst $(cmd_$(call replace_spaces,$@)),,$(cmd_$(1))))
-
-# Helper that is non-empty when a prerequisite changes.
-# Normally make does this implicitly, but we force rules to always run
-# so we can check their command lines.
-# $? -- new prerequisites
-# $| -- order-only dependencies
-prereq_changed = $(filter-out FORCE_DO_CMD,$(filter-out $|,$?))
-
-# Helper that executes all postbuilds until one fails.
-define do_postbuilds
- @E=0;\
- for p in $(POSTBUILDS); do\
- eval $$p;\
- E=$$?;\
- if [ $$E -ne 0 ]; then\
- break;\
- fi;\
- done;\
- if [ $$E -ne 0 ]; then\
- rm -rf "$@";\
- exit $$E;\
- fi
-endef
-
-# do_cmd: run a command via the above cmd_foo names, if necessary.
-# Should always run for a given target to handle command-line changes.
-# Second argument, if non-zero, makes it do asm/C/C++ dependency munging.
-# Third argument, if non-zero, makes it do POSTBUILDS processing.
-# Note: We intentionally do NOT call dirx for depfile, since it contains ? for
-# spaces already and dirx strips the ? characters.
-define do_cmd
-$(if $(or $(command_changed),$(prereq_changed)),
- @$(call exact_echo, $($(quiet)cmd_$(1)))
- @mkdir -p "$(call dirx,$@)" "$(dir $(depfile))"
- $(if $(findstring flock,$(word 2,$(cmd_$1))),
- @$(cmd_$(1))
- @echo " $(quiet_cmd_$(1)): Finished",
- @$(cmd_$(1))
- )
- @$(call exact_echo,$(call escape_vars,cmd_$(call replace_spaces,$@) := $(cmd_$(1)))) > $(depfile)
- @$(if $(2),$(fixup_dep))
- $(if $(and $(3), $(POSTBUILDS)),
- $(call do_postbuilds)
- )
-)
-endef
-
-# Declare the "all" target first so it is the default,
-# even though we don't have the deps yet.
-.PHONY: all
-all:
-
-# make looks for ways to re-generate included makefiles, but in our case, we
-# don't have a direct way. Explicitly telling make that it has nothing to do
-# for them makes it go faster.
-%.d: ;
-
-# Use FORCE_DO_CMD to force a target to run. Should be coupled with
-# do_cmd.
-.PHONY: FORCE_DO_CMD
-FORCE_DO_CMD:
-
-TOOLSET := target
-# Suffix rules, putting all outputs into $(obj).
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cpp FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cxx FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.m FORCE_DO_CMD
- @$(call do_cmd,objc,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.mm FORCE_DO_CMD
- @$(call do_cmd,objcxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.S FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.s FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-# Try building from generated source, too.
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cpp FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cxx FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.m FORCE_DO_CMD
- @$(call do_cmd,objc,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.mm FORCE_DO_CMD
- @$(call do_cmd,objcxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.S FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.s FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-$(obj).$(TOOLSET)/%.o: $(obj)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.cpp FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.cxx FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.m FORCE_DO_CMD
- @$(call do_cmd,objc,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.mm FORCE_DO_CMD
- @$(call do_cmd,objcxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.S FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.s FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-
-ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
- $(findstring $(join ^,$(prefix)),\
- $(join ^,test_handle_scope.target.mk)))),)
- include test_handle_scope.target.mk
-endif
-
-quiet_cmd_regen_makefile = ACTION Regenerating $@
-cmd_regen_makefile = cd $(srcdir); /Users/trott/io.js/deps/npm/node_modules/node-gyp/gyp/gyp_main.py -fmake --ignore-environment "--toplevel-dir=." -I/Users/trott/io.js/test/addons-napi/test_handle_scope/build/config.gypi -I/Users/trott/io.js/deps/npm/node_modules/node-gyp/addon.gypi -I/Users/trott/io.js/common.gypi "--depth=." "-Goutput_dir=." "--generator-output=build" "-Dlibrary=shared_library" "-Dvisibility=default" "-Dnode_root_dir=/Users/trott/io.js" "-Dnode_gyp_dir=/Users/trott/io.js/deps/npm/node_modules/node-gyp" "-Dnode_lib_file=/Users/trott/io.js/$(Configuration)/node.lib" "-Dmodule_root_dir=/Users/trott/io.js/test/addons-napi/test_handle_scope" "-Dnode_engine=v8" binding.gyp
-Makefile: $(srcdir)/../../../deps/npm/node_modules/node-gyp/addon.gypi $(srcdir)/build/config.gypi $(srcdir)/binding.gyp $(srcdir)/../../../common.gypi
- $(call do_cmd,regen_makefile)
-
-# "all" is a concatenation of the "all" targets from all the included
-# sub-makefiles. This is just here to clarify.
-all:
-
-# Add in dependency-tracking rules. $(all_deps) is the list of every single
-# target in our tree. Only consider the ones with .d (dependency) info:
-d_files := $(wildcard $(foreach f,$(all_deps),$(depsdir)/$(f).d))
-ifneq ($(d_files),)
- include $(d_files)
-endif
diff --git a/test/addons-napi/test_handle_scope/build/binding.Makefile b/test/addons-napi/test_handle_scope/build/binding.Makefile
deleted file mode 100644
index 3999f7aa446a9b..00000000000000
--- a/test/addons-napi/test_handle_scope/build/binding.Makefile
+++ /dev/null
@@ -1,6 +0,0 @@
-# This file is generated by gyp; do not edit.
-
-export builddir_name ?= ./build/.
-.PHONY: all
-all:
- $(MAKE) test_handle_scope
diff --git a/test/addons-napi/test_handle_scope/build/config.gypi b/test/addons-napi/test_handle_scope/build/config.gypi
deleted file mode 100644
index 6a35cfccbb772b..00000000000000
--- a/test/addons-napi/test_handle_scope/build/config.gypi
+++ /dev/null
@@ -1,70 +0,0 @@
-# Do not edit. File was generated by node-gyp's "configure" step
-{
- "target_defaults": {
- "cflags": [],
- "default_configuration": "Release",
- "defines": [],
- "include_dirs": [],
- "libraries": []
- },
- "variables": {
- "asan": 0,
- "coverage": "false",
- "debug_devtools": "node",
- "debug_http2": "false",
- "debug_nghttp2": "false",
- "force_dynamic_crt": 0,
- "host_arch": "x64",
- "icu_data_file": "icudt59l.dat",
- "icu_data_in": "../../deps/icu-small/source/data/in/icudt59l.dat",
- "icu_endianness": "l",
- "icu_gyp_path": "tools/icu/icu-generic.gyp",
- "icu_locales": "en,root",
- "icu_path": "deps/icu-small",
- "icu_small": "true",
- "icu_ver_major": "59",
- "llvm_version": 0,
- "node_byteorder": "little",
- "node_enable_d8": "false",
- "node_enable_v8_vtunejit": "false",
- "node_install_npm": "true",
- "node_module_version": 58,
- "node_no_browser_globals": "false",
- "node_prefix": "/usr/local",
- "node_release_urlbase": "",
- "node_shared": "false",
- "node_shared_cares": "false",
- "node_shared_http_parser": "false",
- "node_shared_libuv": "false",
- "node_shared_openssl": "false",
- "node_shared_zlib": "false",
- "node_tag": "",
- "node_use_bundled_v8": "true",
- "node_use_dtrace": "true",
- "node_use_etw": "false",
- "node_use_lttng": "false",
- "node_use_openssl": "true",
- "node_use_perfctr": "false",
- "node_use_v8_platform": "true",
- "node_without_node_options": "false",
- "openssl_fips": "",
- "openssl_no_asm": 0,
- "shlib_suffix": "58.dylib",
- "target_arch": "x64",
- "uv_parent_path": "/deps/uv/",
- "uv_use_dtrace": "true",
- "v8_enable_gdbjit": 0,
- "v8_enable_i18n_support": 1,
- "v8_enable_inspector": 1,
- "v8_no_strict_aliasing": 1,
- "v8_optimized_debug": 0,
- "v8_promise_internal_field_count": 1,
- "v8_random_seed": 0,
- "v8_trace_maps": 0,
- "v8_use_snapshot": "true",
- "want_separate_host_toolset": 0,
- "xcode_version": "8.0",
- "nodedir": "/Users/trott/io.js",
- "standalone_static_library": 1
- }
-}
diff --git a/test/addons-napi/test_handle_scope/build/gyp-mac-tool b/test/addons-napi/test_handle_scope/build/gyp-mac-tool
deleted file mode 100755
index 8ef02b0493a003..00000000000000
--- a/test/addons-napi/test_handle_scope/build/gyp-mac-tool
+++ /dev/null
@@ -1,611 +0,0 @@
-#!/usr/bin/env python
-# Generated by gyp. Do not edit.
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Utility functions to perform Xcode-style build steps.
-
-These functions are executed via gyp-mac-tool when using the Makefile generator.
-"""
-
-import fcntl
-import fnmatch
-import glob
-import json
-import os
-import plistlib
-import re
-import shutil
-import string
-import subprocess
-import sys
-import tempfile
-
-
-def main(args):
- executor = MacTool()
- exit_code = executor.Dispatch(args)
- if exit_code is not None:
- sys.exit(exit_code)
-
-
-class MacTool(object):
- """This class performs all the Mac tooling steps. The methods can either be
- executed directly, or dispatched from an argument list."""
-
- def Dispatch(self, args):
- """Dispatches a string command to a method."""
- if len(args) < 1:
- raise Exception("Not enough arguments")
-
- method = "Exec%s" % self._CommandifyName(args[0])
- return getattr(self, method)(*args[1:])
-
- def _CommandifyName(self, name_string):
- """Transforms a tool name like copy-info-plist to CopyInfoPlist"""
- return name_string.title().replace('-', '')
-
- def ExecCopyBundleResource(self, source, dest, convert_to_binary):
- """Copies a resource file to the bundle/Resources directory, performing any
- necessary compilation on each resource."""
- extension = os.path.splitext(source)[1].lower()
- if os.path.isdir(source):
- # Copy tree.
- # TODO(thakis): This copies file attributes like mtime, while the
- # single-file branch below doesn't. This should probably be changed to
- # be consistent with the single-file branch.
- if os.path.exists(dest):
- shutil.rmtree(dest)
- shutil.copytree(source, dest)
- elif extension == '.xib':
- return self._CopyXIBFile(source, dest)
- elif extension == '.storyboard':
- return self._CopyXIBFile(source, dest)
- elif extension == '.strings':
- self._CopyStringsFile(source, dest, convert_to_binary)
- else:
- shutil.copy(source, dest)
-
- def _CopyXIBFile(self, source, dest):
- """Compiles a XIB file with ibtool into a binary plist in the bundle."""
-
- # ibtool sometimes crashes with relative paths. See crbug.com/314728.
- base = os.path.dirname(os.path.realpath(__file__))
- if os.path.relpath(source):
- source = os.path.join(base, source)
- if os.path.relpath(dest):
- dest = os.path.join(base, dest)
-
- args = ['xcrun', 'ibtool', '--errors', '--warnings', '--notices',
- '--output-format', 'human-readable-text', '--compile', dest, source]
- ibtool_section_re = re.compile(r'/\*.*\*/')
- ibtool_re = re.compile(r'.*note:.*is clipping its content')
- ibtoolout = subprocess.Popen(args, stdout=subprocess.PIPE)
- current_section_header = None
- for line in ibtoolout.stdout:
- if ibtool_section_re.match(line):
- current_section_header = line
- elif not ibtool_re.match(line):
- if current_section_header:
- sys.stdout.write(current_section_header)
- current_section_header = None
- sys.stdout.write(line)
- return ibtoolout.returncode
-
- def _ConvertToBinary(self, dest):
- subprocess.check_call([
- 'xcrun', 'plutil', '-convert', 'binary1', '-o', dest, dest])
-
- def _CopyStringsFile(self, source, dest, convert_to_binary):
- """Copies a .strings file using iconv to reconvert the input into UTF-16."""
- input_code = self._DetectInputEncoding(source) or "UTF-8"
-
- # Xcode's CpyCopyStringsFile / builtin-copyStrings seems to call
- # CFPropertyListCreateFromXMLData() behind the scenes; at least it prints
- # CFPropertyListCreateFromXMLData(): Old-style plist parser: missing
- # semicolon in dictionary.
- # on invalid files. Do the same kind of validation.
- import CoreFoundation
- s = open(source, 'rb').read()
- d = CoreFoundation.CFDataCreate(None, s, len(s))
- _, error = CoreFoundation.CFPropertyListCreateFromXMLData(None, d, 0, None)
- if error:
- return
-
- fp = open(dest, 'wb')
- fp.write(s.decode(input_code).encode('UTF-16'))
- fp.close()
-
- if convert_to_binary == 'True':
- self._ConvertToBinary(dest)
-
- def _DetectInputEncoding(self, file_name):
- """Reads the first few bytes from file_name and tries to guess the text
- encoding. Returns None as a guess if it can't detect it."""
- fp = open(file_name, 'rb')
- try:
- header = fp.read(3)
-    except Exception:
- fp.close()
- return None
- fp.close()
- if header.startswith("\xFE\xFF"):
- return "UTF-16"
- elif header.startswith("\xFF\xFE"):
- return "UTF-16"
- elif header.startswith("\xEF\xBB\xBF"):
- return "UTF-8"
- else:
- return None
-
- def ExecCopyInfoPlist(self, source, dest, convert_to_binary, *keys):
- """Copies the |source| Info.plist to the destination directory |dest|."""
- # Read the source Info.plist into memory.
- fd = open(source, 'r')
- lines = fd.read()
- fd.close()
-
- # Insert synthesized key/value pairs (e.g. BuildMachineOSBuild).
- plist = plistlib.readPlistFromString(lines)
- if keys:
- plist = dict(plist.items() + json.loads(keys[0]).items())
- lines = plistlib.writePlistToString(plist)
-
- # Go through all the environment variables and replace them as variables in
- # the file.
- IDENT_RE = re.compile(r'[/\s]')
- for key in os.environ:
- if key.startswith('_'):
- continue
- evar = '${%s}' % key
- evalue = os.environ[key]
- lines = string.replace(lines, evar, evalue)
-
-      # Xcode supports various suffixes on environment variables, which are
- # all undocumented. :rfc1034identifier is used in the standard project
- # template these days, and :identifier was used earlier. They are used to
- # convert non-url characters into things that look like valid urls --
- # except that the replacement character for :identifier, '_' isn't valid
- # in a URL either -- oops, hence :rfc1034identifier was born.
- evar = '${%s:identifier}' % key
- evalue = IDENT_RE.sub('_', os.environ[key])
- lines = string.replace(lines, evar, evalue)
-
- evar = '${%s:rfc1034identifier}' % key
- evalue = IDENT_RE.sub('-', os.environ[key])
- lines = string.replace(lines, evar, evalue)
-
- # Remove any keys with values that haven't been replaced.
- lines = lines.split('\n')
- for i in range(len(lines)):
- if lines[i].strip().startswith("${"):
- lines[i] = None
- lines[i - 1] = None
- lines = '\n'.join(filter(lambda x: x is not None, lines))
-
- # Write out the file with variables replaced.
- fd = open(dest, 'w')
- fd.write(lines)
- fd.close()
-
- # Now write out PkgInfo file now that the Info.plist file has been
- # "compiled".
- self._WritePkgInfo(dest)
-
- if convert_to_binary == 'True':
- self._ConvertToBinary(dest)
-
- def _WritePkgInfo(self, info_plist):
- """This writes the PkgInfo file from the data stored in Info.plist."""
- plist = plistlib.readPlist(info_plist)
- if not plist:
- return
-
- # Only create PkgInfo for executable types.
- package_type = plist['CFBundlePackageType']
- if package_type != 'APPL':
- return
-
- # The format of PkgInfo is eight characters, representing the bundle type
- # and bundle signature, each four characters. If that is missing, four
- # '?' characters are used instead.
- signature_code = plist.get('CFBundleSignature', '????')
- if len(signature_code) != 4: # Wrong length resets everything, too.
- signature_code = '?' * 4
-
- dest = os.path.join(os.path.dirname(info_plist), 'PkgInfo')
- fp = open(dest, 'w')
- fp.write('%s%s' % (package_type, signature_code))
- fp.close()
-
- def ExecFlock(self, lockfile, *cmd_list):
- """Emulates the most basic behavior of Linux's flock(1)."""
- # Rely on exception handling to report errors.
- fd = os.open(lockfile, os.O_RDONLY|os.O_NOCTTY|os.O_CREAT, 0o666)
- fcntl.flock(fd, fcntl.LOCK_EX)
- return subprocess.call(cmd_list)
-
- def ExecFilterLibtool(self, *cmd_list):
- """Calls libtool and filters out '/path/to/libtool: file: foo.o has no
- symbols'."""
- libtool_re = re.compile(r'^.*libtool: file: .* has no symbols$')
- libtool_re5 = re.compile(
- r'^.*libtool: warning for library: ' +
- r'.* the table of contents is empty ' +
- r'\(no object file members in the library define global symbols\)$')
- env = os.environ.copy()
- # Ref:
- # http://www.opensource.apple.com/source/cctools/cctools-809/misc/libtool.c
- # The problem with this flag is that it resets the file mtime on the file to
- # epoch=0, e.g. 1970-1-1 or 1969-12-31 depending on timezone.
- env['ZERO_AR_DATE'] = '1'
- libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env)
- _, err = libtoolout.communicate()
- for line in err.splitlines():
- if not libtool_re.match(line) and not libtool_re5.match(line):
- print >>sys.stderr, line
- # Unconditionally touch the output .a file on the command line if present
- # and the command succeeded. A bit hacky.
- if not libtoolout.returncode:
- for i in range(len(cmd_list) - 1):
- if cmd_list[i] == "-o" and cmd_list[i+1].endswith('.a'):
- os.utime(cmd_list[i+1], None)
- break
- return libtoolout.returncode
-
- def ExecPackageFramework(self, framework, version):
- """Takes a path to Something.framework and the Current version of that and
- sets up all the symlinks."""
- # Find the name of the binary based on the part before the ".framework".
- binary = os.path.basename(framework).split('.')[0]
-
- CURRENT = 'Current'
- RESOURCES = 'Resources'
- VERSIONS = 'Versions'
-
- if not os.path.exists(os.path.join(framework, VERSIONS, version, binary)):
- # Binary-less frameworks don't seem to contain symlinks (see e.g.
- # chromium's out/Debug/org.chromium.Chromium.manifest/ bundle).
- return
-
- # Move into the framework directory to set the symlinks correctly.
- pwd = os.getcwd()
- os.chdir(framework)
-
- # Set up the Current version.
- self._Relink(version, os.path.join(VERSIONS, CURRENT))
-
- # Set up the root symlinks.
- self._Relink(os.path.join(VERSIONS, CURRENT, binary), binary)
- self._Relink(os.path.join(VERSIONS, CURRENT, RESOURCES), RESOURCES)
-
- # Back to where we were before!
- os.chdir(pwd)
-
- def _Relink(self, dest, link):
- """Creates a symlink to |dest| named |link|. If |link| already exists,
- it is overwritten."""
- if os.path.lexists(link):
- os.remove(link)
- os.symlink(dest, link)
-
- def ExecCompileXcassets(self, keys, *inputs):
- """Compiles multiple .xcassets files into a single .car file.
-
-    This invokes 'actool' to compile all the input .xcassets files. The
-    |keys| argument is a JSON-encoded dictionary of extra arguments to
-    pass to 'actool' when the asset catalog contains an application icon
- or a launch image.
-
- Note that 'actool' does not create the Assets.car file if the asset
-    catalog does not contain any imageset.
- """
- command_line = [
- 'xcrun', 'actool', '--output-format', 'human-readable-text',
- '--compress-pngs', '--notices', '--warnings', '--errors',
- ]
- is_iphone_target = 'IPHONEOS_DEPLOYMENT_TARGET' in os.environ
- if is_iphone_target:
- platform = os.environ['CONFIGURATION'].split('-')[-1]
- if platform not in ('iphoneos', 'iphonesimulator'):
- platform = 'iphonesimulator'
- command_line.extend([
- '--platform', platform, '--target-device', 'iphone',
- '--target-device', 'ipad', '--minimum-deployment-target',
- os.environ['IPHONEOS_DEPLOYMENT_TARGET'], '--compile',
- os.path.abspath(os.environ['CONTENTS_FOLDER_PATH']),
- ])
- else:
- command_line.extend([
- '--platform', 'macosx', '--target-device', 'mac',
- '--minimum-deployment-target', os.environ['MACOSX_DEPLOYMENT_TARGET'],
- '--compile',
- os.path.abspath(os.environ['UNLOCALIZED_RESOURCES_FOLDER_PATH']),
- ])
- if keys:
- keys = json.loads(keys)
- for key, value in keys.iteritems():
- arg_name = '--' + key
- if isinstance(value, bool):
- if value:
- command_line.append(arg_name)
- elif isinstance(value, list):
- for v in value:
- command_line.append(arg_name)
- command_line.append(str(v))
- else:
- command_line.append(arg_name)
- command_line.append(str(value))
-    # Note: actool crashes if input paths are relative, so use os.path.abspath
-    # to get absolute path names for the inputs.
- command_line.extend(map(os.path.abspath, inputs))
- subprocess.check_call(command_line)
-
- def ExecMergeInfoPlist(self, output, *inputs):
- """Merge multiple .plist files into a single .plist file."""
- merged_plist = {}
- for path in inputs:
- plist = self._LoadPlistMaybeBinary(path)
- self._MergePlist(merged_plist, plist)
- plistlib.writePlist(merged_plist, output)
-
- def ExecCodeSignBundle(self, key, resource_rules, entitlements, provisioning):
- """Code sign a bundle.
-
- This function tries to code sign an iOS bundle, following the same
- algorithm as Xcode:
- 1. copy ResourceRules.plist from the user or the SDK into the bundle,
-      2. pick the provisioning profile that best matches the bundle identifier,
- and copy it into the bundle as embedded.mobileprovision,
- 3. copy Entitlements.plist from user or SDK next to the bundle,
- 4. code sign the bundle.
- """
- resource_rules_path = self._InstallResourceRules(resource_rules)
- substitutions, overrides = self._InstallProvisioningProfile(
- provisioning, self._GetCFBundleIdentifier())
- entitlements_path = self._InstallEntitlements(
- entitlements, substitutions, overrides)
- subprocess.check_call([
- 'codesign', '--force', '--sign', key, '--resource-rules',
- resource_rules_path, '--entitlements', entitlements_path,
- os.path.join(
- os.environ['TARGET_BUILD_DIR'],
- os.environ['FULL_PRODUCT_NAME'])])
-
- def _InstallResourceRules(self, resource_rules):
- """Installs ResourceRules.plist from user or SDK into the bundle.
-
- Args:
- resource_rules: string, optional, path to the ResourceRules.plist file
-        to use, defaults to "${SDKROOT}/ResourceRules.plist"
-
- Returns:
- Path to the copy of ResourceRules.plist into the bundle.
- """
- source_path = resource_rules
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['CONTENTS_FOLDER_PATH'],
- 'ResourceRules.plist')
- if not source_path:
- source_path = os.path.join(
- os.environ['SDKROOT'], 'ResourceRules.plist')
- shutil.copy2(source_path, target_path)
- return target_path
-
- def _InstallProvisioningProfile(self, profile, bundle_identifier):
- """Installs embedded.mobileprovision into the bundle.
-
- Args:
- profile: string, optional, short name of the .mobileprovision file
- to use, if empty or the file is missing, the best file installed
- will be used
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
-
- Returns:
-      A tuple containing two dictionaries: variable substitutions and values
-      to override when generating the entitlements file.
- """
- source_path, provisioning_data, team_id = self._FindProvisioningProfile(
- profile, bundle_identifier)
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['CONTENTS_FOLDER_PATH'],
- 'embedded.mobileprovision')
- shutil.copy2(source_path, target_path)
- substitutions = self._GetSubstitutions(bundle_identifier, team_id + '.')
- return substitutions, provisioning_data['Entitlements']
-
- def _FindProvisioningProfile(self, profile, bundle_identifier):
- """Finds the .mobileprovision file to use for signing the bundle.
-
-    Checks all the installed provisioning profiles (or, if the user specified
-    the PROVISIONING_PROFILE variable, only consults it) and selects the most
-    specific one that corresponds to the bundle identifier.
-
- Args:
- profile: string, optional, short name of the .mobileprovision file
- to use, if empty or the file is missing, the best file installed
- will be used
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
-
- Returns:
- A tuple of the path to the selected provisioning profile, the data of
- the embedded plist in the provisioning profile and the team identifier
- to use for code signing.
-
- Raises:
- SystemExit: if no .mobileprovision can be used to sign the bundle.
- """
- profiles_dir = os.path.join(
- os.environ['HOME'], 'Library', 'MobileDevice', 'Provisioning Profiles')
- if not os.path.isdir(profiles_dir):
- print >>sys.stderr, (
- 'cannot find mobile provisioning for %s' % bundle_identifier)
- sys.exit(1)
- provisioning_profiles = None
- if profile:
- profile_path = os.path.join(profiles_dir, profile + '.mobileprovision')
- if os.path.exists(profile_path):
- provisioning_profiles = [profile_path]
- if not provisioning_profiles:
- provisioning_profiles = glob.glob(
- os.path.join(profiles_dir, '*.mobileprovision'))
- valid_provisioning_profiles = {}
- for profile_path in provisioning_profiles:
- profile_data = self._LoadProvisioningProfile(profile_path)
- app_id_pattern = profile_data.get(
- 'Entitlements', {}).get('application-identifier', '')
- for team_identifier in profile_data.get('TeamIdentifier', []):
- app_id = '%s.%s' % (team_identifier, bundle_identifier)
- if fnmatch.fnmatch(app_id, app_id_pattern):
- valid_provisioning_profiles[app_id_pattern] = (
- profile_path, profile_data, team_identifier)
- if not valid_provisioning_profiles:
- print >>sys.stderr, (
- 'cannot find mobile provisioning for %s' % bundle_identifier)
- sys.exit(1)
- # If the user has multiple provisioning profiles installed that can be
- # used for ${bundle_identifier}, pick the most specific one (ie. the
- # provisioning profile whose pattern is the longest).
- selected_key = max(valid_provisioning_profiles, key=lambda v: len(v))
- return valid_provisioning_profiles[selected_key]
-
- def _LoadProvisioningProfile(self, profile_path):
- """Extracts the plist embedded in a provisioning profile.
-
- Args:
- profile_path: string, path to the .mobileprovision file
-
- Returns:
- Content of the plist embedded in the provisioning profile as a dictionary.
- """
- with tempfile.NamedTemporaryFile() as temp:
- subprocess.check_call([
- 'security', 'cms', '-D', '-i', profile_path, '-o', temp.name])
- return self._LoadPlistMaybeBinary(temp.name)
-
- def _MergePlist(self, merged_plist, plist):
- """Merge |plist| into |merged_plist|."""
- for key, value in plist.iteritems():
- if isinstance(value, dict):
- merged_value = merged_plist.get(key, {})
- if isinstance(merged_value, dict):
- self._MergePlist(merged_value, value)
- merged_plist[key] = merged_value
- else:
- merged_plist[key] = value
- else:
- merged_plist[key] = value
-
- def _LoadPlistMaybeBinary(self, plist_path):
-    """Loads into memory a plist possibly encoded in binary format.
-
- This is a wrapper around plistlib.readPlist that tries to convert the
- plist to the XML format if it can't be parsed (assuming that it is in
- the binary format).
-
- Args:
- plist_path: string, path to a plist file, in XML or binary format
-
- Returns:
- Content of the plist as a dictionary.
- """
- try:
- # First, try to read the file using plistlib that only supports XML,
- # and if an exception is raised, convert a temporary copy to XML and
- # load that copy.
- return plistlib.readPlist(plist_path)
- except:
- pass
- with tempfile.NamedTemporaryFile() as temp:
- shutil.copy2(plist_path, temp.name)
- subprocess.check_call(['plutil', '-convert', 'xml1', temp.name])
- return plistlib.readPlist(temp.name)
-
- def _GetSubstitutions(self, bundle_identifier, app_identifier_prefix):
- """Constructs a dictionary of variable substitutions for Entitlements.plist.
-
- Args:
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
- app_identifier_prefix: string, value for AppIdentifierPrefix
-
- Returns:
- Dictionary of substitutions to apply when generating Entitlements.plist.
- """
- return {
- 'CFBundleIdentifier': bundle_identifier,
- 'AppIdentifierPrefix': app_identifier_prefix,
- }
-
- def _GetCFBundleIdentifier(self):
- """Extracts CFBundleIdentifier value from Info.plist in the bundle.
-
- Returns:
- Value of CFBundleIdentifier in the Info.plist located in the bundle.
- """
- info_plist_path = os.path.join(
- os.environ['TARGET_BUILD_DIR'],
- os.environ['INFOPLIST_PATH'])
- info_plist_data = self._LoadPlistMaybeBinary(info_plist_path)
- return info_plist_data['CFBundleIdentifier']
-
- def _InstallEntitlements(self, entitlements, substitutions, overrides):
-    """Generates and installs the ${BundleName}.xcent entitlements file.
-
-    Expands the "$(variable)" patterns in the source entitlements file,
-    adds extra entitlements defined in the .mobileprovision file, and copies
-    the generated plist to "${BundlePath}.xcent".
-
- Args:
- entitlements: string, optional, path to the Entitlements.plist template
- to use, defaults to "${SDKROOT}/Entitlements.plist"
- substitutions: dictionary, variable substitutions
- overrides: dictionary, values to add to the entitlements
-
- Returns:
- Path to the generated entitlements file.
- """
- source_path = entitlements
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['PRODUCT_NAME'] + '.xcent')
- if not source_path:
- source_path = os.path.join(
- os.environ['SDKROOT'],
- 'Entitlements.plist')
- shutil.copy2(source_path, target_path)
- data = self._LoadPlistMaybeBinary(target_path)
- data = self._ExpandVariables(data, substitutions)
- if overrides:
- for key in overrides:
- if key not in data:
- data[key] = overrides[key]
- plistlib.writePlist(data, target_path)
- return target_path
-
- def _ExpandVariables(self, data, substitutions):
- """Expands variables "$(variable)" in data.
-
- Args:
- data: object, can be either string, list or dictionary
- substitutions: dictionary, variable substitutions to perform
-
- Returns:
-      Copy of data where each reference to "$(variable)" has been replaced
- by the corresponding value found in substitutions, or left intact if
- the key was not found.
- """
- if isinstance(data, str):
- for key, value in substitutions.iteritems():
- data = data.replace('$(%s)' % key, value)
- return data
- if isinstance(data, list):
- return [self._ExpandVariables(v, substitutions) for v in data]
- if isinstance(data, dict):
- return {k: self._ExpandVariables(data[k], substitutions) for k in data}
- return data
-
-if __name__ == '__main__':
- sys.exit(main(sys.argv[1:]))
diff --git a/test/addons-napi/test_handle_scope/build/test_handle_scope.target.mk b/test/addons-napi/test_handle_scope/build/test_handle_scope.target.mk
deleted file mode 100644
index decab157b0082c..00000000000000
--- a/test/addons-napi/test_handle_scope/build/test_handle_scope.target.mk
+++ /dev/null
@@ -1,178 +0,0 @@
-# This file is generated by gyp; do not edit.
-
-TOOLSET := target
-TARGET := test_handle_scope
-DEFS_Debug := \
- '-DNODE_GYP_MODULE_NAME=test_handle_scope' \
- '-DUSING_UV_SHARED=1' \
- '-DUSING_V8_SHARED=1' \
- '-DV8_DEPRECATION_WARNINGS=1' \
- '-D_DARWIN_USE_64_BIT_INODE=1' \
- '-D_LARGEFILE_SOURCE' \
- '-D_FILE_OFFSET_BITS=64' \
- '-DBUILDING_NODE_EXTENSION' \
- '-DDEBUG' \
- '-D_DEBUG' \
- '-DV8_ENABLE_CHECKS'
-
-# Flags passed to all source files.
-CFLAGS_Debug := \
- -O0 \
- -gdwarf-2 \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -Wall \
- -Wendif-labels \
- -W \
- -Wno-unused-parameter
-
-# Flags passed to only C files.
-CFLAGS_C_Debug := \
- -fno-strict-aliasing
-
-# Flags passed to only C++ files.
-CFLAGS_CC_Debug := \
- -std=gnu++0x \
- -stdlib=libc++ \
- -fno-rtti \
- -fno-exceptions \
- -fno-threadsafe-statics \
- -fno-strict-aliasing
-
-# Flags passed to only ObjC files.
-CFLAGS_OBJC_Debug :=
-
-# Flags passed to only ObjC++ files.
-CFLAGS_OBJCC_Debug :=
-
-INCS_Debug := \
- -I/Users/trott/io.js/include/node \
- -I/Users/trott/io.js/src \
- -I/Users/trott/io.js/deps/uv/include \
- -I/Users/trott/io.js/deps/v8/include
-
-DEFS_Release := \
- '-DNODE_GYP_MODULE_NAME=test_handle_scope' \
- '-DUSING_UV_SHARED=1' \
- '-DUSING_V8_SHARED=1' \
- '-DV8_DEPRECATION_WARNINGS=1' \
- '-D_DARWIN_USE_64_BIT_INODE=1' \
- '-D_LARGEFILE_SOURCE' \
- '-D_FILE_OFFSET_BITS=64' \
- '-DBUILDING_NODE_EXTENSION'
-
-# Flags passed to all source files.
-CFLAGS_Release := \
- -Os \
- -gdwarf-2 \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -Wall \
- -Wendif-labels \
- -W \
- -Wno-unused-parameter
-
-# Flags passed to only C files.
-CFLAGS_C_Release := \
- -fno-strict-aliasing
-
-# Flags passed to only C++ files.
-CFLAGS_CC_Release := \
- -std=gnu++0x \
- -stdlib=libc++ \
- -fno-rtti \
- -fno-exceptions \
- -fno-threadsafe-statics \
- -fno-strict-aliasing
-
-# Flags passed to only ObjC files.
-CFLAGS_OBJC_Release :=
-
-# Flags passed to only ObjC++ files.
-CFLAGS_OBJCC_Release :=
-
-INCS_Release := \
- -I/Users/trott/io.js/include/node \
- -I/Users/trott/io.js/src \
- -I/Users/trott/io.js/deps/uv/include \
- -I/Users/trott/io.js/deps/v8/include
-
-OBJS := \
- $(obj).target/$(TARGET)/test_handle_scope.o
-
-# Add to the list of files we specially track dependencies for.
-all_deps += $(OBJS)
-
-# CFLAGS et al overrides must be target-local.
-# See "Target-specific Variable Values" in the GNU Make manual.
-$(OBJS): TOOLSET := $(TOOLSET)
-$(OBJS): GYP_CFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE))
-$(OBJS): GYP_CXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE))
-$(OBJS): GYP_OBJCFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) $(CFLAGS_OBJC_$(BUILDTYPE))
-$(OBJS): GYP_OBJCXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) $(CFLAGS_OBJCC_$(BUILDTYPE))
-
-# Suffix rules, putting all outputs into $(obj).
-
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-# Try building from generated source, too.
-
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-# End of this set of suffix rules
-### Rules for final target.
-LDFLAGS_Debug := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -L$(builddir) \
- -stdlib=libc++
-
-LIBTOOLFLAGS_Debug := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first
-
-LDFLAGS_Release := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -L$(builddir) \
- -stdlib=libc++
-
-LIBTOOLFLAGS_Release := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first
-
-LIBS :=
-
-$(builddir)/test_handle_scope.node: GYP_LDFLAGS := $(LDFLAGS_$(BUILDTYPE))
-$(builddir)/test_handle_scope.node: LIBS := $(LIBS)
-$(builddir)/test_handle_scope.node: GYP_LIBTOOLFLAGS := $(LIBTOOLFLAGS_$(BUILDTYPE))
-$(builddir)/test_handle_scope.node: TOOLSET := $(TOOLSET)
-$(builddir)/test_handle_scope.node: $(OBJS) FORCE_DO_CMD
- $(call do_cmd,solink_module)
-
-all_deps += $(builddir)/test_handle_scope.node
-# Add target alias
-.PHONY: test_handle_scope
-test_handle_scope: $(builddir)/test_handle_scope.node
-
-# Short alias for building this executable.
-.PHONY: test_handle_scope.node
-test_handle_scope.node: $(builddir)/test_handle_scope.node
-
-# Add executable to "all" target.
-.PHONY: all
-all: $(builddir)/test_handle_scope.node
-
diff --git a/test/addons-napi/test_make_callback/build/Makefile b/test/addons-napi/test_make_callback/build/Makefile
deleted file mode 100644
index f3fd26bef84d07..00000000000000
--- a/test/addons-napi/test_make_callback/build/Makefile
+++ /dev/null
@@ -1,342 +0,0 @@
-# We borrow heavily from the kernel build setup, though we are simpler since
-# we don't have Kconfig tweaking settings on us.
-
-# The implicit make rules have it looking for RCS files, among other things.
-# We instead explicitly write all the rules we care about.
-# It's even quicker (saves ~200ms) to pass -r on the command line.
-MAKEFLAGS=-r
-
-# The source directory tree.
-srcdir := ..
-abs_srcdir := $(abspath $(srcdir))
-
-# The name of the builddir.
-builddir_name ?= .
-
-# The V=1 flag on command line makes us verbosely print command lines.
-ifdef V
- quiet=
-else
- quiet=quiet_
-endif
-
-# Specify BUILDTYPE=Release on the command line for a release build.
-BUILDTYPE ?= Release
-
-# Directory all our build output goes into.
-# Note that this must be two directories beneath src/ for unit tests to pass,
-# as they reach into the src/ directory for data with relative paths.
-builddir ?= $(builddir_name)/$(BUILDTYPE)
-abs_builddir := $(abspath $(builddir))
-depsdir := $(builddir)/.deps
-
-# Object output directory.
-obj := $(builddir)/obj
-abs_obj := $(abspath $(obj))
-
-# We build up a list of every single one of the targets so we can slurp in the
-# generated dependency rule Makefiles in one pass.
-all_deps :=
-
-
-
-CC.target ?= $(CC)
-CFLAGS.target ?= $(CPPFLAGS) $(CFLAGS)
-CXX.target ?= $(CXX)
-CXXFLAGS.target ?= $(CPPFLAGS) $(CXXFLAGS)
-LINK.target ?= $(LINK)
-LDFLAGS.target ?= $(LDFLAGS)
-AR.target ?= $(AR)
-
-# C++ apps need to be linked with g++.
-LINK ?= $(CXX.target)
-
-# TODO(evan): move all cross-compilation logic to gyp-time so we don't need
-# to replicate this environment fallback in make as well.
-CC.host ?= gcc
-CFLAGS.host ?= $(CPPFLAGS_host) $(CFLAGS_host)
-CXX.host ?= g++
-CXXFLAGS.host ?= $(CPPFLAGS_host) $(CXXFLAGS_host)
-LINK.host ?= $(CXX.host)
-LDFLAGS.host ?=
-AR.host ?= ar
-
-# Define a dir function that can handle spaces.
-# http://www.gnu.org/software/make/manual/make.html#Syntax-of-Functions
-# "leading spaces cannot appear in the text of the first argument as written.
-# These characters can be put into the argument value by variable substitution."
-empty :=
-space := $(empty) $(empty)
-
-# http://stackoverflow.com/questions/1189781/using-make-dir-or-notdir-on-a-path-with-spaces
-replace_spaces = $(subst $(space),?,$1)
-unreplace_spaces = $(subst ?,$(space),$1)
-dirx = $(call unreplace_spaces,$(dir $(call replace_spaces,$1)))
-
-# Flags to make gcc output dependency info. Note that you need to be
-# careful here to use the flags that ccache and distcc can understand.
-# We write to a dep file on the side first and then rename at the end
-# so we can't end up with a broken dep file.
-depfile = $(depsdir)/$(call replace_spaces,$@).d
-DEPFLAGS = -MMD -MF $(depfile).raw
-
-# We have to fixup the deps output in a few ways.
-# (1) the file output should mention the proper .o file.
-# ccache or distcc lose the path to the target, so we convert a rule of
-# the form:
-# foobar.o: DEP1 DEP2
-# into
-# path/to/foobar.o: DEP1 DEP2
-# (2) we want missing files not to cause us to fail to build.
-# We want to rewrite
-# foobar.o: DEP1 DEP2 \
-# DEP3
-# to
-# DEP1:
-# DEP2:
-# DEP3:
-# so if the files are missing, they're just considered phony rules.
-# We have to do some pretty insane escaping to get those backslashes
-# and dollar signs past make, the shell, and sed at the same time.
-# Doesn't work with spaces, but that's fine: .d files have spaces in
-# their names replaced with other characters.
-define fixup_dep
-# The depfile may not exist if the input file didn't have any #includes.
-touch $(depfile).raw
-# Fixup path as in (1).
-sed -e "s|^$(notdir $@)|$@|" $(depfile).raw >> $(depfile)
-# Add extra rules as in (2).
-# We remove slashes and replace spaces with new lines;
-# remove blank lines;
-# delete the first line and append a colon to the remaining lines.
-sed -e 's|\\||' -e 'y| |\n|' $(depfile).raw |\
- grep -v '^$$' |\
- sed -e 1d -e 's|$$|:|' \
- >> $(depfile)
-rm $(depfile).raw
-endef
-
-# Command definitions:
-# - cmd_foo is the actual command to run;
-# - quiet_cmd_foo is the brief-output summary of the command.
-
-quiet_cmd_cc = CC($(TOOLSET)) $@
-cmd_cc = $(CC.$(TOOLSET)) $(GYP_CFLAGS) $(DEPFLAGS) $(CFLAGS.$(TOOLSET)) -c -o $@ $<
-
-quiet_cmd_cxx = CXX($(TOOLSET)) $@
-cmd_cxx = $(CXX.$(TOOLSET)) $(GYP_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-
-quiet_cmd_objc = CXX($(TOOLSET)) $@
-cmd_objc = $(CC.$(TOOLSET)) $(GYP_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-quiet_cmd_objcxx = CXX($(TOOLSET)) $@
-cmd_objcxx = $(CXX.$(TOOLSET)) $(GYP_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-# Commands for precompiled header files.
-quiet_cmd_pch_c = CXX($(TOOLSET)) $@
-cmd_pch_c = $(CC.$(TOOLSET)) $(GYP_PCH_CFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-quiet_cmd_pch_cc = CXX($(TOOLSET)) $@
-cmd_pch_cc = $(CC.$(TOOLSET)) $(GYP_PCH_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-quiet_cmd_pch_m = CXX($(TOOLSET)) $@
-cmd_pch_m = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $<
-quiet_cmd_pch_mm = CXX($(TOOLSET)) $@
-cmd_pch_mm = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-# gyp-mac-tool is written next to the root Makefile by gyp.
-# Use $(4) for the command, since $(2) and $(3) are used as flag by do_cmd
-# already.
-quiet_cmd_mac_tool = MACTOOL $(4) $<
-cmd_mac_tool = ./gyp-mac-tool $(4) $< "$@"
-
-quiet_cmd_mac_package_framework = PACKAGE FRAMEWORK $@
-cmd_mac_package_framework = ./gyp-mac-tool package-framework "$@" $(4)
-
-quiet_cmd_infoplist = INFOPLIST $@
-cmd_infoplist = $(CC.$(TOOLSET)) -E -P -Wno-trigraphs -x c $(INFOPLIST_DEFINES) "$<" -o "$@"
-
-quiet_cmd_touch = TOUCH $@
-cmd_touch = touch $@
-
-quiet_cmd_copy = COPY $@
-# send stderr to /dev/null to ignore messages when linking directories.
-cmd_copy = rm -rf "$@" && cp -af "$<" "$@"
-
-quiet_cmd_alink = LIBTOOL-STATIC $@
-cmd_alink = rm -f $@ && ./gyp-mac-tool filter-libtool libtool $(GYP_LIBTOOLFLAGS) -static -o $@ $(filter %.o,$^)
-
-quiet_cmd_link = LINK($(TOOLSET)) $@
-cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS)
-
-quiet_cmd_solink = SOLINK($(TOOLSET)) $@
-cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS)
-
-quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@
-cmd_solink_module = $(LINK.$(TOOLSET)) -bundle $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS)
-
-
-# Define an escape_quotes function to escape single quotes.
-# This allows us to handle quotes properly as long as we always use
-# use single quotes and escape_quotes.
-escape_quotes = $(subst ','\'',$(1))
-# This comment is here just to include a ' to unconfuse syntax highlighting.
-# Define an escape_vars function to escape '$' variable syntax.
-# This allows us to read/write command lines with shell variables (e.g.
-# $LD_LIBRARY_PATH), without triggering make substitution.
-escape_vars = $(subst $$,$$$$,$(1))
-# Helper that expands to a shell command to echo a string exactly as it is in
-# make. This uses printf instead of echo because printf's behaviour with respect
-# to escape sequences is more portable than echo's across different shells
-# (e.g., dash, bash).
-exact_echo = printf '%s\n' '$(call escape_quotes,$(1))'
-
-# Helper to compare the command we're about to run against the command
-# we logged the last time we ran the command. Produces an empty
-# string (false) when the commands match.
-# Tricky point: Make has no string-equality test function.
-# The kernel uses the following, but it seems like it would have false
-# positives, where one string reordered its arguments.
-# arg_check = $(strip $(filter-out $(cmd_$(1)), $(cmd_$@)) \
-# $(filter-out $(cmd_$@), $(cmd_$(1))))
-# We instead substitute each for the empty string into the other, and
-# say they're equal if both substitutions produce the empty string.
-# .d files contain ? instead of spaces, take that into account.
-command_changed = $(or $(subst $(cmd_$(1)),,$(cmd_$(call replace_spaces,$@))),\
- $(subst $(cmd_$(call replace_spaces,$@)),,$(cmd_$(1))))
-
-# Helper that is non-empty when a prerequisite changes.
-# Normally make does this implicitly, but we force rules to always run
-# so we can check their command lines.
-# $? -- new prerequisites
-# $| -- order-only dependencies
-prereq_changed = $(filter-out FORCE_DO_CMD,$(filter-out $|,$?))
-
-# Helper that executes all postbuilds until one fails.
-define do_postbuilds
- @E=0;\
- for p in $(POSTBUILDS); do\
- eval $$p;\
- E=$$?;\
- if [ $$E -ne 0 ]; then\
- break;\
- fi;\
- done;\
- if [ $$E -ne 0 ]; then\
- rm -rf "$@";\
- exit $$E;\
- fi
-endef
-
-# do_cmd: run a command via the above cmd_foo names, if necessary.
-# Should always run for a given target to handle command-line changes.
-# Second argument, if non-zero, makes it do asm/C/C++ dependency munging.
-# Third argument, if non-zero, makes it do POSTBUILDS processing.
-# Note: We intentionally do NOT call dirx for depfile, since it contains ? for
-# spaces already and dirx strips the ? characters.
-define do_cmd
-$(if $(or $(command_changed),$(prereq_changed)),
- @$(call exact_echo, $($(quiet)cmd_$(1)))
- @mkdir -p "$(call dirx,$@)" "$(dir $(depfile))"
- $(if $(findstring flock,$(word 2,$(cmd_$1))),
- @$(cmd_$(1))
- @echo " $(quiet_cmd_$(1)): Finished",
- @$(cmd_$(1))
- )
- @$(call exact_echo,$(call escape_vars,cmd_$(call replace_spaces,$@) := $(cmd_$(1)))) > $(depfile)
- @$(if $(2),$(fixup_dep))
- $(if $(and $(3), $(POSTBUILDS)),
- $(call do_postbuilds)
- )
-)
-endef
-
-# Declare the "all" target first so it is the default,
-# even though we don't have the deps yet.
-.PHONY: all
-all:
-
-# make looks for ways to re-generate included makefiles, but in our case, we
-# don't have a direct way. Explicitly telling make that it has nothing to do
-# for them makes it go faster.
-%.d: ;
-
-# Use FORCE_DO_CMD to force a target to run. Should be coupled with
-# do_cmd.
-.PHONY: FORCE_DO_CMD
-FORCE_DO_CMD:
-
-TOOLSET := target
-# Suffix rules, putting all outputs into $(obj).
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cpp FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cxx FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.m FORCE_DO_CMD
- @$(call do_cmd,objc,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.mm FORCE_DO_CMD
- @$(call do_cmd,objcxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.S FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.s FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-# Try building from generated source, too.
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cpp FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cxx FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.m FORCE_DO_CMD
- @$(call do_cmd,objc,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.mm FORCE_DO_CMD
- @$(call do_cmd,objcxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.S FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.s FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-$(obj).$(TOOLSET)/%.o: $(obj)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.cpp FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.cxx FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.m FORCE_DO_CMD
- @$(call do_cmd,objc,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.mm FORCE_DO_CMD
- @$(call do_cmd,objcxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.S FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.s FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-
-ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
- $(findstring $(join ^,$(prefix)),\
- $(join ^,binding.target.mk)))),)
- include binding.target.mk
-endif
-
-quiet_cmd_regen_makefile = ACTION Regenerating $@
-cmd_regen_makefile = cd $(srcdir); /Users/trott/io.js/deps/npm/node_modules/node-gyp/gyp/gyp_main.py -fmake --ignore-environment "--toplevel-dir=." -I/Users/trott/io.js/test/addons-napi/test_make_callback/build/config.gypi -I/Users/trott/io.js/deps/npm/node_modules/node-gyp/addon.gypi -I/Users/trott/io.js/common.gypi "--depth=." "-Goutput_dir=." "--generator-output=build" "-Dlibrary=shared_library" "-Dvisibility=default" "-Dnode_root_dir=/Users/trott/io.js" "-Dnode_gyp_dir=/Users/trott/io.js/deps/npm/node_modules/node-gyp" "-Dnode_lib_file=/Users/trott/io.js/$(Configuration)/node.lib" "-Dmodule_root_dir=/Users/trott/io.js/test/addons-napi/test_make_callback" "-Dnode_engine=v8" binding.gyp
-Makefile: $(srcdir)/../../../deps/npm/node_modules/node-gyp/addon.gypi $(srcdir)/build/config.gypi $(srcdir)/binding.gyp $(srcdir)/../../../common.gypi
- $(call do_cmd,regen_makefile)
-
-# "all" is a concatenation of the "all" targets from all the included
-# sub-makefiles. This is just here to clarify.
-all:
-
-# Add in dependency-tracking rules. $(all_deps) is the list of every single
-# target in our tree. Only consider the ones with .d (dependency) info:
-d_files := $(wildcard $(foreach f,$(all_deps),$(depsdir)/$(f).d))
-ifneq ($(d_files),)
- include $(d_files)
-endif
diff --git a/test/addons-napi/test_make_callback/build/binding.Makefile b/test/addons-napi/test_make_callback/build/binding.Makefile
deleted file mode 100644
index 0556c70f61905f..00000000000000
--- a/test/addons-napi/test_make_callback/build/binding.Makefile
+++ /dev/null
@@ -1,6 +0,0 @@
-# This file is generated by gyp; do not edit.
-
-export builddir_name ?= ./build/.
-.PHONY: all
-all:
- $(MAKE) binding
diff --git a/test/addons-napi/test_make_callback/build/binding.target.mk b/test/addons-napi/test_make_callback/build/binding.target.mk
deleted file mode 100644
index fb7675d45de43e..00000000000000
--- a/test/addons-napi/test_make_callback/build/binding.target.mk
+++ /dev/null
@@ -1,178 +0,0 @@
-# This file is generated by gyp; do not edit.
-
-TOOLSET := target
-TARGET := binding
-DEFS_Debug := \
- '-DNODE_GYP_MODULE_NAME=binding' \
- '-DUSING_UV_SHARED=1' \
- '-DUSING_V8_SHARED=1' \
- '-DV8_DEPRECATION_WARNINGS=1' \
- '-D_DARWIN_USE_64_BIT_INODE=1' \
- '-D_LARGEFILE_SOURCE' \
- '-D_FILE_OFFSET_BITS=64' \
- '-DBUILDING_NODE_EXTENSION' \
- '-DDEBUG' \
- '-D_DEBUG' \
- '-DV8_ENABLE_CHECKS'
-
-# Flags passed to all source files.
-CFLAGS_Debug := \
- -O0 \
- -gdwarf-2 \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -Wall \
- -Wendif-labels \
- -W \
- -Wno-unused-parameter
-
-# Flags passed to only C files.
-CFLAGS_C_Debug := \
- -fno-strict-aliasing
-
-# Flags passed to only C++ files.
-CFLAGS_CC_Debug := \
- -std=gnu++0x \
- -stdlib=libc++ \
- -fno-rtti \
- -fno-exceptions \
- -fno-threadsafe-statics \
- -fno-strict-aliasing
-
-# Flags passed to only ObjC files.
-CFLAGS_OBJC_Debug :=
-
-# Flags passed to only ObjC++ files.
-CFLAGS_OBJCC_Debug :=
-
-INCS_Debug := \
- -I/Users/trott/io.js/include/node \
- -I/Users/trott/io.js/src \
- -I/Users/trott/io.js/deps/uv/include \
- -I/Users/trott/io.js/deps/v8/include
-
-DEFS_Release := \
- '-DNODE_GYP_MODULE_NAME=binding' \
- '-DUSING_UV_SHARED=1' \
- '-DUSING_V8_SHARED=1' \
- '-DV8_DEPRECATION_WARNINGS=1' \
- '-D_DARWIN_USE_64_BIT_INODE=1' \
- '-D_LARGEFILE_SOURCE' \
- '-D_FILE_OFFSET_BITS=64' \
- '-DBUILDING_NODE_EXTENSION'
-
-# Flags passed to all source files.
-CFLAGS_Release := \
- -Os \
- -gdwarf-2 \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -Wall \
- -Wendif-labels \
- -W \
- -Wno-unused-parameter
-
-# Flags passed to only C files.
-CFLAGS_C_Release := \
- -fno-strict-aliasing
-
-# Flags passed to only C++ files.
-CFLAGS_CC_Release := \
- -std=gnu++0x \
- -stdlib=libc++ \
- -fno-rtti \
- -fno-exceptions \
- -fno-threadsafe-statics \
- -fno-strict-aliasing
-
-# Flags passed to only ObjC files.
-CFLAGS_OBJC_Release :=
-
-# Flags passed to only ObjC++ files.
-CFLAGS_OBJCC_Release :=
-
-INCS_Release := \
- -I/Users/trott/io.js/include/node \
- -I/Users/trott/io.js/src \
- -I/Users/trott/io.js/deps/uv/include \
- -I/Users/trott/io.js/deps/v8/include
-
-OBJS := \
- $(obj).target/$(TARGET)/binding.o
-
-# Add to the list of files we specially track dependencies for.
-all_deps += $(OBJS)
-
-# CFLAGS et al overrides must be target-local.
-# See "Target-specific Variable Values" in the GNU Make manual.
-$(OBJS): TOOLSET := $(TOOLSET)
-$(OBJS): GYP_CFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE))
-$(OBJS): GYP_CXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE))
-$(OBJS): GYP_OBJCFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) $(CFLAGS_OBJC_$(BUILDTYPE))
-$(OBJS): GYP_OBJCXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) $(CFLAGS_OBJCC_$(BUILDTYPE))
-
-# Suffix rules, putting all outputs into $(obj).
-
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-
-# Try building from generated source, too.
-
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-
-# End of this set of suffix rules
-### Rules for final target.
-LDFLAGS_Debug := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -L$(builddir) \
- -stdlib=libc++
-
-LIBTOOLFLAGS_Debug := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first
-
-LDFLAGS_Release := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -L$(builddir) \
- -stdlib=libc++
-
-LIBTOOLFLAGS_Release := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first
-
-LIBS :=
-
-$(builddir)/binding.node: GYP_LDFLAGS := $(LDFLAGS_$(BUILDTYPE))
-$(builddir)/binding.node: LIBS := $(LIBS)
-$(builddir)/binding.node: GYP_LIBTOOLFLAGS := $(LIBTOOLFLAGS_$(BUILDTYPE))
-$(builddir)/binding.node: TOOLSET := $(TOOLSET)
-$(builddir)/binding.node: $(OBJS) FORCE_DO_CMD
- $(call do_cmd,solink_module)
-
-all_deps += $(builddir)/binding.node
-# Add target alias
-.PHONY: binding
-binding: $(builddir)/binding.node
-
-# Short alias for building this executable.
-.PHONY: binding.node
-binding.node: $(builddir)/binding.node
-
-# Add executable to "all" target.
-.PHONY: all
-all: $(builddir)/binding.node
-
diff --git a/test/addons-napi/test_make_callback/build/config.gypi b/test/addons-napi/test_make_callback/build/config.gypi
deleted file mode 100644
index 6a35cfccbb772b..00000000000000
--- a/test/addons-napi/test_make_callback/build/config.gypi
+++ /dev/null
@@ -1,70 +0,0 @@
-# Do not edit. File was generated by node-gyp's "configure" step
-{
- "target_defaults": {
- "cflags": [],
- "default_configuration": "Release",
- "defines": [],
- "include_dirs": [],
- "libraries": []
- },
- "variables": {
- "asan": 0,
- "coverage": "false",
- "debug_devtools": "node",
- "debug_http2": "false",
- "debug_nghttp2": "false",
- "force_dynamic_crt": 0,
- "host_arch": "x64",
- "icu_data_file": "icudt59l.dat",
- "icu_data_in": "../../deps/icu-small/source/data/in/icudt59l.dat",
- "icu_endianness": "l",
- "icu_gyp_path": "tools/icu/icu-generic.gyp",
- "icu_locales": "en,root",
- "icu_path": "deps/icu-small",
- "icu_small": "true",
- "icu_ver_major": "59",
- "llvm_version": 0,
- "node_byteorder": "little",
- "node_enable_d8": "false",
- "node_enable_v8_vtunejit": "false",
- "node_install_npm": "true",
- "node_module_version": 58,
- "node_no_browser_globals": "false",
- "node_prefix": "/usr/local",
- "node_release_urlbase": "",
- "node_shared": "false",
- "node_shared_cares": "false",
- "node_shared_http_parser": "false",
- "node_shared_libuv": "false",
- "node_shared_openssl": "false",
- "node_shared_zlib": "false",
- "node_tag": "",
- "node_use_bundled_v8": "true",
- "node_use_dtrace": "true",
- "node_use_etw": "false",
- "node_use_lttng": "false",
- "node_use_openssl": "true",
- "node_use_perfctr": "false",
- "node_use_v8_platform": "true",
- "node_without_node_options": "false",
- "openssl_fips": "",
- "openssl_no_asm": 0,
- "shlib_suffix": "58.dylib",
- "target_arch": "x64",
- "uv_parent_path": "/deps/uv/",
- "uv_use_dtrace": "true",
- "v8_enable_gdbjit": 0,
- "v8_enable_i18n_support": 1,
- "v8_enable_inspector": 1,
- "v8_no_strict_aliasing": 1,
- "v8_optimized_debug": 0,
- "v8_promise_internal_field_count": 1,
- "v8_random_seed": 0,
- "v8_trace_maps": 0,
- "v8_use_snapshot": "true",
- "want_separate_host_toolset": 0,
- "xcode_version": "8.0",
- "nodedir": "/Users/trott/io.js",
- "standalone_static_library": 1
- }
-}
diff --git a/test/addons-napi/test_make_callback/build/gyp-mac-tool b/test/addons-napi/test_make_callback/build/gyp-mac-tool
deleted file mode 100755
index 8ef02b0493a003..00000000000000
--- a/test/addons-napi/test_make_callback/build/gyp-mac-tool
+++ /dev/null
@@ -1,611 +0,0 @@
-#!/usr/bin/env python
-# Generated by gyp. Do not edit.
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Utility functions to perform Xcode-style build steps.
-
-These functions are executed via gyp-mac-tool when using the Makefile generator.
-"""
-
-import fcntl
-import fnmatch
-import glob
-import json
-import os
-import plistlib
-import re
-import shutil
-import string
-import subprocess
-import sys
-import tempfile
-
-
-def main(args):
- executor = MacTool()
- exit_code = executor.Dispatch(args)
- if exit_code is not None:
- sys.exit(exit_code)
-
-
-class MacTool(object):
- """This class performs all the Mac tooling steps. The methods can either be
- executed directly, or dispatched from an argument list."""
-
- def Dispatch(self, args):
- """Dispatches a string command to a method."""
- if len(args) < 1:
- raise Exception("Not enough arguments")
-
- method = "Exec%s" % self._CommandifyName(args[0])
- return getattr(self, method)(*args[1:])
-
- def _CommandifyName(self, name_string):
- """Transforms a tool name like copy-info-plist to CopyInfoPlist"""
- return name_string.title().replace('-', '')
-
- def ExecCopyBundleResource(self, source, dest, convert_to_binary):
- """Copies a resource file to the bundle/Resources directory, performing any
- necessary compilation on each resource."""
- extension = os.path.splitext(source)[1].lower()
- if os.path.isdir(source):
- # Copy tree.
- # TODO(thakis): This copies file attributes like mtime, while the
- # single-file branch below doesn't. This should probably be changed to
- # be consistent with the single-file branch.
- if os.path.exists(dest):
- shutil.rmtree(dest)
- shutil.copytree(source, dest)
- elif extension == '.xib':
- return self._CopyXIBFile(source, dest)
- elif extension == '.storyboard':
- return self._CopyXIBFile(source, dest)
- elif extension == '.strings':
- self._CopyStringsFile(source, dest, convert_to_binary)
- else:
- shutil.copy(source, dest)
-
- def _CopyXIBFile(self, source, dest):
- """Compiles a XIB file with ibtool into a binary plist in the bundle."""
-
- # ibtool sometimes crashes with relative paths. See crbug.com/314728.
- base = os.path.dirname(os.path.realpath(__file__))
- if os.path.relpath(source):
- source = os.path.join(base, source)
- if os.path.relpath(dest):
- dest = os.path.join(base, dest)
-
- args = ['xcrun', 'ibtool', '--errors', '--warnings', '--notices',
- '--output-format', 'human-readable-text', '--compile', dest, source]
- ibtool_section_re = re.compile(r'/\*.*\*/')
- ibtool_re = re.compile(r'.*note:.*is clipping its content')
- ibtoolout = subprocess.Popen(args, stdout=subprocess.PIPE)
- current_section_header = None
- for line in ibtoolout.stdout:
- if ibtool_section_re.match(line):
- current_section_header = line
- elif not ibtool_re.match(line):
- if current_section_header:
- sys.stdout.write(current_section_header)
- current_section_header = None
- sys.stdout.write(line)
- return ibtoolout.returncode
-
- def _ConvertToBinary(self, dest):
- subprocess.check_call([
- 'xcrun', 'plutil', '-convert', 'binary1', '-o', dest, dest])
-
- def _CopyStringsFile(self, source, dest, convert_to_binary):
- """Copies a .strings file using iconv to reconvert the input into UTF-16."""
- input_code = self._DetectInputEncoding(source) or "UTF-8"
-
- # Xcode's CpyCopyStringsFile / builtin-copyStrings seems to call
- # CFPropertyListCreateFromXMLData() behind the scenes; at least it prints
- # CFPropertyListCreateFromXMLData(): Old-style plist parser: missing
- # semicolon in dictionary.
- # on invalid files. Do the same kind of validation.
- import CoreFoundation
- s = open(source, 'rb').read()
- d = CoreFoundation.CFDataCreate(None, s, len(s))
- _, error = CoreFoundation.CFPropertyListCreateFromXMLData(None, d, 0, None)
- if error:
- return
-
- fp = open(dest, 'wb')
- fp.write(s.decode(input_code).encode('UTF-16'))
- fp.close()
-
- if convert_to_binary == 'True':
- self._ConvertToBinary(dest)
-
- def _DetectInputEncoding(self, file_name):
- """Reads the first few bytes from file_name and tries to guess the text
- encoding. Returns None as a guess if it can't detect it."""
- fp = open(file_name, 'rb')
- try:
- header = fp.read(3)
- except e:
- fp.close()
- return None
- fp.close()
- if header.startswith("\xFE\xFF"):
- return "UTF-16"
- elif header.startswith("\xFF\xFE"):
- return "UTF-16"
- elif header.startswith("\xEF\xBB\xBF"):
- return "UTF-8"
- else:
- return None
-
- def ExecCopyInfoPlist(self, source, dest, convert_to_binary, *keys):
- """Copies the |source| Info.plist to the destination directory |dest|."""
- # Read the source Info.plist into memory.
- fd = open(source, 'r')
- lines = fd.read()
- fd.close()
-
- # Insert synthesized key/value pairs (e.g. BuildMachineOSBuild).
- plist = plistlib.readPlistFromString(lines)
- if keys:
- plist = dict(plist.items() + json.loads(keys[0]).items())
- lines = plistlib.writePlistToString(plist)
-
- # Go through all the environment variables and replace them as variables in
- # the file.
- IDENT_RE = re.compile(r'[/\s]')
- for key in os.environ:
- if key.startswith('_'):
- continue
- evar = '${%s}' % key
- evalue = os.environ[key]
- lines = string.replace(lines, evar, evalue)
-
- # Xcode supports various suffices on environment variables, which are
- # all undocumented. :rfc1034identifier is used in the standard project
- # template these days, and :identifier was used earlier. They are used to
- # convert non-url characters into things that look like valid urls --
- # except that the replacement character for :identifier, '_' isn't valid
- # in a URL either -- oops, hence :rfc1034identifier was born.
- evar = '${%s:identifier}' % key
- evalue = IDENT_RE.sub('_', os.environ[key])
- lines = string.replace(lines, evar, evalue)
-
- evar = '${%s:rfc1034identifier}' % key
- evalue = IDENT_RE.sub('-', os.environ[key])
- lines = string.replace(lines, evar, evalue)
-
- # Remove any keys with values that haven't been replaced.
- lines = lines.split('\n')
- for i in range(len(lines)):
- if lines[i].strip().startswith("${"):
- lines[i] = None
- lines[i - 1] = None
- lines = '\n'.join(filter(lambda x: x is not None, lines))
-
- # Write out the file with variables replaced.
- fd = open(dest, 'w')
- fd.write(lines)
- fd.close()
-
- # Now write out PkgInfo file now that the Info.plist file has been
- # "compiled".
- self._WritePkgInfo(dest)
-
- if convert_to_binary == 'True':
- self._ConvertToBinary(dest)
-
- def _WritePkgInfo(self, info_plist):
- """This writes the PkgInfo file from the data stored in Info.plist."""
- plist = plistlib.readPlist(info_plist)
- if not plist:
- return
-
- # Only create PkgInfo for executable types.
- package_type = plist['CFBundlePackageType']
- if package_type != 'APPL':
- return
-
- # The format of PkgInfo is eight characters, representing the bundle type
- # and bundle signature, each four characters. If that is missing, four
- # '?' characters are used instead.
- signature_code = plist.get('CFBundleSignature', '????')
- if len(signature_code) != 4: # Wrong length resets everything, too.
- signature_code = '?' * 4
-
- dest = os.path.join(os.path.dirname(info_plist), 'PkgInfo')
- fp = open(dest, 'w')
- fp.write('%s%s' % (package_type, signature_code))
- fp.close()
-
- def ExecFlock(self, lockfile, *cmd_list):
- """Emulates the most basic behavior of Linux's flock(1)."""
- # Rely on exception handling to report errors.
- fd = os.open(lockfile, os.O_RDONLY|os.O_NOCTTY|os.O_CREAT, 0o666)
- fcntl.flock(fd, fcntl.LOCK_EX)
- return subprocess.call(cmd_list)
-
- def ExecFilterLibtool(self, *cmd_list):
- """Calls libtool and filters out '/path/to/libtool: file: foo.o has no
- symbols'."""
- libtool_re = re.compile(r'^.*libtool: file: .* has no symbols$')
- libtool_re5 = re.compile(
- r'^.*libtool: warning for library: ' +
- r'.* the table of contents is empty ' +
- r'\(no object file members in the library define global symbols\)$')
- env = os.environ.copy()
- # Ref:
- # http://www.opensource.apple.com/source/cctools/cctools-809/misc/libtool.c
- # The problem with this flag is that it resets the file mtime on the file to
- # epoch=0, e.g. 1970-1-1 or 1969-12-31 depending on timezone.
- env['ZERO_AR_DATE'] = '1'
- libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env)
- _, err = libtoolout.communicate()
- for line in err.splitlines():
- if not libtool_re.match(line) and not libtool_re5.match(line):
- print >>sys.stderr, line
- # Unconditionally touch the output .a file on the command line if present
- # and the command succeeded. A bit hacky.
- if not libtoolout.returncode:
- for i in range(len(cmd_list) - 1):
- if cmd_list[i] == "-o" and cmd_list[i+1].endswith('.a'):
- os.utime(cmd_list[i+1], None)
- break
- return libtoolout.returncode
-
- def ExecPackageFramework(self, framework, version):
- """Takes a path to Something.framework and the Current version of that and
- sets up all the symlinks."""
- # Find the name of the binary based on the part before the ".framework".
- binary = os.path.basename(framework).split('.')[0]
-
- CURRENT = 'Current'
- RESOURCES = 'Resources'
- VERSIONS = 'Versions'
-
- if not os.path.exists(os.path.join(framework, VERSIONS, version, binary)):
- # Binary-less frameworks don't seem to contain symlinks (see e.g.
- # chromium's out/Debug/org.chromium.Chromium.manifest/ bundle).
- return
-
- # Move into the framework directory to set the symlinks correctly.
- pwd = os.getcwd()
- os.chdir(framework)
-
- # Set up the Current version.
- self._Relink(version, os.path.join(VERSIONS, CURRENT))
-
- # Set up the root symlinks.
- self._Relink(os.path.join(VERSIONS, CURRENT, binary), binary)
- self._Relink(os.path.join(VERSIONS, CURRENT, RESOURCES), RESOURCES)
-
- # Back to where we were before!
- os.chdir(pwd)
-
- def _Relink(self, dest, link):
- """Creates a symlink to |dest| named |link|. If |link| already exists,
- it is overwritten."""
- if os.path.lexists(link):
- os.remove(link)
- os.symlink(dest, link)
-
- def ExecCompileXcassets(self, keys, *inputs):
- """Compiles multiple .xcassets files into a single .car file.
-
- This invokes 'actool' to compile all the inputs .xcassets files. The
- |keys| arguments is a json-encoded dictionary of extra arguments to
- pass to 'actool' when the asset catalogs contains an application icon
- or a launch image.
-
- Note that 'actool' does not create the Assets.car file if the asset
- catalogs does not contains imageset.
- """
- command_line = [
- 'xcrun', 'actool', '--output-format', 'human-readable-text',
- '--compress-pngs', '--notices', '--warnings', '--errors',
- ]
- is_iphone_target = 'IPHONEOS_DEPLOYMENT_TARGET' in os.environ
- if is_iphone_target:
- platform = os.environ['CONFIGURATION'].split('-')[-1]
- if platform not in ('iphoneos', 'iphonesimulator'):
- platform = 'iphonesimulator'
- command_line.extend([
- '--platform', platform, '--target-device', 'iphone',
- '--target-device', 'ipad', '--minimum-deployment-target',
- os.environ['IPHONEOS_DEPLOYMENT_TARGET'], '--compile',
- os.path.abspath(os.environ['CONTENTS_FOLDER_PATH']),
- ])
- else:
- command_line.extend([
- '--platform', 'macosx', '--target-device', 'mac',
- '--minimum-deployment-target', os.environ['MACOSX_DEPLOYMENT_TARGET'],
- '--compile',
- os.path.abspath(os.environ['UNLOCALIZED_RESOURCES_FOLDER_PATH']),
- ])
- if keys:
- keys = json.loads(keys)
- for key, value in keys.iteritems():
- arg_name = '--' + key
- if isinstance(value, bool):
- if value:
- command_line.append(arg_name)
- elif isinstance(value, list):
- for v in value:
- command_line.append(arg_name)
- command_line.append(str(v))
- else:
- command_line.append(arg_name)
- command_line.append(str(value))
- # Note: actool crashes if inputs path are relative, so use os.path.abspath
- # to get absolute path name for inputs.
- command_line.extend(map(os.path.abspath, inputs))
- subprocess.check_call(command_line)
-
- def ExecMergeInfoPlist(self, output, *inputs):
- """Merge multiple .plist files into a single .plist file."""
- merged_plist = {}
- for path in inputs:
- plist = self._LoadPlistMaybeBinary(path)
- self._MergePlist(merged_plist, plist)
- plistlib.writePlist(merged_plist, output)
-
- def ExecCodeSignBundle(self, key, resource_rules, entitlements, provisioning):
- """Code sign a bundle.
-
- This function tries to code sign an iOS bundle, following the same
- algorithm as Xcode:
- 1. copy ResourceRules.plist from the user or the SDK into the bundle,
- 2. pick the provisioning profile that best match the bundle identifier,
- and copy it into the bundle as embedded.mobileprovision,
- 3. copy Entitlements.plist from user or SDK next to the bundle,
- 4. code sign the bundle.
- """
- resource_rules_path = self._InstallResourceRules(resource_rules)
- substitutions, overrides = self._InstallProvisioningProfile(
- provisioning, self._GetCFBundleIdentifier())
- entitlements_path = self._InstallEntitlements(
- entitlements, substitutions, overrides)
- subprocess.check_call([
- 'codesign', '--force', '--sign', key, '--resource-rules',
- resource_rules_path, '--entitlements', entitlements_path,
- os.path.join(
- os.environ['TARGET_BUILD_DIR'],
- os.environ['FULL_PRODUCT_NAME'])])
-
- def _InstallResourceRules(self, resource_rules):
- """Installs ResourceRules.plist from user or SDK into the bundle.
-
- Args:
- resource_rules: string, optional, path to the ResourceRules.plist file
- to use, default to "${SDKROOT}/ResourceRules.plist"
-
- Returns:
- Path to the copy of ResourceRules.plist into the bundle.
- """
- source_path = resource_rules
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['CONTENTS_FOLDER_PATH'],
- 'ResourceRules.plist')
- if not source_path:
- source_path = os.path.join(
- os.environ['SDKROOT'], 'ResourceRules.plist')
- shutil.copy2(source_path, target_path)
- return target_path
-
- def _InstallProvisioningProfile(self, profile, bundle_identifier):
- """Installs embedded.mobileprovision into the bundle.
-
- Args:
- profile: string, optional, short name of the .mobileprovision file
- to use, if empty or the file is missing, the best file installed
- will be used
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
-
- Returns:
- A tuple containing two dictionary: variables substitutions and values
- to overrides when generating the entitlements file.
- """
- source_path, provisioning_data, team_id = self._FindProvisioningProfile(
- profile, bundle_identifier)
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['CONTENTS_FOLDER_PATH'],
- 'embedded.mobileprovision')
- shutil.copy2(source_path, target_path)
- substitutions = self._GetSubstitutions(bundle_identifier, team_id + '.')
- return substitutions, provisioning_data['Entitlements']
-
- def _FindProvisioningProfile(self, profile, bundle_identifier):
- """Finds the .mobileprovision file to use for signing the bundle.
-
- Checks all the installed provisioning profiles (or if the user specified
- the PROVISIONING_PROFILE variable, only consult it) and select the most
- specific that correspond to the bundle identifier.
-
- Args:
- profile: string, optional, short name of the .mobileprovision file
- to use, if empty or the file is missing, the best file installed
- will be used
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
-
- Returns:
- A tuple of the path to the selected provisioning profile, the data of
- the embedded plist in the provisioning profile and the team identifier
- to use for code signing.
-
- Raises:
- SystemExit: if no .mobileprovision can be used to sign the bundle.
- """
- profiles_dir = os.path.join(
- os.environ['HOME'], 'Library', 'MobileDevice', 'Provisioning Profiles')
- if not os.path.isdir(profiles_dir):
- print >>sys.stderr, (
- 'cannot find mobile provisioning for %s' % bundle_identifier)
- sys.exit(1)
- provisioning_profiles = None
- if profile:
- profile_path = os.path.join(profiles_dir, profile + '.mobileprovision')
- if os.path.exists(profile_path):
- provisioning_profiles = [profile_path]
- if not provisioning_profiles:
- provisioning_profiles = glob.glob(
- os.path.join(profiles_dir, '*.mobileprovision'))
- valid_provisioning_profiles = {}
- for profile_path in provisioning_profiles:
- profile_data = self._LoadProvisioningProfile(profile_path)
- app_id_pattern = profile_data.get(
- 'Entitlements', {}).get('application-identifier', '')
- for team_identifier in profile_data.get('TeamIdentifier', []):
- app_id = '%s.%s' % (team_identifier, bundle_identifier)
- if fnmatch.fnmatch(app_id, app_id_pattern):
- valid_provisioning_profiles[app_id_pattern] = (
- profile_path, profile_data, team_identifier)
- if not valid_provisioning_profiles:
- print >>sys.stderr, (
- 'cannot find mobile provisioning for %s' % bundle_identifier)
- sys.exit(1)
- # If the user has multiple provisioning profiles installed that can be
- # used for ${bundle_identifier}, pick the most specific one (ie. the
- # provisioning profile whose pattern is the longest).
- selected_key = max(valid_provisioning_profiles, key=lambda v: len(v))
- return valid_provisioning_profiles[selected_key]
-
- def _LoadProvisioningProfile(self, profile_path):
- """Extracts the plist embedded in a provisioning profile.
-
- Args:
- profile_path: string, path to the .mobileprovision file
-
- Returns:
- Content of the plist embedded in the provisioning profile as a dictionary.
- """
- with tempfile.NamedTemporaryFile() as temp:
- subprocess.check_call([
- 'security', 'cms', '-D', '-i', profile_path, '-o', temp.name])
- return self._LoadPlistMaybeBinary(temp.name)
-
- def _MergePlist(self, merged_plist, plist):
- """Merge |plist| into |merged_plist|."""
- for key, value in plist.iteritems():
- if isinstance(value, dict):
- merged_value = merged_plist.get(key, {})
- if isinstance(merged_value, dict):
- self._MergePlist(merged_value, value)
- merged_plist[key] = merged_value
- else:
- merged_plist[key] = value
- else:
- merged_plist[key] = value
-
- def _LoadPlistMaybeBinary(self, plist_path):
- """Loads into a memory a plist possibly encoded in binary format.
-
- This is a wrapper around plistlib.readPlist that tries to convert the
- plist to the XML format if it can't be parsed (assuming that it is in
- the binary format).
-
- Args:
- plist_path: string, path to a plist file, in XML or binary format
-
- Returns:
- Content of the plist as a dictionary.
- """
- try:
- # First, try to read the file using plistlib that only supports XML,
- # and if an exception is raised, convert a temporary copy to XML and
- # load that copy.
- return plistlib.readPlist(plist_path)
- except:
- pass
- with tempfile.NamedTemporaryFile() as temp:
- shutil.copy2(plist_path, temp.name)
- subprocess.check_call(['plutil', '-convert', 'xml1', temp.name])
- return plistlib.readPlist(temp.name)
-
- def _GetSubstitutions(self, bundle_identifier, app_identifier_prefix):
- """Constructs a dictionary of variable substitutions for Entitlements.plist.
-
- Args:
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
- app_identifier_prefix: string, value for AppIdentifierPrefix
-
- Returns:
- Dictionary of substitutions to apply when generating Entitlements.plist.
- """
- return {
- 'CFBundleIdentifier': bundle_identifier,
- 'AppIdentifierPrefix': app_identifier_prefix,
- }
-
- def _GetCFBundleIdentifier(self):
- """Extracts CFBundleIdentifier value from Info.plist in the bundle.
-
- Returns:
- Value of CFBundleIdentifier in the Info.plist located in the bundle.
- """
- info_plist_path = os.path.join(
- os.environ['TARGET_BUILD_DIR'],
- os.environ['INFOPLIST_PATH'])
- info_plist_data = self._LoadPlistMaybeBinary(info_plist_path)
- return info_plist_data['CFBundleIdentifier']
-
- def _InstallEntitlements(self, entitlements, substitutions, overrides):
- """Generates and install the ${BundleName}.xcent entitlements file.
-
- Expands variables "$(variable)" pattern in the source entitlements file,
- add extra entitlements defined in the .mobileprovision file and the copy
- the generated plist to "${BundlePath}.xcent".
-
- Args:
- entitlements: string, optional, path to the Entitlements.plist template
- to use, defaults to "${SDKROOT}/Entitlements.plist"
- substitutions: dictionary, variable substitutions
- overrides: dictionary, values to add to the entitlements
-
- Returns:
- Path to the generated entitlements file.
- """
- source_path = entitlements
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['PRODUCT_NAME'] + '.xcent')
- if not source_path:
- source_path = os.path.join(
- os.environ['SDKROOT'],
- 'Entitlements.plist')
- shutil.copy2(source_path, target_path)
- data = self._LoadPlistMaybeBinary(target_path)
- data = self._ExpandVariables(data, substitutions)
- if overrides:
- for key in overrides:
- if key not in data:
- data[key] = overrides[key]
- plistlib.writePlist(data, target_path)
- return target_path
-
- def _ExpandVariables(self, data, substitutions):
- """Expands variables "$(variable)" in data.
-
- Args:
- data: object, can be either string, list or dictionary
- substitutions: dictionary, variable substitutions to perform
-
- Returns:
- Copy of data where each references to "$(variable)" has been replaced
- by the corresponding value found in substitutions, or left intact if
- the key was not found.
- """
- if isinstance(data, str):
- for key, value in substitutions.iteritems():
- data = data.replace('$(%s)' % key, value)
- return data
- if isinstance(data, list):
- return [self._ExpandVariables(v, substitutions) for v in data]
- if isinstance(data, dict):
- return {k: self._ExpandVariables(data[k], substitutions) for k in data}
- return data
-
-if __name__ == '__main__':
- sys.exit(main(sys.argv[1:]))
diff --git a/test/addons-napi/test_make_callback_recurse/build/Makefile b/test/addons-napi/test_make_callback_recurse/build/Makefile
deleted file mode 100644
index e7e9365c29f899..00000000000000
--- a/test/addons-napi/test_make_callback_recurse/build/Makefile
+++ /dev/null
@@ -1,342 +0,0 @@
-# We borrow heavily from the kernel build setup, though we are simpler since
-# we don't have Kconfig tweaking settings on us.
-
-# The implicit make rules have it looking for RCS files, among other things.
-# We instead explicitly write all the rules we care about.
-# It's even quicker (saves ~200ms) to pass -r on the command line.
-MAKEFLAGS=-r
-
-# The source directory tree.
-srcdir := ..
-abs_srcdir := $(abspath $(srcdir))
-
-# The name of the builddir.
-builddir_name ?= .
-
-# The V=1 flag on command line makes us verbosely print command lines.
-ifdef V
- quiet=
-else
- quiet=quiet_
-endif
-
-# Specify BUILDTYPE=Release on the command line for a release build.
-BUILDTYPE ?= Release
-
-# Directory all our build output goes into.
-# Note that this must be two directories beneath src/ for unit tests to pass,
-# as they reach into the src/ directory for data with relative paths.
-builddir ?= $(builddir_name)/$(BUILDTYPE)
-abs_builddir := $(abspath $(builddir))
-depsdir := $(builddir)/.deps
-
-# Object output directory.
-obj := $(builddir)/obj
-abs_obj := $(abspath $(obj))
-
-# We build up a list of every single one of the targets so we can slurp in the
-# generated dependency rule Makefiles in one pass.
-all_deps :=
-
-
-
-CC.target ?= $(CC)
-CFLAGS.target ?= $(CPPFLAGS) $(CFLAGS)
-CXX.target ?= $(CXX)
-CXXFLAGS.target ?= $(CPPFLAGS) $(CXXFLAGS)
-LINK.target ?= $(LINK)
-LDFLAGS.target ?= $(LDFLAGS)
-AR.target ?= $(AR)
-
-# C++ apps need to be linked with g++.
-LINK ?= $(CXX.target)
-
-# TODO(evan): move all cross-compilation logic to gyp-time so we don't need
-# to replicate this environment fallback in make as well.
-CC.host ?= gcc
-CFLAGS.host ?= $(CPPFLAGS_host) $(CFLAGS_host)
-CXX.host ?= g++
-CXXFLAGS.host ?= $(CPPFLAGS_host) $(CXXFLAGS_host)
-LINK.host ?= $(CXX.host)
-LDFLAGS.host ?=
-AR.host ?= ar
-
-# Define a dir function that can handle spaces.
-# http://www.gnu.org/software/make/manual/make.html#Syntax-of-Functions
-# "leading spaces cannot appear in the text of the first argument as written.
-# These characters can be put into the argument value by variable substitution."
-empty :=
-space := $(empty) $(empty)
-
-# http://stackoverflow.com/questions/1189781/using-make-dir-or-notdir-on-a-path-with-spaces
-replace_spaces = $(subst $(space),?,$1)
-unreplace_spaces = $(subst ?,$(space),$1)
-dirx = $(call unreplace_spaces,$(dir $(call replace_spaces,$1)))
-
-# Flags to make gcc output dependency info. Note that you need to be
-# careful here to use the flags that ccache and distcc can understand.
-# We write to a dep file on the side first and then rename at the end
-# so we can't end up with a broken dep file.
-depfile = $(depsdir)/$(call replace_spaces,$@).d
-DEPFLAGS = -MMD -MF $(depfile).raw
-
-# We have to fixup the deps output in a few ways.
-# (1) the file output should mention the proper .o file.
-# ccache or distcc lose the path to the target, so we convert a rule of
-# the form:
-# foobar.o: DEP1 DEP2
-# into
-# path/to/foobar.o: DEP1 DEP2
-# (2) we want missing files not to cause us to fail to build.
-# We want to rewrite
-# foobar.o: DEP1 DEP2 \
-# DEP3
-# to
-# DEP1:
-# DEP2:
-# DEP3:
-# so if the files are missing, they're just considered phony rules.
-# We have to do some pretty insane escaping to get those backslashes
-# and dollar signs past make, the shell, and sed at the same time.
-# Doesn't work with spaces, but that's fine: .d files have spaces in
-# their names replaced with other characters.
-define fixup_dep
-# The depfile may not exist if the input file didn't have any #includes.
-touch $(depfile).raw
-# Fixup path as in (1).
-sed -e "s|^$(notdir $@)|$@|" $(depfile).raw >> $(depfile)
-# Add extra rules as in (2).
-# We remove slashes and replace spaces with new lines;
-# remove blank lines;
-# delete the first line and append a colon to the remaining lines.
-sed -e 's|\\||' -e 'y| |\n|' $(depfile).raw |\
- grep -v '^$$' |\
- sed -e 1d -e 's|$$|:|' \
- >> $(depfile)
-rm $(depfile).raw
-endef
-
-# Command definitions:
-# - cmd_foo is the actual command to run;
-# - quiet_cmd_foo is the brief-output summary of the command.
-
-quiet_cmd_cc = CC($(TOOLSET)) $@
-cmd_cc = $(CC.$(TOOLSET)) $(GYP_CFLAGS) $(DEPFLAGS) $(CFLAGS.$(TOOLSET)) -c -o $@ $<
-
-quiet_cmd_cxx = CXX($(TOOLSET)) $@
-cmd_cxx = $(CXX.$(TOOLSET)) $(GYP_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-
-quiet_cmd_objc = CXX($(TOOLSET)) $@
-cmd_objc = $(CC.$(TOOLSET)) $(GYP_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-quiet_cmd_objcxx = CXX($(TOOLSET)) $@
-cmd_objcxx = $(CXX.$(TOOLSET)) $(GYP_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-# Commands for precompiled header files.
-quiet_cmd_pch_c = CXX($(TOOLSET)) $@
-cmd_pch_c = $(CC.$(TOOLSET)) $(GYP_PCH_CFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-quiet_cmd_pch_cc = CXX($(TOOLSET)) $@
-cmd_pch_cc = $(CC.$(TOOLSET)) $(GYP_PCH_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-quiet_cmd_pch_m = CXX($(TOOLSET)) $@
-cmd_pch_m = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $<
-quiet_cmd_pch_mm = CXX($(TOOLSET)) $@
-cmd_pch_mm = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-# gyp-mac-tool is written next to the root Makefile by gyp.
-# Use $(4) for the command, since $(2) and $(3) are used as flag by do_cmd
-# already.
-quiet_cmd_mac_tool = MACTOOL $(4) $<
-cmd_mac_tool = ./gyp-mac-tool $(4) $< "$@"
-
-quiet_cmd_mac_package_framework = PACKAGE FRAMEWORK $@
-cmd_mac_package_framework = ./gyp-mac-tool package-framework "$@" $(4)
-
-quiet_cmd_infoplist = INFOPLIST $@
-cmd_infoplist = $(CC.$(TOOLSET)) -E -P -Wno-trigraphs -x c $(INFOPLIST_DEFINES) "$<" -o "$@"
-
-quiet_cmd_touch = TOUCH $@
-cmd_touch = touch $@
-
-quiet_cmd_copy = COPY $@
-# send stderr to /dev/null to ignore messages when linking directories.
-cmd_copy = rm -rf "$@" && cp -af "$<" "$@"
-
-quiet_cmd_alink = LIBTOOL-STATIC $@
-cmd_alink = rm -f $@ && ./gyp-mac-tool filter-libtool libtool $(GYP_LIBTOOLFLAGS) -static -o $@ $(filter %.o,$^)
-
-quiet_cmd_link = LINK($(TOOLSET)) $@
-cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS)
-
-quiet_cmd_solink = SOLINK($(TOOLSET)) $@
-cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS)
-
-quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@
-cmd_solink_module = $(LINK.$(TOOLSET)) -bundle $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS)
-
-
-# Define an escape_quotes function to escape single quotes.
-# This allows us to handle quotes properly as long as we always use
-# use single quotes and escape_quotes.
-escape_quotes = $(subst ','\'',$(1))
-# This comment is here just to include a ' to unconfuse syntax highlighting.
-# Define an escape_vars function to escape '$' variable syntax.
-# This allows us to read/write command lines with shell variables (e.g.
-# $LD_LIBRARY_PATH), without triggering make substitution.
-escape_vars = $(subst $$,$$$$,$(1))
-# Helper that expands to a shell command to echo a string exactly as it is in
-# make. This uses printf instead of echo because printf's behaviour with respect
-# to escape sequences is more portable than echo's across different shells
-# (e.g., dash, bash).
-exact_echo = printf '%s\n' '$(call escape_quotes,$(1))'
-
-# Helper to compare the command we're about to run against the command
-# we logged the last time we ran the command. Produces an empty
-# string (false) when the commands match.
-# Tricky point: Make has no string-equality test function.
-# The kernel uses the following, but it seems like it would have false
-# positives, where one string reordered its arguments.
-# arg_check = $(strip $(filter-out $(cmd_$(1)), $(cmd_$@)) \
-# $(filter-out $(cmd_$@), $(cmd_$(1))))
-# We instead substitute each for the empty string into the other, and
-# say they're equal if both substitutions produce the empty string.
-# .d files contain ? instead of spaces, take that into account.
-command_changed = $(or $(subst $(cmd_$(1)),,$(cmd_$(call replace_spaces,$@))),\
- $(subst $(cmd_$(call replace_spaces,$@)),,$(cmd_$(1))))
-
-# Helper that is non-empty when a prerequisite changes.
-# Normally make does this implicitly, but we force rules to always run
-# so we can check their command lines.
-# $? -- new prerequisites
-# $| -- order-only dependencies
-prereq_changed = $(filter-out FORCE_DO_CMD,$(filter-out $|,$?))
-
-# Helper that executes all postbuilds until one fails.
-define do_postbuilds
- @E=0;\
- for p in $(POSTBUILDS); do\
- eval $$p;\
- E=$$?;\
- if [ $$E -ne 0 ]; then\
- break;\
- fi;\
- done;\
- if [ $$E -ne 0 ]; then\
- rm -rf "$@";\
- exit $$E;\
- fi
-endef
-
-# do_cmd: run a command via the above cmd_foo names, if necessary.
-# Should always run for a given target to handle command-line changes.
-# Second argument, if non-zero, makes it do asm/C/C++ dependency munging.
-# Third argument, if non-zero, makes it do POSTBUILDS processing.
-# Note: We intentionally do NOT call dirx for depfile, since it contains ? for
-# spaces already and dirx strips the ? characters.
-define do_cmd
-$(if $(or $(command_changed),$(prereq_changed)),
- @$(call exact_echo, $($(quiet)cmd_$(1)))
- @mkdir -p "$(call dirx,$@)" "$(dir $(depfile))"
- $(if $(findstring flock,$(word 2,$(cmd_$1))),
- @$(cmd_$(1))
- @echo " $(quiet_cmd_$(1)): Finished",
- @$(cmd_$(1))
- )
- @$(call exact_echo,$(call escape_vars,cmd_$(call replace_spaces,$@) := $(cmd_$(1)))) > $(depfile)
- @$(if $(2),$(fixup_dep))
- $(if $(and $(3), $(POSTBUILDS)),
- $(call do_postbuilds)
- )
-)
-endef
-
-# Declare the "all" target first so it is the default,
-# even though we don't have the deps yet.
-.PHONY: all
-all:
-
-# make looks for ways to re-generate included makefiles, but in our case, we
-# don't have a direct way. Explicitly telling make that it has nothing to do
-# for them makes it go faster.
-%.d: ;
-
-# Use FORCE_DO_CMD to force a target to run. Should be coupled with
-# do_cmd.
-.PHONY: FORCE_DO_CMD
-FORCE_DO_CMD:
-
-TOOLSET := target
-# Suffix rules, putting all outputs into $(obj).
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cpp FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cxx FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.m FORCE_DO_CMD
- @$(call do_cmd,objc,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.mm FORCE_DO_CMD
- @$(call do_cmd,objcxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.S FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.s FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-# Try building from generated source, too.
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cpp FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cxx FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.m FORCE_DO_CMD
- @$(call do_cmd,objc,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.mm FORCE_DO_CMD
- @$(call do_cmd,objcxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.S FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.s FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-$(obj).$(TOOLSET)/%.o: $(obj)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.cpp FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.cxx FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.m FORCE_DO_CMD
- @$(call do_cmd,objc,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.mm FORCE_DO_CMD
- @$(call do_cmd,objcxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.S FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.s FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-
-ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
- $(findstring $(join ^,$(prefix)),\
- $(join ^,binding.target.mk)))),)
- include binding.target.mk
-endif
-
-quiet_cmd_regen_makefile = ACTION Regenerating $@
-cmd_regen_makefile = cd $(srcdir); /Users/trott/io.js/deps/npm/node_modules/node-gyp/gyp/gyp_main.py -fmake --ignore-environment "--toplevel-dir=." -I/Users/trott/io.js/test/addons-napi/test_make_callback_recurse/build/config.gypi -I/Users/trott/io.js/deps/npm/node_modules/node-gyp/addon.gypi -I/Users/trott/io.js/common.gypi "--depth=." "-Goutput_dir=." "--generator-output=build" "-Dlibrary=shared_library" "-Dvisibility=default" "-Dnode_root_dir=/Users/trott/io.js" "-Dnode_gyp_dir=/Users/trott/io.js/deps/npm/node_modules/node-gyp" "-Dnode_lib_file=/Users/trott/io.js/$(Configuration)/node.lib" "-Dmodule_root_dir=/Users/trott/io.js/test/addons-napi/test_make_callback_recurse" "-Dnode_engine=v8" binding.gyp
-Makefile: $(srcdir)/../../../deps/npm/node_modules/node-gyp/addon.gypi $(srcdir)/build/config.gypi $(srcdir)/binding.gyp $(srcdir)/../../../common.gypi
- $(call do_cmd,regen_makefile)
-
-# "all" is a concatenation of the "all" targets from all the included
-# sub-makefiles. This is just here to clarify.
-all:
-
-# Add in dependency-tracking rules. $(all_deps) is the list of every single
-# target in our tree. Only consider the ones with .d (dependency) info:
-d_files := $(wildcard $(foreach f,$(all_deps),$(depsdir)/$(f).d))
-ifneq ($(d_files),)
- include $(d_files)
-endif
diff --git a/test/addons-napi/test_make_callback_recurse/build/binding.Makefile b/test/addons-napi/test_make_callback_recurse/build/binding.Makefile
deleted file mode 100644
index 0556c70f61905f..00000000000000
--- a/test/addons-napi/test_make_callback_recurse/build/binding.Makefile
+++ /dev/null
@@ -1,6 +0,0 @@
-# This file is generated by gyp; do not edit.
-
-export builddir_name ?= ./build/.
-.PHONY: all
-all:
- $(MAKE) binding
diff --git a/test/addons-napi/test_make_callback_recurse/build/binding.target.mk b/test/addons-napi/test_make_callback_recurse/build/binding.target.mk
deleted file mode 100644
index fb7675d45de43e..00000000000000
--- a/test/addons-napi/test_make_callback_recurse/build/binding.target.mk
+++ /dev/null
@@ -1,178 +0,0 @@
-# This file is generated by gyp; do not edit.
-
-TOOLSET := target
-TARGET := binding
-DEFS_Debug := \
- '-DNODE_GYP_MODULE_NAME=binding' \
- '-DUSING_UV_SHARED=1' \
- '-DUSING_V8_SHARED=1' \
- '-DV8_DEPRECATION_WARNINGS=1' \
- '-D_DARWIN_USE_64_BIT_INODE=1' \
- '-D_LARGEFILE_SOURCE' \
- '-D_FILE_OFFSET_BITS=64' \
- '-DBUILDING_NODE_EXTENSION' \
- '-DDEBUG' \
- '-D_DEBUG' \
- '-DV8_ENABLE_CHECKS'
-
-# Flags passed to all source files.
-CFLAGS_Debug := \
- -O0 \
- -gdwarf-2 \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -Wall \
- -Wendif-labels \
- -W \
- -Wno-unused-parameter
-
-# Flags passed to only C files.
-CFLAGS_C_Debug := \
- -fno-strict-aliasing
-
-# Flags passed to only C++ files.
-CFLAGS_CC_Debug := \
- -std=gnu++0x \
- -stdlib=libc++ \
- -fno-rtti \
- -fno-exceptions \
- -fno-threadsafe-statics \
- -fno-strict-aliasing
-
-# Flags passed to only ObjC files.
-CFLAGS_OBJC_Debug :=
-
-# Flags passed to only ObjC++ files.
-CFLAGS_OBJCC_Debug :=
-
-INCS_Debug := \
- -I/Users/trott/io.js/include/node \
- -I/Users/trott/io.js/src \
- -I/Users/trott/io.js/deps/uv/include \
- -I/Users/trott/io.js/deps/v8/include
-
-DEFS_Release := \
- '-DNODE_GYP_MODULE_NAME=binding' \
- '-DUSING_UV_SHARED=1' \
- '-DUSING_V8_SHARED=1' \
- '-DV8_DEPRECATION_WARNINGS=1' \
- '-D_DARWIN_USE_64_BIT_INODE=1' \
- '-D_LARGEFILE_SOURCE' \
- '-D_FILE_OFFSET_BITS=64' \
- '-DBUILDING_NODE_EXTENSION'
-
-# Flags passed to all source files.
-CFLAGS_Release := \
- -Os \
- -gdwarf-2 \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -Wall \
- -Wendif-labels \
- -W \
- -Wno-unused-parameter
-
-# Flags passed to only C files.
-CFLAGS_C_Release := \
- -fno-strict-aliasing
-
-# Flags passed to only C++ files.
-CFLAGS_CC_Release := \
- -std=gnu++0x \
- -stdlib=libc++ \
- -fno-rtti \
- -fno-exceptions \
- -fno-threadsafe-statics \
- -fno-strict-aliasing
-
-# Flags passed to only ObjC files.
-CFLAGS_OBJC_Release :=
-
-# Flags passed to only ObjC++ files.
-CFLAGS_OBJCC_Release :=
-
-INCS_Release := \
- -I/Users/trott/io.js/include/node \
- -I/Users/trott/io.js/src \
- -I/Users/trott/io.js/deps/uv/include \
- -I/Users/trott/io.js/deps/v8/include
-
-OBJS := \
- $(obj).target/$(TARGET)/binding.o
-
-# Add to the list of files we specially track dependencies for.
-all_deps += $(OBJS)
-
-# CFLAGS et al overrides must be target-local.
-# See "Target-specific Variable Values" in the GNU Make manual.
-$(OBJS): TOOLSET := $(TOOLSET)
-$(OBJS): GYP_CFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE))
-$(OBJS): GYP_CXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE))
-$(OBJS): GYP_OBJCFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) $(CFLAGS_OBJC_$(BUILDTYPE))
-$(OBJS): GYP_OBJCXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) $(CFLAGS_OBJCC_$(BUILDTYPE))
-
-# Suffix rules, putting all outputs into $(obj).
-
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-
-# Try building from generated source, too.
-
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-
-# End of this set of suffix rules
-### Rules for final target.
-LDFLAGS_Debug := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -L$(builddir) \
- -stdlib=libc++
-
-LIBTOOLFLAGS_Debug := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first
-
-LDFLAGS_Release := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -L$(builddir) \
- -stdlib=libc++
-
-LIBTOOLFLAGS_Release := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first
-
-LIBS :=
-
-$(builddir)/binding.node: GYP_LDFLAGS := $(LDFLAGS_$(BUILDTYPE))
-$(builddir)/binding.node: LIBS := $(LIBS)
-$(builddir)/binding.node: GYP_LIBTOOLFLAGS := $(LIBTOOLFLAGS_$(BUILDTYPE))
-$(builddir)/binding.node: TOOLSET := $(TOOLSET)
-$(builddir)/binding.node: $(OBJS) FORCE_DO_CMD
- $(call do_cmd,solink_module)
-
-all_deps += $(builddir)/binding.node
-# Add target alias
-.PHONY: binding
-binding: $(builddir)/binding.node
-
-# Short alias for building this executable.
-.PHONY: binding.node
-binding.node: $(builddir)/binding.node
-
-# Add executable to "all" target.
-.PHONY: all
-all: $(builddir)/binding.node
-
diff --git a/test/addons-napi/test_make_callback_recurse/build/config.gypi b/test/addons-napi/test_make_callback_recurse/build/config.gypi
deleted file mode 100644
index 6a35cfccbb772b..00000000000000
--- a/test/addons-napi/test_make_callback_recurse/build/config.gypi
+++ /dev/null
@@ -1,70 +0,0 @@
-# Do not edit. File was generated by node-gyp's "configure" step
-{
- "target_defaults": {
- "cflags": [],
- "default_configuration": "Release",
- "defines": [],
- "include_dirs": [],
- "libraries": []
- },
- "variables": {
- "asan": 0,
- "coverage": "false",
- "debug_devtools": "node",
- "debug_http2": "false",
- "debug_nghttp2": "false",
- "force_dynamic_crt": 0,
- "host_arch": "x64",
- "icu_data_file": "icudt59l.dat",
- "icu_data_in": "../../deps/icu-small/source/data/in/icudt59l.dat",
- "icu_endianness": "l",
- "icu_gyp_path": "tools/icu/icu-generic.gyp",
- "icu_locales": "en,root",
- "icu_path": "deps/icu-small",
- "icu_small": "true",
- "icu_ver_major": "59",
- "llvm_version": 0,
- "node_byteorder": "little",
- "node_enable_d8": "false",
- "node_enable_v8_vtunejit": "false",
- "node_install_npm": "true",
- "node_module_version": 58,
- "node_no_browser_globals": "false",
- "node_prefix": "/usr/local",
- "node_release_urlbase": "",
- "node_shared": "false",
- "node_shared_cares": "false",
- "node_shared_http_parser": "false",
- "node_shared_libuv": "false",
- "node_shared_openssl": "false",
- "node_shared_zlib": "false",
- "node_tag": "",
- "node_use_bundled_v8": "true",
- "node_use_dtrace": "true",
- "node_use_etw": "false",
- "node_use_lttng": "false",
- "node_use_openssl": "true",
- "node_use_perfctr": "false",
- "node_use_v8_platform": "true",
- "node_without_node_options": "false",
- "openssl_fips": "",
- "openssl_no_asm": 0,
- "shlib_suffix": "58.dylib",
- "target_arch": "x64",
- "uv_parent_path": "/deps/uv/",
- "uv_use_dtrace": "true",
- "v8_enable_gdbjit": 0,
- "v8_enable_i18n_support": 1,
- "v8_enable_inspector": 1,
- "v8_no_strict_aliasing": 1,
- "v8_optimized_debug": 0,
- "v8_promise_internal_field_count": 1,
- "v8_random_seed": 0,
- "v8_trace_maps": 0,
- "v8_use_snapshot": "true",
- "want_separate_host_toolset": 0,
- "xcode_version": "8.0",
- "nodedir": "/Users/trott/io.js",
- "standalone_static_library": 1
- }
-}
diff --git a/test/addons-napi/test_make_callback_recurse/build/gyp-mac-tool b/test/addons-napi/test_make_callback_recurse/build/gyp-mac-tool
deleted file mode 100755
index 8ef02b0493a003..00000000000000
--- a/test/addons-napi/test_make_callback_recurse/build/gyp-mac-tool
+++ /dev/null
@@ -1,611 +0,0 @@
-#!/usr/bin/env python
-# Generated by gyp. Do not edit.
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Utility functions to perform Xcode-style build steps.
-
-These functions are executed via gyp-mac-tool when using the Makefile generator.
-"""
-
-import fcntl
-import fnmatch
-import glob
-import json
-import os
-import plistlib
-import re
-import shutil
-import string
-import subprocess
-import sys
-import tempfile
-
-
-def main(args):
- executor = MacTool()
- exit_code = executor.Dispatch(args)
- if exit_code is not None:
- sys.exit(exit_code)
-
-
-class MacTool(object):
- """This class performs all the Mac tooling steps. The methods can either be
- executed directly, or dispatched from an argument list."""
-
- def Dispatch(self, args):
- """Dispatches a string command to a method."""
- if len(args) < 1:
- raise Exception("Not enough arguments")
-
- method = "Exec%s" % self._CommandifyName(args[0])
- return getattr(self, method)(*args[1:])
-
- def _CommandifyName(self, name_string):
- """Transforms a tool name like copy-info-plist to CopyInfoPlist"""
- return name_string.title().replace('-', '')
-
- def ExecCopyBundleResource(self, source, dest, convert_to_binary):
- """Copies a resource file to the bundle/Resources directory, performing any
- necessary compilation on each resource."""
- extension = os.path.splitext(source)[1].lower()
- if os.path.isdir(source):
- # Copy tree.
- # TODO(thakis): This copies file attributes like mtime, while the
- # single-file branch below doesn't. This should probably be changed to
- # be consistent with the single-file branch.
- if os.path.exists(dest):
- shutil.rmtree(dest)
- shutil.copytree(source, dest)
- elif extension == '.xib':
- return self._CopyXIBFile(source, dest)
- elif extension == '.storyboard':
- return self._CopyXIBFile(source, dest)
- elif extension == '.strings':
- self._CopyStringsFile(source, dest, convert_to_binary)
- else:
- shutil.copy(source, dest)
-
- def _CopyXIBFile(self, source, dest):
- """Compiles a XIB file with ibtool into a binary plist in the bundle."""
-
- # ibtool sometimes crashes with relative paths. See crbug.com/314728.
- base = os.path.dirname(os.path.realpath(__file__))
- if os.path.relpath(source):
- source = os.path.join(base, source)
- if os.path.relpath(dest):
- dest = os.path.join(base, dest)
-
- args = ['xcrun', 'ibtool', '--errors', '--warnings', '--notices',
- '--output-format', 'human-readable-text', '--compile', dest, source]
- ibtool_section_re = re.compile(r'/\*.*\*/')
- ibtool_re = re.compile(r'.*note:.*is clipping its content')
- ibtoolout = subprocess.Popen(args, stdout=subprocess.PIPE)
- current_section_header = None
- for line in ibtoolout.stdout:
- if ibtool_section_re.match(line):
- current_section_header = line
- elif not ibtool_re.match(line):
- if current_section_header:
- sys.stdout.write(current_section_header)
- current_section_header = None
- sys.stdout.write(line)
- return ibtoolout.returncode
-
- def _ConvertToBinary(self, dest):
- subprocess.check_call([
- 'xcrun', 'plutil', '-convert', 'binary1', '-o', dest, dest])
-
- def _CopyStringsFile(self, source, dest, convert_to_binary):
- """Copies a .strings file using iconv to reconvert the input into UTF-16."""
- input_code = self._DetectInputEncoding(source) or "UTF-8"
-
- # Xcode's CpyCopyStringsFile / builtin-copyStrings seems to call
- # CFPropertyListCreateFromXMLData() behind the scenes; at least it prints
- # CFPropertyListCreateFromXMLData(): Old-style plist parser: missing
- # semicolon in dictionary.
- # on invalid files. Do the same kind of validation.
- import CoreFoundation
- s = open(source, 'rb').read()
- d = CoreFoundation.CFDataCreate(None, s, len(s))
- _, error = CoreFoundation.CFPropertyListCreateFromXMLData(None, d, 0, None)
- if error:
- return
-
- fp = open(dest, 'wb')
- fp.write(s.decode(input_code).encode('UTF-16'))
- fp.close()
-
- if convert_to_binary == 'True':
- self._ConvertToBinary(dest)
-
- def _DetectInputEncoding(self, file_name):
- """Reads the first few bytes from file_name and tries to guess the text
- encoding. Returns None as a guess if it can't detect it."""
- fp = open(file_name, 'rb')
- try:
- header = fp.read(3)
- except e:
- fp.close()
- return None
- fp.close()
- if header.startswith("\xFE\xFF"):
- return "UTF-16"
- elif header.startswith("\xFF\xFE"):
- return "UTF-16"
- elif header.startswith("\xEF\xBB\xBF"):
- return "UTF-8"
- else:
- return None
-
- def ExecCopyInfoPlist(self, source, dest, convert_to_binary, *keys):
- """Copies the |source| Info.plist to the destination directory |dest|."""
- # Read the source Info.plist into memory.
- fd = open(source, 'r')
- lines = fd.read()
- fd.close()
-
- # Insert synthesized key/value pairs (e.g. BuildMachineOSBuild).
- plist = plistlib.readPlistFromString(lines)
- if keys:
- plist = dict(plist.items() + json.loads(keys[0]).items())
- lines = plistlib.writePlistToString(plist)
-
- # Go through all the environment variables and replace them as variables in
- # the file.
- IDENT_RE = re.compile(r'[/\s]')
- for key in os.environ:
- if key.startswith('_'):
- continue
- evar = '${%s}' % key
- evalue = os.environ[key]
- lines = string.replace(lines, evar, evalue)
-
- # Xcode supports various suffices on environment variables, which are
- # all undocumented. :rfc1034identifier is used in the standard project
- # template these days, and :identifier was used earlier. They are used to
- # convert non-url characters into things that look like valid urls --
- # except that the replacement character for :identifier, '_' isn't valid
- # in a URL either -- oops, hence :rfc1034identifier was born.
- evar = '${%s:identifier}' % key
- evalue = IDENT_RE.sub('_', os.environ[key])
- lines = string.replace(lines, evar, evalue)
-
- evar = '${%s:rfc1034identifier}' % key
- evalue = IDENT_RE.sub('-', os.environ[key])
- lines = string.replace(lines, evar, evalue)
-
- # Remove any keys with values that haven't been replaced.
- lines = lines.split('\n')
- for i in range(len(lines)):
- if lines[i].strip().startswith("${"):
- lines[i] = None
- lines[i - 1] = None
- lines = '\n'.join(filter(lambda x: x is not None, lines))
-
- # Write out the file with variables replaced.
- fd = open(dest, 'w')
- fd.write(lines)
- fd.close()
-
- # Now write out PkgInfo file now that the Info.plist file has been
- # "compiled".
- self._WritePkgInfo(dest)
-
- if convert_to_binary == 'True':
- self._ConvertToBinary(dest)
-
- def _WritePkgInfo(self, info_plist):
- """This writes the PkgInfo file from the data stored in Info.plist."""
- plist = plistlib.readPlist(info_plist)
- if not plist:
- return
-
- # Only create PkgInfo for executable types.
- package_type = plist['CFBundlePackageType']
- if package_type != 'APPL':
- return
-
- # The format of PkgInfo is eight characters, representing the bundle type
- # and bundle signature, each four characters. If that is missing, four
- # '?' characters are used instead.
- signature_code = plist.get('CFBundleSignature', '????')
- if len(signature_code) != 4: # Wrong length resets everything, too.
- signature_code = '?' * 4
-
- dest = os.path.join(os.path.dirname(info_plist), 'PkgInfo')
- fp = open(dest, 'w')
- fp.write('%s%s' % (package_type, signature_code))
- fp.close()
-
- def ExecFlock(self, lockfile, *cmd_list):
- """Emulates the most basic behavior of Linux's flock(1)."""
- # Rely on exception handling to report errors.
- fd = os.open(lockfile, os.O_RDONLY|os.O_NOCTTY|os.O_CREAT, 0o666)
- fcntl.flock(fd, fcntl.LOCK_EX)
- return subprocess.call(cmd_list)
-
- def ExecFilterLibtool(self, *cmd_list):
- """Calls libtool and filters out '/path/to/libtool: file: foo.o has no
- symbols'."""
- libtool_re = re.compile(r'^.*libtool: file: .* has no symbols$')
- libtool_re5 = re.compile(
- r'^.*libtool: warning for library: ' +
- r'.* the table of contents is empty ' +
- r'\(no object file members in the library define global symbols\)$')
- env = os.environ.copy()
- # Ref:
- # http://www.opensource.apple.com/source/cctools/cctools-809/misc/libtool.c
- # The problem with this flag is that it resets the file mtime on the file to
- # epoch=0, e.g. 1970-1-1 or 1969-12-31 depending on timezone.
- env['ZERO_AR_DATE'] = '1'
- libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env)
- _, err = libtoolout.communicate()
- for line in err.splitlines():
- if not libtool_re.match(line) and not libtool_re5.match(line):
- print >>sys.stderr, line
- # Unconditionally touch the output .a file on the command line if present
- # and the command succeeded. A bit hacky.
- if not libtoolout.returncode:
- for i in range(len(cmd_list) - 1):
- if cmd_list[i] == "-o" and cmd_list[i+1].endswith('.a'):
- os.utime(cmd_list[i+1], None)
- break
- return libtoolout.returncode
-
- def ExecPackageFramework(self, framework, version):
- """Takes a path to Something.framework and the Current version of that and
- sets up all the symlinks."""
- # Find the name of the binary based on the part before the ".framework".
- binary = os.path.basename(framework).split('.')[0]
-
- CURRENT = 'Current'
- RESOURCES = 'Resources'
- VERSIONS = 'Versions'
-
- if not os.path.exists(os.path.join(framework, VERSIONS, version, binary)):
- # Binary-less frameworks don't seem to contain symlinks (see e.g.
- # chromium's out/Debug/org.chromium.Chromium.manifest/ bundle).
- return
-
- # Move into the framework directory to set the symlinks correctly.
- pwd = os.getcwd()
- os.chdir(framework)
-
- # Set up the Current version.
- self._Relink(version, os.path.join(VERSIONS, CURRENT))
-
- # Set up the root symlinks.
- self._Relink(os.path.join(VERSIONS, CURRENT, binary), binary)
- self._Relink(os.path.join(VERSIONS, CURRENT, RESOURCES), RESOURCES)
-
- # Back to where we were before!
- os.chdir(pwd)
-
- def _Relink(self, dest, link):
- """Creates a symlink to |dest| named |link|. If |link| already exists,
- it is overwritten."""
- if os.path.lexists(link):
- os.remove(link)
- os.symlink(dest, link)
-
- def ExecCompileXcassets(self, keys, *inputs):
- """Compiles multiple .xcassets files into a single .car file.
-
- This invokes 'actool' to compile all the inputs .xcassets files. The
- |keys| arguments is a json-encoded dictionary of extra arguments to
- pass to 'actool' when the asset catalogs contains an application icon
- or a launch image.
-
- Note that 'actool' does not create the Assets.car file if the asset
- catalogs does not contains imageset.
- """
- command_line = [
- 'xcrun', 'actool', '--output-format', 'human-readable-text',
- '--compress-pngs', '--notices', '--warnings', '--errors',
- ]
- is_iphone_target = 'IPHONEOS_DEPLOYMENT_TARGET' in os.environ
- if is_iphone_target:
- platform = os.environ['CONFIGURATION'].split('-')[-1]
- if platform not in ('iphoneos', 'iphonesimulator'):
- platform = 'iphonesimulator'
- command_line.extend([
- '--platform', platform, '--target-device', 'iphone',
- '--target-device', 'ipad', '--minimum-deployment-target',
- os.environ['IPHONEOS_DEPLOYMENT_TARGET'], '--compile',
- os.path.abspath(os.environ['CONTENTS_FOLDER_PATH']),
- ])
- else:
- command_line.extend([
- '--platform', 'macosx', '--target-device', 'mac',
- '--minimum-deployment-target', os.environ['MACOSX_DEPLOYMENT_TARGET'],
- '--compile',
- os.path.abspath(os.environ['UNLOCALIZED_RESOURCES_FOLDER_PATH']),
- ])
- if keys:
- keys = json.loads(keys)
- for key, value in keys.iteritems():
- arg_name = '--' + key
- if isinstance(value, bool):
- if value:
- command_line.append(arg_name)
- elif isinstance(value, list):
- for v in value:
- command_line.append(arg_name)
- command_line.append(str(v))
- else:
- command_line.append(arg_name)
- command_line.append(str(value))
- # Note: actool crashes if inputs path are relative, so use os.path.abspath
- # to get absolute path name for inputs.
- command_line.extend(map(os.path.abspath, inputs))
- subprocess.check_call(command_line)
-
- def ExecMergeInfoPlist(self, output, *inputs):
- """Merge multiple .plist files into a single .plist file."""
- merged_plist = {}
- for path in inputs:
- plist = self._LoadPlistMaybeBinary(path)
- self._MergePlist(merged_plist, plist)
- plistlib.writePlist(merged_plist, output)
-
- def ExecCodeSignBundle(self, key, resource_rules, entitlements, provisioning):
- """Code sign a bundle.
-
- This function tries to code sign an iOS bundle, following the same
- algorithm as Xcode:
- 1. copy ResourceRules.plist from the user or the SDK into the bundle,
- 2. pick the provisioning profile that best match the bundle identifier,
- and copy it into the bundle as embedded.mobileprovision,
- 3. copy Entitlements.plist from user or SDK next to the bundle,
- 4. code sign the bundle.
- """
- resource_rules_path = self._InstallResourceRules(resource_rules)
- substitutions, overrides = self._InstallProvisioningProfile(
- provisioning, self._GetCFBundleIdentifier())
- entitlements_path = self._InstallEntitlements(
- entitlements, substitutions, overrides)
- subprocess.check_call([
- 'codesign', '--force', '--sign', key, '--resource-rules',
- resource_rules_path, '--entitlements', entitlements_path,
- os.path.join(
- os.environ['TARGET_BUILD_DIR'],
- os.environ['FULL_PRODUCT_NAME'])])
-
- def _InstallResourceRules(self, resource_rules):
- """Installs ResourceRules.plist from user or SDK into the bundle.
-
- Args:
- resource_rules: string, optional, path to the ResourceRules.plist file
- to use, default to "${SDKROOT}/ResourceRules.plist"
-
- Returns:
- Path to the copy of ResourceRules.plist into the bundle.
- """
- source_path = resource_rules
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['CONTENTS_FOLDER_PATH'],
- 'ResourceRules.plist')
- if not source_path:
- source_path = os.path.join(
- os.environ['SDKROOT'], 'ResourceRules.plist')
- shutil.copy2(source_path, target_path)
- return target_path
-
- def _InstallProvisioningProfile(self, profile, bundle_identifier):
- """Installs embedded.mobileprovision into the bundle.
-
- Args:
- profile: string, optional, short name of the .mobileprovision file
- to use, if empty or the file is missing, the best file installed
- will be used
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
-
- Returns:
- A tuple containing two dictionary: variables substitutions and values
- to overrides when generating the entitlements file.
- """
- source_path, provisioning_data, team_id = self._FindProvisioningProfile(
- profile, bundle_identifier)
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['CONTENTS_FOLDER_PATH'],
- 'embedded.mobileprovision')
- shutil.copy2(source_path, target_path)
- substitutions = self._GetSubstitutions(bundle_identifier, team_id + '.')
- return substitutions, provisioning_data['Entitlements']
-
- def _FindProvisioningProfile(self, profile, bundle_identifier):
- """Finds the .mobileprovision file to use for signing the bundle.
-
- Checks all the installed provisioning profiles (or if the user specified
- the PROVISIONING_PROFILE variable, only consult it) and select the most
- specific that correspond to the bundle identifier.
-
- Args:
- profile: string, optional, short name of the .mobileprovision file
- to use, if empty or the file is missing, the best file installed
- will be used
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
-
- Returns:
- A tuple of the path to the selected provisioning profile, the data of
- the embedded plist in the provisioning profile and the team identifier
- to use for code signing.
-
- Raises:
- SystemExit: if no .mobileprovision can be used to sign the bundle.
- """
- profiles_dir = os.path.join(
- os.environ['HOME'], 'Library', 'MobileDevice', 'Provisioning Profiles')
- if not os.path.isdir(profiles_dir):
- print >>sys.stderr, (
- 'cannot find mobile provisioning for %s' % bundle_identifier)
- sys.exit(1)
- provisioning_profiles = None
- if profile:
- profile_path = os.path.join(profiles_dir, profile + '.mobileprovision')
- if os.path.exists(profile_path):
- provisioning_profiles = [profile_path]
- if not provisioning_profiles:
- provisioning_profiles = glob.glob(
- os.path.join(profiles_dir, '*.mobileprovision'))
- valid_provisioning_profiles = {}
- for profile_path in provisioning_profiles:
- profile_data = self._LoadProvisioningProfile(profile_path)
- app_id_pattern = profile_data.get(
- 'Entitlements', {}).get('application-identifier', '')
- for team_identifier in profile_data.get('TeamIdentifier', []):
- app_id = '%s.%s' % (team_identifier, bundle_identifier)
- if fnmatch.fnmatch(app_id, app_id_pattern):
- valid_provisioning_profiles[app_id_pattern] = (
- profile_path, profile_data, team_identifier)
- if not valid_provisioning_profiles:
- print >>sys.stderr, (
- 'cannot find mobile provisioning for %s' % bundle_identifier)
- sys.exit(1)
- # If the user has multiple provisioning profiles installed that can be
- # used for ${bundle_identifier}, pick the most specific one (ie. the
- # provisioning profile whose pattern is the longest).
- selected_key = max(valid_provisioning_profiles, key=lambda v: len(v))
- return valid_provisioning_profiles[selected_key]
-
- def _LoadProvisioningProfile(self, profile_path):
- """Extracts the plist embedded in a provisioning profile.
-
- Args:
- profile_path: string, path to the .mobileprovision file
-
- Returns:
- Content of the plist embedded in the provisioning profile as a dictionary.
- """
- with tempfile.NamedTemporaryFile() as temp:
- subprocess.check_call([
- 'security', 'cms', '-D', '-i', profile_path, '-o', temp.name])
- return self._LoadPlistMaybeBinary(temp.name)
-
- def _MergePlist(self, merged_plist, plist):
- """Merge |plist| into |merged_plist|."""
- for key, value in plist.iteritems():
- if isinstance(value, dict):
- merged_value = merged_plist.get(key, {})
- if isinstance(merged_value, dict):
- self._MergePlist(merged_value, value)
- merged_plist[key] = merged_value
- else:
- merged_plist[key] = value
- else:
- merged_plist[key] = value
-
- def _LoadPlistMaybeBinary(self, plist_path):
- """Loads into a memory a plist possibly encoded in binary format.
-
- This is a wrapper around plistlib.readPlist that tries to convert the
- plist to the XML format if it can't be parsed (assuming that it is in
- the binary format).
-
- Args:
- plist_path: string, path to a plist file, in XML or binary format
-
- Returns:
- Content of the plist as a dictionary.
- """
- try:
- # First, try to read the file using plistlib that only supports XML,
- # and if an exception is raised, convert a temporary copy to XML and
- # load that copy.
- return plistlib.readPlist(plist_path)
- except:
- pass
- with tempfile.NamedTemporaryFile() as temp:
- shutil.copy2(plist_path, temp.name)
- subprocess.check_call(['plutil', '-convert', 'xml1', temp.name])
- return plistlib.readPlist(temp.name)
-
- def _GetSubstitutions(self, bundle_identifier, app_identifier_prefix):
- """Constructs a dictionary of variable substitutions for Entitlements.plist.
-
- Args:
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
- app_identifier_prefix: string, value for AppIdentifierPrefix
-
- Returns:
- Dictionary of substitutions to apply when generating Entitlements.plist.
- """
- return {
- 'CFBundleIdentifier': bundle_identifier,
- 'AppIdentifierPrefix': app_identifier_prefix,
- }
-
- def _GetCFBundleIdentifier(self):
- """Extracts CFBundleIdentifier value from Info.plist in the bundle.
-
- Returns:
- Value of CFBundleIdentifier in the Info.plist located in the bundle.
- """
- info_plist_path = os.path.join(
- os.environ['TARGET_BUILD_DIR'],
- os.environ['INFOPLIST_PATH'])
- info_plist_data = self._LoadPlistMaybeBinary(info_plist_path)
- return info_plist_data['CFBundleIdentifier']
-
- def _InstallEntitlements(self, entitlements, substitutions, overrides):
- """Generates and install the ${BundleName}.xcent entitlements file.
-
- Expands variables "$(variable)" pattern in the source entitlements file,
- add extra entitlements defined in the .mobileprovision file and the copy
- the generated plist to "${BundlePath}.xcent".
-
- Args:
- entitlements: string, optional, path to the Entitlements.plist template
- to use, defaults to "${SDKROOT}/Entitlements.plist"
- substitutions: dictionary, variable substitutions
- overrides: dictionary, values to add to the entitlements
-
- Returns:
- Path to the generated entitlements file.
- """
- source_path = entitlements
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['PRODUCT_NAME'] + '.xcent')
- if not source_path:
- source_path = os.path.join(
- os.environ['SDKROOT'],
- 'Entitlements.plist')
- shutil.copy2(source_path, target_path)
- data = self._LoadPlistMaybeBinary(target_path)
- data = self._ExpandVariables(data, substitutions)
- if overrides:
- for key in overrides:
- if key not in data:
- data[key] = overrides[key]
- plistlib.writePlist(data, target_path)
- return target_path
-
- def _ExpandVariables(self, data, substitutions):
- """Expands variables "$(variable)" in data.
-
- Args:
- data: object, can be either string, list or dictionary
- substitutions: dictionary, variable substitutions to perform
-
- Returns:
- Copy of data where each references to "$(variable)" has been replaced
- by the corresponding value found in substitutions, or left intact if
- the key was not found.
- """
- if isinstance(data, str):
- for key, value in substitutions.iteritems():
- data = data.replace('$(%s)' % key, value)
- return data
- if isinstance(data, list):
- return [self._ExpandVariables(v, substitutions) for v in data]
- if isinstance(data, dict):
- return {k: self._ExpandVariables(data[k], substitutions) for k in data}
- return data
-
-if __name__ == '__main__':
- sys.exit(main(sys.argv[1:]))
diff --git a/test/addons-napi/test_number/build/Makefile b/test/addons-napi/test_number/build/Makefile
deleted file mode 100644
index 49b26eb6dc8f10..00000000000000
--- a/test/addons-napi/test_number/build/Makefile
+++ /dev/null
@@ -1,342 +0,0 @@
-# We borrow heavily from the kernel build setup, though we are simpler since
-# we don't have Kconfig tweaking settings on us.
-
-# The implicit make rules have it looking for RCS files, among other things.
-# We instead explicitly write all the rules we care about.
-# It's even quicker (saves ~200ms) to pass -r on the command line.
-MAKEFLAGS=-r
-
-# The source directory tree.
-srcdir := ..
-abs_srcdir := $(abspath $(srcdir))
-
-# The name of the builddir.
-builddir_name ?= .
-
-# The V=1 flag on command line makes us verbosely print command lines.
-ifdef V
- quiet=
-else
- quiet=quiet_
-endif
-
-# Specify BUILDTYPE=Release on the command line for a release build.
-BUILDTYPE ?= Release
-
-# Directory all our build output goes into.
-# Note that this must be two directories beneath src/ for unit tests to pass,
-# as they reach into the src/ directory for data with relative paths.
-builddir ?= $(builddir_name)/$(BUILDTYPE)
-abs_builddir := $(abspath $(builddir))
-depsdir := $(builddir)/.deps
-
-# Object output directory.
-obj := $(builddir)/obj
-abs_obj := $(abspath $(obj))
-
-# We build up a list of every single one of the targets so we can slurp in the
-# generated dependency rule Makefiles in one pass.
-all_deps :=
-
-
-
-CC.target ?= $(CC)
-CFLAGS.target ?= $(CPPFLAGS) $(CFLAGS)
-CXX.target ?= $(CXX)
-CXXFLAGS.target ?= $(CPPFLAGS) $(CXXFLAGS)
-LINK.target ?= $(LINK)
-LDFLAGS.target ?= $(LDFLAGS)
-AR.target ?= $(AR)
-
-# C++ apps need to be linked with g++.
-LINK ?= $(CXX.target)
-
-# TODO(evan): move all cross-compilation logic to gyp-time so we don't need
-# to replicate this environment fallback in make as well.
-CC.host ?= gcc
-CFLAGS.host ?= $(CPPFLAGS_host) $(CFLAGS_host)
-CXX.host ?= g++
-CXXFLAGS.host ?= $(CPPFLAGS_host) $(CXXFLAGS_host)
-LINK.host ?= $(CXX.host)
-LDFLAGS.host ?=
-AR.host ?= ar
-
-# Define a dir function that can handle spaces.
-# http://www.gnu.org/software/make/manual/make.html#Syntax-of-Functions
-# "leading spaces cannot appear in the text of the first argument as written.
-# These characters can be put into the argument value by variable substitution."
-empty :=
-space := $(empty) $(empty)
-
-# http://stackoverflow.com/questions/1189781/using-make-dir-or-notdir-on-a-path-with-spaces
-replace_spaces = $(subst $(space),?,$1)
-unreplace_spaces = $(subst ?,$(space),$1)
-dirx = $(call unreplace_spaces,$(dir $(call replace_spaces,$1)))
-
-# Flags to make gcc output dependency info. Note that you need to be
-# careful here to use the flags that ccache and distcc can understand.
-# We write to a dep file on the side first and then rename at the end
-# so we can't end up with a broken dep file.
-depfile = $(depsdir)/$(call replace_spaces,$@).d
-DEPFLAGS = -MMD -MF $(depfile).raw
-
-# We have to fixup the deps output in a few ways.
-# (1) the file output should mention the proper .o file.
-# ccache or distcc lose the path to the target, so we convert a rule of
-# the form:
-# foobar.o: DEP1 DEP2
-# into
-# path/to/foobar.o: DEP1 DEP2
-# (2) we want missing files not to cause us to fail to build.
-# We want to rewrite
-# foobar.o: DEP1 DEP2 \
-# DEP3
-# to
-# DEP1:
-# DEP2:
-# DEP3:
-# so if the files are missing, they're just considered phony rules.
-# We have to do some pretty insane escaping to get those backslashes
-# and dollar signs past make, the shell, and sed at the same time.
-# Doesn't work with spaces, but that's fine: .d files have spaces in
-# their names replaced with other characters.
-define fixup_dep
-# The depfile may not exist if the input file didn't have any #includes.
-touch $(depfile).raw
-# Fixup path as in (1).
-sed -e "s|^$(notdir $@)|$@|" $(depfile).raw >> $(depfile)
-# Add extra rules as in (2).
-# We remove slashes and replace spaces with new lines;
-# remove blank lines;
-# delete the first line and append a colon to the remaining lines.
-sed -e 's|\\||' -e 'y| |\n|' $(depfile).raw |\
- grep -v '^$$' |\
- sed -e 1d -e 's|$$|:|' \
- >> $(depfile)
-rm $(depfile).raw
-endef
-
-# Command definitions:
-# - cmd_foo is the actual command to run;
-# - quiet_cmd_foo is the brief-output summary of the command.
-
-quiet_cmd_cc = CC($(TOOLSET)) $@
-cmd_cc = $(CC.$(TOOLSET)) $(GYP_CFLAGS) $(DEPFLAGS) $(CFLAGS.$(TOOLSET)) -c -o $@ $<
-
-quiet_cmd_cxx = CXX($(TOOLSET)) $@
-cmd_cxx = $(CXX.$(TOOLSET)) $(GYP_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-
-quiet_cmd_objc = CXX($(TOOLSET)) $@
-cmd_objc = $(CC.$(TOOLSET)) $(GYP_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-quiet_cmd_objcxx = CXX($(TOOLSET)) $@
-cmd_objcxx = $(CXX.$(TOOLSET)) $(GYP_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-# Commands for precompiled header files.
-quiet_cmd_pch_c = CXX($(TOOLSET)) $@
-cmd_pch_c = $(CC.$(TOOLSET)) $(GYP_PCH_CFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-quiet_cmd_pch_cc = CXX($(TOOLSET)) $@
-cmd_pch_cc = $(CC.$(TOOLSET)) $(GYP_PCH_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-quiet_cmd_pch_m = CXX($(TOOLSET)) $@
-cmd_pch_m = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $<
-quiet_cmd_pch_mm = CXX($(TOOLSET)) $@
-cmd_pch_mm = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-# gyp-mac-tool is written next to the root Makefile by gyp.
-# Use $(4) for the command, since $(2) and $(3) are used as flag by do_cmd
-# already.
-quiet_cmd_mac_tool = MACTOOL $(4) $<
-cmd_mac_tool = ./gyp-mac-tool $(4) $< "$@"
-
-quiet_cmd_mac_package_framework = PACKAGE FRAMEWORK $@
-cmd_mac_package_framework = ./gyp-mac-tool package-framework "$@" $(4)
-
-quiet_cmd_infoplist = INFOPLIST $@
-cmd_infoplist = $(CC.$(TOOLSET)) -E -P -Wno-trigraphs -x c $(INFOPLIST_DEFINES) "$<" -o "$@"
-
-quiet_cmd_touch = TOUCH $@
-cmd_touch = touch $@
-
-quiet_cmd_copy = COPY $@
-# send stderr to /dev/null to ignore messages when linking directories.
-cmd_copy = rm -rf "$@" && cp -af "$<" "$@"
-
-quiet_cmd_alink = LIBTOOL-STATIC $@
-cmd_alink = rm -f $@ && ./gyp-mac-tool filter-libtool libtool $(GYP_LIBTOOLFLAGS) -static -o $@ $(filter %.o,$^)
-
-quiet_cmd_link = LINK($(TOOLSET)) $@
-cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS)
-
-quiet_cmd_solink = SOLINK($(TOOLSET)) $@
-cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS)
-
-quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@
-cmd_solink_module = $(LINK.$(TOOLSET)) -bundle $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS)
-
-
-# Define an escape_quotes function to escape single quotes.
-# This allows us to handle quotes properly as long as we always use
-# use single quotes and escape_quotes.
-escape_quotes = $(subst ','\'',$(1))
-# This comment is here just to include a ' to unconfuse syntax highlighting.
-# Define an escape_vars function to escape '$' variable syntax.
-# This allows us to read/write command lines with shell variables (e.g.
-# $LD_LIBRARY_PATH), without triggering make substitution.
-escape_vars = $(subst $$,$$$$,$(1))
-# Helper that expands to a shell command to echo a string exactly as it is in
-# make. This uses printf instead of echo because printf's behaviour with respect
-# to escape sequences is more portable than echo's across different shells
-# (e.g., dash, bash).
-exact_echo = printf '%s\n' '$(call escape_quotes,$(1))'
-
-# Helper to compare the command we're about to run against the command
-# we logged the last time we ran the command. Produces an empty
-# string (false) when the commands match.
-# Tricky point: Make has no string-equality test function.
-# The kernel uses the following, but it seems like it would have false
-# positives, where one string reordered its arguments.
-# arg_check = $(strip $(filter-out $(cmd_$(1)), $(cmd_$@)) \
-# $(filter-out $(cmd_$@), $(cmd_$(1))))
-# We instead substitute each for the empty string into the other, and
-# say they're equal if both substitutions produce the empty string.
-# .d files contain ? instead of spaces, take that into account.
-command_changed = $(or $(subst $(cmd_$(1)),,$(cmd_$(call replace_spaces,$@))),\
- $(subst $(cmd_$(call replace_spaces,$@)),,$(cmd_$(1))))
-
-# Helper that is non-empty when a prerequisite changes.
-# Normally make does this implicitly, but we force rules to always run
-# so we can check their command lines.
-# $? -- new prerequisites
-# $| -- order-only dependencies
-prereq_changed = $(filter-out FORCE_DO_CMD,$(filter-out $|,$?))
-
-# Helper that executes all postbuilds until one fails.
-define do_postbuilds
- @E=0;\
- for p in $(POSTBUILDS); do\
- eval $$p;\
- E=$$?;\
- if [ $$E -ne 0 ]; then\
- break;\
- fi;\
- done;\
- if [ $$E -ne 0 ]; then\
- rm -rf "$@";\
- exit $$E;\
- fi
-endef
-
-# do_cmd: run a command via the above cmd_foo names, if necessary.
-# Should always run for a given target to handle command-line changes.
-# Second argument, if non-zero, makes it do asm/C/C++ dependency munging.
-# Third argument, if non-zero, makes it do POSTBUILDS processing.
-# Note: We intentionally do NOT call dirx for depfile, since it contains ? for
-# spaces already and dirx strips the ? characters.
-define do_cmd
-$(if $(or $(command_changed),$(prereq_changed)),
- @$(call exact_echo, $($(quiet)cmd_$(1)))
- @mkdir -p "$(call dirx,$@)" "$(dir $(depfile))"
- $(if $(findstring flock,$(word 2,$(cmd_$1))),
- @$(cmd_$(1))
- @echo " $(quiet_cmd_$(1)): Finished",
- @$(cmd_$(1))
- )
- @$(call exact_echo,$(call escape_vars,cmd_$(call replace_spaces,$@) := $(cmd_$(1)))) > $(depfile)
- @$(if $(2),$(fixup_dep))
- $(if $(and $(3), $(POSTBUILDS)),
- $(call do_postbuilds)
- )
-)
-endef
-
-# Declare the "all" target first so it is the default,
-# even though we don't have the deps yet.
-.PHONY: all
-all:
-
-# make looks for ways to re-generate included makefiles, but in our case, we
-# don't have a direct way. Explicitly telling make that it has nothing to do
-# for them makes it go faster.
-%.d: ;
-
-# Use FORCE_DO_CMD to force a target to run. Should be coupled with
-# do_cmd.
-.PHONY: FORCE_DO_CMD
-FORCE_DO_CMD:
-
-TOOLSET := target
-# Suffix rules, putting all outputs into $(obj).
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cpp FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cxx FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.m FORCE_DO_CMD
- @$(call do_cmd,objc,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.mm FORCE_DO_CMD
- @$(call do_cmd,objcxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.S FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.s FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-# Try building from generated source, too.
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cpp FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cxx FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.m FORCE_DO_CMD
- @$(call do_cmd,objc,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.mm FORCE_DO_CMD
- @$(call do_cmd,objcxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.S FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.s FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-$(obj).$(TOOLSET)/%.o: $(obj)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.cpp FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.cxx FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.m FORCE_DO_CMD
- @$(call do_cmd,objc,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.mm FORCE_DO_CMD
- @$(call do_cmd,objcxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.S FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.s FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-
-ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
- $(findstring $(join ^,$(prefix)),\
- $(join ^,test_number.target.mk)))),)
- include test_number.target.mk
-endif
-
-quiet_cmd_regen_makefile = ACTION Regenerating $@
-cmd_regen_makefile = cd $(srcdir); /Users/trott/io.js/deps/npm/node_modules/node-gyp/gyp/gyp_main.py -fmake --ignore-environment "--toplevel-dir=." -I/Users/trott/io.js/test/addons-napi/test_number/build/config.gypi -I/Users/trott/io.js/deps/npm/node_modules/node-gyp/addon.gypi -I/Users/trott/io.js/common.gypi "--depth=." "-Goutput_dir=." "--generator-output=build" "-Dlibrary=shared_library" "-Dvisibility=default" "-Dnode_root_dir=/Users/trott/io.js" "-Dnode_gyp_dir=/Users/trott/io.js/deps/npm/node_modules/node-gyp" "-Dnode_lib_file=/Users/trott/io.js/$(Configuration)/node.lib" "-Dmodule_root_dir=/Users/trott/io.js/test/addons-napi/test_number" "-Dnode_engine=v8" binding.gyp
-Makefile: $(srcdir)/../../../deps/npm/node_modules/node-gyp/addon.gypi $(srcdir)/build/config.gypi $(srcdir)/binding.gyp $(srcdir)/../../../common.gypi
- $(call do_cmd,regen_makefile)
-
-# "all" is a concatenation of the "all" targets from all the included
-# sub-makefiles. This is just here to clarify.
-all:
-
-# Add in dependency-tracking rules. $(all_deps) is the list of every single
-# target in our tree. Only consider the ones with .d (dependency) info:
-d_files := $(wildcard $(foreach f,$(all_deps),$(depsdir)/$(f).d))
-ifneq ($(d_files),)
- include $(d_files)
-endif
diff --git a/test/addons-napi/test_number/build/binding.Makefile b/test/addons-napi/test_number/build/binding.Makefile
deleted file mode 100644
index f2bc3a7d0e01e0..00000000000000
--- a/test/addons-napi/test_number/build/binding.Makefile
+++ /dev/null
@@ -1,6 +0,0 @@
-# This file is generated by gyp; do not edit.
-
-export builddir_name ?= ./build/.
-.PHONY: all
-all:
- $(MAKE) test_number
diff --git a/test/addons-napi/test_number/build/config.gypi b/test/addons-napi/test_number/build/config.gypi
deleted file mode 100644
index 6a35cfccbb772b..00000000000000
--- a/test/addons-napi/test_number/build/config.gypi
+++ /dev/null
@@ -1,70 +0,0 @@
-# Do not edit. File was generated by node-gyp's "configure" step
-{
- "target_defaults": {
- "cflags": [],
- "default_configuration": "Release",
- "defines": [],
- "include_dirs": [],
- "libraries": []
- },
- "variables": {
- "asan": 0,
- "coverage": "false",
- "debug_devtools": "node",
- "debug_http2": "false",
- "debug_nghttp2": "false",
- "force_dynamic_crt": 0,
- "host_arch": "x64",
- "icu_data_file": "icudt59l.dat",
- "icu_data_in": "../../deps/icu-small/source/data/in/icudt59l.dat",
- "icu_endianness": "l",
- "icu_gyp_path": "tools/icu/icu-generic.gyp",
- "icu_locales": "en,root",
- "icu_path": "deps/icu-small",
- "icu_small": "true",
- "icu_ver_major": "59",
- "llvm_version": 0,
- "node_byteorder": "little",
- "node_enable_d8": "false",
- "node_enable_v8_vtunejit": "false",
- "node_install_npm": "true",
- "node_module_version": 58,
- "node_no_browser_globals": "false",
- "node_prefix": "/usr/local",
- "node_release_urlbase": "",
- "node_shared": "false",
- "node_shared_cares": "false",
- "node_shared_http_parser": "false",
- "node_shared_libuv": "false",
- "node_shared_openssl": "false",
- "node_shared_zlib": "false",
- "node_tag": "",
- "node_use_bundled_v8": "true",
- "node_use_dtrace": "true",
- "node_use_etw": "false",
- "node_use_lttng": "false",
- "node_use_openssl": "true",
- "node_use_perfctr": "false",
- "node_use_v8_platform": "true",
- "node_without_node_options": "false",
- "openssl_fips": "",
- "openssl_no_asm": 0,
- "shlib_suffix": "58.dylib",
- "target_arch": "x64",
- "uv_parent_path": "/deps/uv/",
- "uv_use_dtrace": "true",
- "v8_enable_gdbjit": 0,
- "v8_enable_i18n_support": 1,
- "v8_enable_inspector": 1,
- "v8_no_strict_aliasing": 1,
- "v8_optimized_debug": 0,
- "v8_promise_internal_field_count": 1,
- "v8_random_seed": 0,
- "v8_trace_maps": 0,
- "v8_use_snapshot": "true",
- "want_separate_host_toolset": 0,
- "xcode_version": "8.0",
- "nodedir": "/Users/trott/io.js",
- "standalone_static_library": 1
- }
-}
diff --git a/test/addons-napi/test_number/build/gyp-mac-tool b/test/addons-napi/test_number/build/gyp-mac-tool
deleted file mode 100755
index 8ef02b0493a003..00000000000000
--- a/test/addons-napi/test_number/build/gyp-mac-tool
+++ /dev/null
@@ -1,611 +0,0 @@
-#!/usr/bin/env python
-# Generated by gyp. Do not edit.
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Utility functions to perform Xcode-style build steps.
-
-These functions are executed via gyp-mac-tool when using the Makefile generator.
-"""
-
-import fcntl
-import fnmatch
-import glob
-import json
-import os
-import plistlib
-import re
-import shutil
-import string
-import subprocess
-import sys
-import tempfile
-
-
-def main(args):
- executor = MacTool()
- exit_code = executor.Dispatch(args)
- if exit_code is not None:
- sys.exit(exit_code)
-
-
-class MacTool(object):
- """This class performs all the Mac tooling steps. The methods can either be
- executed directly, or dispatched from an argument list."""
-
- def Dispatch(self, args):
- """Dispatches a string command to a method."""
- if len(args) < 1:
- raise Exception("Not enough arguments")
-
- method = "Exec%s" % self._CommandifyName(args[0])
- return getattr(self, method)(*args[1:])
-
- def _CommandifyName(self, name_string):
- """Transforms a tool name like copy-info-plist to CopyInfoPlist"""
- return name_string.title().replace('-', '')
-
- def ExecCopyBundleResource(self, source, dest, convert_to_binary):
- """Copies a resource file to the bundle/Resources directory, performing any
- necessary compilation on each resource."""
- extension = os.path.splitext(source)[1].lower()
- if os.path.isdir(source):
- # Copy tree.
- # TODO(thakis): This copies file attributes like mtime, while the
- # single-file branch below doesn't. This should probably be changed to
- # be consistent with the single-file branch.
- if os.path.exists(dest):
- shutil.rmtree(dest)
- shutil.copytree(source, dest)
- elif extension == '.xib':
- return self._CopyXIBFile(source, dest)
- elif extension == '.storyboard':
- return self._CopyXIBFile(source, dest)
- elif extension == '.strings':
- self._CopyStringsFile(source, dest, convert_to_binary)
- else:
- shutil.copy(source, dest)
-
- def _CopyXIBFile(self, source, dest):
- """Compiles a XIB file with ibtool into a binary plist in the bundle."""
-
- # ibtool sometimes crashes with relative paths. See crbug.com/314728.
- base = os.path.dirname(os.path.realpath(__file__))
- if os.path.relpath(source):
- source = os.path.join(base, source)
- if os.path.relpath(dest):
- dest = os.path.join(base, dest)
-
- args = ['xcrun', 'ibtool', '--errors', '--warnings', '--notices',
- '--output-format', 'human-readable-text', '--compile', dest, source]
- ibtool_section_re = re.compile(r'/\*.*\*/')
- ibtool_re = re.compile(r'.*note:.*is clipping its content')
- ibtoolout = subprocess.Popen(args, stdout=subprocess.PIPE)
- current_section_header = None
- for line in ibtoolout.stdout:
- if ibtool_section_re.match(line):
- current_section_header = line
- elif not ibtool_re.match(line):
- if current_section_header:
- sys.stdout.write(current_section_header)
- current_section_header = None
- sys.stdout.write(line)
- return ibtoolout.returncode
-
- def _ConvertToBinary(self, dest):
- subprocess.check_call([
- 'xcrun', 'plutil', '-convert', 'binary1', '-o', dest, dest])
-
- def _CopyStringsFile(self, source, dest, convert_to_binary):
- """Copies a .strings file using iconv to reconvert the input into UTF-16."""
- input_code = self._DetectInputEncoding(source) or "UTF-8"
-
- # Xcode's CpyCopyStringsFile / builtin-copyStrings seems to call
- # CFPropertyListCreateFromXMLData() behind the scenes; at least it prints
- # CFPropertyListCreateFromXMLData(): Old-style plist parser: missing
- # semicolon in dictionary.
- # on invalid files. Do the same kind of validation.
- import CoreFoundation
- s = open(source, 'rb').read()
- d = CoreFoundation.CFDataCreate(None, s, len(s))
- _, error = CoreFoundation.CFPropertyListCreateFromXMLData(None, d, 0, None)
- if error:
- return
-
- fp = open(dest, 'wb')
- fp.write(s.decode(input_code).encode('UTF-16'))
- fp.close()
-
- if convert_to_binary == 'True':
- self._ConvertToBinary(dest)
-
- def _DetectInputEncoding(self, file_name):
- """Reads the first few bytes from file_name and tries to guess the text
- encoding. Returns None as a guess if it can't detect it."""
- fp = open(file_name, 'rb')
- try:
- header = fp.read(3)
- except e:
- fp.close()
- return None
- fp.close()
- if header.startswith("\xFE\xFF"):
- return "UTF-16"
- elif header.startswith("\xFF\xFE"):
- return "UTF-16"
- elif header.startswith("\xEF\xBB\xBF"):
- return "UTF-8"
- else:
- return None
-
- def ExecCopyInfoPlist(self, source, dest, convert_to_binary, *keys):
- """Copies the |source| Info.plist to the destination directory |dest|."""
- # Read the source Info.plist into memory.
- fd = open(source, 'r')
- lines = fd.read()
- fd.close()
-
- # Insert synthesized key/value pairs (e.g. BuildMachineOSBuild).
- plist = plistlib.readPlistFromString(lines)
- if keys:
- plist = dict(plist.items() + json.loads(keys[0]).items())
- lines = plistlib.writePlistToString(plist)
-
- # Go through all the environment variables and replace them as variables in
- # the file.
- IDENT_RE = re.compile(r'[/\s]')
- for key in os.environ:
- if key.startswith('_'):
- continue
- evar = '${%s}' % key
- evalue = os.environ[key]
- lines = string.replace(lines, evar, evalue)
-
- # Xcode supports various suffices on environment variables, which are
- # all undocumented. :rfc1034identifier is used in the standard project
- # template these days, and :identifier was used earlier. They are used to
- # convert non-url characters into things that look like valid urls --
- # except that the replacement character for :identifier, '_' isn't valid
- # in a URL either -- oops, hence :rfc1034identifier was born.
- evar = '${%s:identifier}' % key
- evalue = IDENT_RE.sub('_', os.environ[key])
- lines = string.replace(lines, evar, evalue)
-
- evar = '${%s:rfc1034identifier}' % key
- evalue = IDENT_RE.sub('-', os.environ[key])
- lines = string.replace(lines, evar, evalue)
-
- # Remove any keys with values that haven't been replaced.
- lines = lines.split('\n')
- for i in range(len(lines)):
- if lines[i].strip().startswith("${"):
- lines[i] = None
- lines[i - 1] = None
- lines = '\n'.join(filter(lambda x: x is not None, lines))
-
- # Write out the file with variables replaced.
- fd = open(dest, 'w')
- fd.write(lines)
- fd.close()
-
- # Now write out PkgInfo file now that the Info.plist file has been
- # "compiled".
- self._WritePkgInfo(dest)
-
- if convert_to_binary == 'True':
- self._ConvertToBinary(dest)
-
- def _WritePkgInfo(self, info_plist):
- """This writes the PkgInfo file from the data stored in Info.plist."""
- plist = plistlib.readPlist(info_plist)
- if not plist:
- return
-
- # Only create PkgInfo for executable types.
- package_type = plist['CFBundlePackageType']
- if package_type != 'APPL':
- return
-
- # The format of PkgInfo is eight characters, representing the bundle type
- # and bundle signature, each four characters. If that is missing, four
- # '?' characters are used instead.
- signature_code = plist.get('CFBundleSignature', '????')
- if len(signature_code) != 4: # Wrong length resets everything, too.
- signature_code = '?' * 4
-
- dest = os.path.join(os.path.dirname(info_plist), 'PkgInfo')
- fp = open(dest, 'w')
- fp.write('%s%s' % (package_type, signature_code))
- fp.close()
-
- def ExecFlock(self, lockfile, *cmd_list):
- """Emulates the most basic behavior of Linux's flock(1)."""
- # Rely on exception handling to report errors.
- fd = os.open(lockfile, os.O_RDONLY|os.O_NOCTTY|os.O_CREAT, 0o666)
- fcntl.flock(fd, fcntl.LOCK_EX)
- return subprocess.call(cmd_list)
-
- def ExecFilterLibtool(self, *cmd_list):
- """Calls libtool and filters out '/path/to/libtool: file: foo.o has no
- symbols'."""
- libtool_re = re.compile(r'^.*libtool: file: .* has no symbols$')
- libtool_re5 = re.compile(
- r'^.*libtool: warning for library: ' +
- r'.* the table of contents is empty ' +
- r'\(no object file members in the library define global symbols\)$')
- env = os.environ.copy()
- # Ref:
- # http://www.opensource.apple.com/source/cctools/cctools-809/misc/libtool.c
- # The problem with this flag is that it resets the file mtime on the file to
- # epoch=0, e.g. 1970-1-1 or 1969-12-31 depending on timezone.
- env['ZERO_AR_DATE'] = '1'
- libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env)
- _, err = libtoolout.communicate()
- for line in err.splitlines():
- if not libtool_re.match(line) and not libtool_re5.match(line):
- print >>sys.stderr, line
- # Unconditionally touch the output .a file on the command line if present
- # and the command succeeded. A bit hacky.
- if not libtoolout.returncode:
- for i in range(len(cmd_list) - 1):
- if cmd_list[i] == "-o" and cmd_list[i+1].endswith('.a'):
- os.utime(cmd_list[i+1], None)
- break
- return libtoolout.returncode
-
- def ExecPackageFramework(self, framework, version):
- """Takes a path to Something.framework and the Current version of that and
- sets up all the symlinks."""
- # Find the name of the binary based on the part before the ".framework".
- binary = os.path.basename(framework).split('.')[0]
-
- CURRENT = 'Current'
- RESOURCES = 'Resources'
- VERSIONS = 'Versions'
-
- if not os.path.exists(os.path.join(framework, VERSIONS, version, binary)):
- # Binary-less frameworks don't seem to contain symlinks (see e.g.
- # chromium's out/Debug/org.chromium.Chromium.manifest/ bundle).
- return
-
- # Move into the framework directory to set the symlinks correctly.
- pwd = os.getcwd()
- os.chdir(framework)
-
- # Set up the Current version.
- self._Relink(version, os.path.join(VERSIONS, CURRENT))
-
- # Set up the root symlinks.
- self._Relink(os.path.join(VERSIONS, CURRENT, binary), binary)
- self._Relink(os.path.join(VERSIONS, CURRENT, RESOURCES), RESOURCES)
-
- # Back to where we were before!
- os.chdir(pwd)
-
- def _Relink(self, dest, link):
- """Creates a symlink to |dest| named |link|. If |link| already exists,
- it is overwritten."""
- if os.path.lexists(link):
- os.remove(link)
- os.symlink(dest, link)
-
- def ExecCompileXcassets(self, keys, *inputs):
- """Compiles multiple .xcassets files into a single .car file.
-
- This invokes 'actool' to compile all the inputs .xcassets files. The
- |keys| arguments is a json-encoded dictionary of extra arguments to
- pass to 'actool' when the asset catalogs contains an application icon
- or a launch image.
-
- Note that 'actool' does not create the Assets.car file if the asset
-    catalogs do not contain an imageset.
- """
- command_line = [
- 'xcrun', 'actool', '--output-format', 'human-readable-text',
- '--compress-pngs', '--notices', '--warnings', '--errors',
- ]
- is_iphone_target = 'IPHONEOS_DEPLOYMENT_TARGET' in os.environ
- if is_iphone_target:
- platform = os.environ['CONFIGURATION'].split('-')[-1]
- if platform not in ('iphoneos', 'iphonesimulator'):
- platform = 'iphonesimulator'
- command_line.extend([
- '--platform', platform, '--target-device', 'iphone',
- '--target-device', 'ipad', '--minimum-deployment-target',
- os.environ['IPHONEOS_DEPLOYMENT_TARGET'], '--compile',
- os.path.abspath(os.environ['CONTENTS_FOLDER_PATH']),
- ])
- else:
- command_line.extend([
- '--platform', 'macosx', '--target-device', 'mac',
- '--minimum-deployment-target', os.environ['MACOSX_DEPLOYMENT_TARGET'],
- '--compile',
- os.path.abspath(os.environ['UNLOCALIZED_RESOURCES_FOLDER_PATH']),
- ])
- if keys:
- keys = json.loads(keys)
- for key, value in keys.iteritems():
- arg_name = '--' + key
- if isinstance(value, bool):
- if value:
- command_line.append(arg_name)
- elif isinstance(value, list):
- for v in value:
- command_line.append(arg_name)
- command_line.append(str(v))
- else:
- command_line.append(arg_name)
- command_line.append(str(value))
-    # Note: actool crashes if input paths are relative, so use os.path.abspath
-    #       to get absolute path names for inputs.
- command_line.extend(map(os.path.abspath, inputs))
- subprocess.check_call(command_line)
-
- def ExecMergeInfoPlist(self, output, *inputs):
- """Merge multiple .plist files into a single .plist file."""
- merged_plist = {}
- for path in inputs:
- plist = self._LoadPlistMaybeBinary(path)
- self._MergePlist(merged_plist, plist)
- plistlib.writePlist(merged_plist, output)
-
- def ExecCodeSignBundle(self, key, resource_rules, entitlements, provisioning):
- """Code sign a bundle.
-
- This function tries to code sign an iOS bundle, following the same
- algorithm as Xcode:
- 1. copy ResourceRules.plist from the user or the SDK into the bundle,
-      2. pick the provisioning profile that best matches the bundle identifier,
- and copy it into the bundle as embedded.mobileprovision,
- 3. copy Entitlements.plist from user or SDK next to the bundle,
- 4. code sign the bundle.
- """
- resource_rules_path = self._InstallResourceRules(resource_rules)
- substitutions, overrides = self._InstallProvisioningProfile(
- provisioning, self._GetCFBundleIdentifier())
- entitlements_path = self._InstallEntitlements(
- entitlements, substitutions, overrides)
- subprocess.check_call([
- 'codesign', '--force', '--sign', key, '--resource-rules',
- resource_rules_path, '--entitlements', entitlements_path,
- os.path.join(
- os.environ['TARGET_BUILD_DIR'],
- os.environ['FULL_PRODUCT_NAME'])])
-
- def _InstallResourceRules(self, resource_rules):
- """Installs ResourceRules.plist from user or SDK into the bundle.
-
- Args:
- resource_rules: string, optional, path to the ResourceRules.plist file
-        to use, defaults to "${SDKROOT}/ResourceRules.plist"
-
- Returns:
-      Path to the copy of ResourceRules.plist in the bundle.
- """
- source_path = resource_rules
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['CONTENTS_FOLDER_PATH'],
- 'ResourceRules.plist')
- if not source_path:
- source_path = os.path.join(
- os.environ['SDKROOT'], 'ResourceRules.plist')
- shutil.copy2(source_path, target_path)
- return target_path
-
- def _InstallProvisioningProfile(self, profile, bundle_identifier):
- """Installs embedded.mobileprovision into the bundle.
-
- Args:
- profile: string, optional, short name of the .mobileprovision file
- to use, if empty or the file is missing, the best file installed
- will be used
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
-
- Returns:
-      A tuple containing two dictionaries: variable substitutions and values
-      to override when generating the entitlements file.
- """
- source_path, provisioning_data, team_id = self._FindProvisioningProfile(
- profile, bundle_identifier)
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['CONTENTS_FOLDER_PATH'],
- 'embedded.mobileprovision')
- shutil.copy2(source_path, target_path)
- substitutions = self._GetSubstitutions(bundle_identifier, team_id + '.')
- return substitutions, provisioning_data['Entitlements']
-
- def _FindProvisioningProfile(self, profile, bundle_identifier):
- """Finds the .mobileprovision file to use for signing the bundle.
-
-    Checks all the installed provisioning profiles (or, if the user specified
-    the PROVISIONING_PROFILE variable, only consults it) and selects the most
-    specific one that corresponds to the bundle identifier.
-
- Args:
- profile: string, optional, short name of the .mobileprovision file
- to use, if empty or the file is missing, the best file installed
- will be used
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
-
- Returns:
- A tuple of the path to the selected provisioning profile, the data of
- the embedded plist in the provisioning profile and the team identifier
- to use for code signing.
-
- Raises:
- SystemExit: if no .mobileprovision can be used to sign the bundle.
- """
- profiles_dir = os.path.join(
- os.environ['HOME'], 'Library', 'MobileDevice', 'Provisioning Profiles')
- if not os.path.isdir(profiles_dir):
- print >>sys.stderr, (
- 'cannot find mobile provisioning for %s' % bundle_identifier)
- sys.exit(1)
- provisioning_profiles = None
- if profile:
- profile_path = os.path.join(profiles_dir, profile + '.mobileprovision')
- if os.path.exists(profile_path):
- provisioning_profiles = [profile_path]
- if not provisioning_profiles:
- provisioning_profiles = glob.glob(
- os.path.join(profiles_dir, '*.mobileprovision'))
- valid_provisioning_profiles = {}
- for profile_path in provisioning_profiles:
- profile_data = self._LoadProvisioningProfile(profile_path)
- app_id_pattern = profile_data.get(
- 'Entitlements', {}).get('application-identifier', '')
- for team_identifier in profile_data.get('TeamIdentifier', []):
- app_id = '%s.%s' % (team_identifier, bundle_identifier)
- if fnmatch.fnmatch(app_id, app_id_pattern):
- valid_provisioning_profiles[app_id_pattern] = (
- profile_path, profile_data, team_identifier)
- if not valid_provisioning_profiles:
- print >>sys.stderr, (
- 'cannot find mobile provisioning for %s' % bundle_identifier)
- sys.exit(1)
- # If the user has multiple provisioning profiles installed that can be
-    # used for ${bundle_identifier}, pick the most specific one (i.e. the
- # provisioning profile whose pattern is the longest).
- selected_key = max(valid_provisioning_profiles, key=lambda v: len(v))
- return valid_provisioning_profiles[selected_key]
-
- def _LoadProvisioningProfile(self, profile_path):
- """Extracts the plist embedded in a provisioning profile.
-
- Args:
- profile_path: string, path to the .mobileprovision file
-
- Returns:
- Content of the plist embedded in the provisioning profile as a dictionary.
- """
- with tempfile.NamedTemporaryFile() as temp:
- subprocess.check_call([
- 'security', 'cms', '-D', '-i', profile_path, '-o', temp.name])
- return self._LoadPlistMaybeBinary(temp.name)
-
- def _MergePlist(self, merged_plist, plist):
- """Merge |plist| into |merged_plist|."""
- for key, value in plist.iteritems():
- if isinstance(value, dict):
- merged_value = merged_plist.get(key, {})
- if isinstance(merged_value, dict):
- self._MergePlist(merged_value, value)
- merged_plist[key] = merged_value
- else:
- merged_plist[key] = value
- else:
- merged_plist[key] = value
-
- def _LoadPlistMaybeBinary(self, plist_path):
- """Loads into a memory a plist possibly encoded in binary format.
-
- This is a wrapper around plistlib.readPlist that tries to convert the
- plist to the XML format if it can't be parsed (assuming that it is in
- the binary format).
-
- Args:
- plist_path: string, path to a plist file, in XML or binary format
-
- Returns:
- Content of the plist as a dictionary.
- """
- try:
-      # First, try to read the file using plistlib, which only supports XML,
- # and if an exception is raised, convert a temporary copy to XML and
- # load that copy.
- return plistlib.readPlist(plist_path)
- except:
- pass
- with tempfile.NamedTemporaryFile() as temp:
- shutil.copy2(plist_path, temp.name)
- subprocess.check_call(['plutil', '-convert', 'xml1', temp.name])
- return plistlib.readPlist(temp.name)
-
- def _GetSubstitutions(self, bundle_identifier, app_identifier_prefix):
- """Constructs a dictionary of variable substitutions for Entitlements.plist.
-
- Args:
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
- app_identifier_prefix: string, value for AppIdentifierPrefix
-
- Returns:
- Dictionary of substitutions to apply when generating Entitlements.plist.
- """
- return {
- 'CFBundleIdentifier': bundle_identifier,
- 'AppIdentifierPrefix': app_identifier_prefix,
- }
-
- def _GetCFBundleIdentifier(self):
- """Extracts CFBundleIdentifier value from Info.plist in the bundle.
-
- Returns:
- Value of CFBundleIdentifier in the Info.plist located in the bundle.
- """
- info_plist_path = os.path.join(
- os.environ['TARGET_BUILD_DIR'],
- os.environ['INFOPLIST_PATH'])
- info_plist_data = self._LoadPlistMaybeBinary(info_plist_path)
- return info_plist_data['CFBundleIdentifier']
-
- def _InstallEntitlements(self, entitlements, substitutions, overrides):
- """Generates and install the ${BundleName}.xcent entitlements file.
-
-    Expands the "$(variable)" patterns in the source entitlements file, adds
-    extra entitlements defined in the .mobileprovision file and copies the
-    generated plist to "${BundlePath}.xcent".
-
- Args:
- entitlements: string, optional, path to the Entitlements.plist template
- to use, defaults to "${SDKROOT}/Entitlements.plist"
- substitutions: dictionary, variable substitutions
- overrides: dictionary, values to add to the entitlements
-
- Returns:
- Path to the generated entitlements file.
- """
- source_path = entitlements
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['PRODUCT_NAME'] + '.xcent')
- if not source_path:
- source_path = os.path.join(
- os.environ['SDKROOT'],
- 'Entitlements.plist')
- shutil.copy2(source_path, target_path)
- data = self._LoadPlistMaybeBinary(target_path)
- data = self._ExpandVariables(data, substitutions)
- if overrides:
- for key in overrides:
- if key not in data:
- data[key] = overrides[key]
- plistlib.writePlist(data, target_path)
- return target_path
-
- def _ExpandVariables(self, data, substitutions):
- """Expands variables "$(variable)" in data.
-
- Args:
- data: object, can be either string, list or dictionary
- substitutions: dictionary, variable substitutions to perform
-
- Returns:
-      Copy of data where each reference to "$(variable)" has been replaced
- by the corresponding value found in substitutions, or left intact if
- the key was not found.
- """
- if isinstance(data, str):
- for key, value in substitutions.iteritems():
- data = data.replace('$(%s)' % key, value)
- return data
- if isinstance(data, list):
- return [self._ExpandVariables(v, substitutions) for v in data]
- if isinstance(data, dict):
- return {k: self._ExpandVariables(data[k], substitutions) for k in data}
- return data
-
-if __name__ == '__main__':
- sys.exit(main(sys.argv[1:]))
diff --git a/test/addons-napi/test_number/build/test_number.target.mk b/test/addons-napi/test_number/build/test_number.target.mk
deleted file mode 100644
index 15e104d6940ccf..00000000000000
--- a/test/addons-napi/test_number/build/test_number.target.mk
+++ /dev/null
@@ -1,178 +0,0 @@
-# This file is generated by gyp; do not edit.
-
-TOOLSET := target
-TARGET := test_number
-DEFS_Debug := \
- '-DNODE_GYP_MODULE_NAME=test_number' \
- '-DUSING_UV_SHARED=1' \
- '-DUSING_V8_SHARED=1' \
- '-DV8_DEPRECATION_WARNINGS=1' \
- '-D_DARWIN_USE_64_BIT_INODE=1' \
- '-D_LARGEFILE_SOURCE' \
- '-D_FILE_OFFSET_BITS=64' \
- '-DBUILDING_NODE_EXTENSION' \
- '-DDEBUG' \
- '-D_DEBUG' \
- '-DV8_ENABLE_CHECKS'
-
-# Flags passed to all source files.
-CFLAGS_Debug := \
- -O0 \
- -gdwarf-2 \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -Wall \
- -Wendif-labels \
- -W \
- -Wno-unused-parameter
-
-# Flags passed to only C files.
-CFLAGS_C_Debug := \
- -fno-strict-aliasing
-
-# Flags passed to only C++ files.
-CFLAGS_CC_Debug := \
- -std=gnu++0x \
- -stdlib=libc++ \
- -fno-rtti \
- -fno-exceptions \
- -fno-threadsafe-statics \
- -fno-strict-aliasing
-
-# Flags passed to only ObjC files.
-CFLAGS_OBJC_Debug :=
-
-# Flags passed to only ObjC++ files.
-CFLAGS_OBJCC_Debug :=
-
-INCS_Debug := \
- -I/Users/trott/io.js/include/node \
- -I/Users/trott/io.js/src \
- -I/Users/trott/io.js/deps/uv/include \
- -I/Users/trott/io.js/deps/v8/include
-
-DEFS_Release := \
- '-DNODE_GYP_MODULE_NAME=test_number' \
- '-DUSING_UV_SHARED=1' \
- '-DUSING_V8_SHARED=1' \
- '-DV8_DEPRECATION_WARNINGS=1' \
- '-D_DARWIN_USE_64_BIT_INODE=1' \
- '-D_LARGEFILE_SOURCE' \
- '-D_FILE_OFFSET_BITS=64' \
- '-DBUILDING_NODE_EXTENSION'
-
-# Flags passed to all source files.
-CFLAGS_Release := \
- -Os \
- -gdwarf-2 \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -Wall \
- -Wendif-labels \
- -W \
- -Wno-unused-parameter
-
-# Flags passed to only C files.
-CFLAGS_C_Release := \
- -fno-strict-aliasing
-
-# Flags passed to only C++ files.
-CFLAGS_CC_Release := \
- -std=gnu++0x \
- -stdlib=libc++ \
- -fno-rtti \
- -fno-exceptions \
- -fno-threadsafe-statics \
- -fno-strict-aliasing
-
-# Flags passed to only ObjC files.
-CFLAGS_OBJC_Release :=
-
-# Flags passed to only ObjC++ files.
-CFLAGS_OBJCC_Release :=
-
-INCS_Release := \
- -I/Users/trott/io.js/include/node \
- -I/Users/trott/io.js/src \
- -I/Users/trott/io.js/deps/uv/include \
- -I/Users/trott/io.js/deps/v8/include
-
-OBJS := \
- $(obj).target/$(TARGET)/test_number.o
-
-# Add to the list of files we specially track dependencies for.
-all_deps += $(OBJS)
-
-# CFLAGS et al overrides must be target-local.
-# See "Target-specific Variable Values" in the GNU Make manual.
-$(OBJS): TOOLSET := $(TOOLSET)
-$(OBJS): GYP_CFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE))
-$(OBJS): GYP_CXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE))
-$(OBJS): GYP_OBJCFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) $(CFLAGS_OBJC_$(BUILDTYPE))
-$(OBJS): GYP_OBJCXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) $(CFLAGS_OBJCC_$(BUILDTYPE))
-
-# Suffix rules, putting all outputs into $(obj).
-
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-# Try building from generated source, too.
-
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-# End of this set of suffix rules
-### Rules for final target.
-LDFLAGS_Debug := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -L$(builddir) \
- -stdlib=libc++
-
-LIBTOOLFLAGS_Debug := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first
-
-LDFLAGS_Release := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -L$(builddir) \
- -stdlib=libc++
-
-LIBTOOLFLAGS_Release := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first
-
-LIBS :=
-
-$(builddir)/test_number.node: GYP_LDFLAGS := $(LDFLAGS_$(BUILDTYPE))
-$(builddir)/test_number.node: LIBS := $(LIBS)
-$(builddir)/test_number.node: GYP_LIBTOOLFLAGS := $(LIBTOOLFLAGS_$(BUILDTYPE))
-$(builddir)/test_number.node: TOOLSET := $(TOOLSET)
-$(builddir)/test_number.node: $(OBJS) FORCE_DO_CMD
- $(call do_cmd,solink_module)
-
-all_deps += $(builddir)/test_number.node
-# Add target alias
-.PHONY: test_number
-test_number: $(builddir)/test_number.node
-
-# Short alias for building this executable.
-.PHONY: test_number.node
-test_number.node: $(builddir)/test_number.node
-
-# Add executable to "all" target.
-.PHONY: all
-all: $(builddir)/test_number.node
-
diff --git a/test/addons-napi/test_object/build/Makefile b/test/addons-napi/test_object/build/Makefile
deleted file mode 100644
index 56c8c664a2412b..00000000000000
--- a/test/addons-napi/test_object/build/Makefile
+++ /dev/null
@@ -1,342 +0,0 @@
-# We borrow heavily from the kernel build setup, though we are simpler since
-# we don't have Kconfig tweaking settings on us.
-
-# The implicit make rules have it looking for RCS files, among other things.
-# We instead explicitly write all the rules we care about.
-# It's even quicker (saves ~200ms) to pass -r on the command line.
-MAKEFLAGS=-r
-
-# The source directory tree.
-srcdir := ..
-abs_srcdir := $(abspath $(srcdir))
-
-# The name of the builddir.
-builddir_name ?= .
-
-# The V=1 flag on command line makes us verbosely print command lines.
-ifdef V
- quiet=
-else
- quiet=quiet_
-endif
-
-# Specify BUILDTYPE=Release on the command line for a release build.
-BUILDTYPE ?= Release
-
-# Directory all our build output goes into.
-# Note that this must be two directories beneath src/ for unit tests to pass,
-# as they reach into the src/ directory for data with relative paths.
-builddir ?= $(builddir_name)/$(BUILDTYPE)
-abs_builddir := $(abspath $(builddir))
-depsdir := $(builddir)/.deps
-
-# Object output directory.
-obj := $(builddir)/obj
-abs_obj := $(abspath $(obj))
-
-# We build up a list of every single one of the targets so we can slurp in the
-# generated dependency rule Makefiles in one pass.
-all_deps :=
-
-
-
-CC.target ?= $(CC)
-CFLAGS.target ?= $(CPPFLAGS) $(CFLAGS)
-CXX.target ?= $(CXX)
-CXXFLAGS.target ?= $(CPPFLAGS) $(CXXFLAGS)
-LINK.target ?= $(LINK)
-LDFLAGS.target ?= $(LDFLAGS)
-AR.target ?= $(AR)
-
-# C++ apps need to be linked with g++.
-LINK ?= $(CXX.target)
-
-# TODO(evan): move all cross-compilation logic to gyp-time so we don't need
-# to replicate this environment fallback in make as well.
-CC.host ?= gcc
-CFLAGS.host ?= $(CPPFLAGS_host) $(CFLAGS_host)
-CXX.host ?= g++
-CXXFLAGS.host ?= $(CPPFLAGS_host) $(CXXFLAGS_host)
-LINK.host ?= $(CXX.host)
-LDFLAGS.host ?=
-AR.host ?= ar
-
-# Define a dir function that can handle spaces.
-# http://www.gnu.org/software/make/manual/make.html#Syntax-of-Functions
-# "leading spaces cannot appear in the text of the first argument as written.
-# These characters can be put into the argument value by variable substitution."
-empty :=
-space := $(empty) $(empty)
-
-# http://stackoverflow.com/questions/1189781/using-make-dir-or-notdir-on-a-path-with-spaces
-replace_spaces = $(subst $(space),?,$1)
-unreplace_spaces = $(subst ?,$(space),$1)
-dirx = $(call unreplace_spaces,$(dir $(call replace_spaces,$1)))
-
-# Flags to make gcc output dependency info. Note that you need to be
-# careful here to use the flags that ccache and distcc can understand.
-# We write to a dep file on the side first and then rename at the end
-# so we can't end up with a broken dep file.
-depfile = $(depsdir)/$(call replace_spaces,$@).d
-DEPFLAGS = -MMD -MF $(depfile).raw
-
-# We have to fixup the deps output in a few ways.
-# (1) the file output should mention the proper .o file.
-# ccache or distcc lose the path to the target, so we convert a rule of
-# the form:
-# foobar.o: DEP1 DEP2
-# into
-# path/to/foobar.o: DEP1 DEP2
-# (2) we want missing files not to cause us to fail to build.
-# We want to rewrite
-# foobar.o: DEP1 DEP2 \
-# DEP3
-# to
-# DEP1:
-# DEP2:
-# DEP3:
-# so if the files are missing, they're just considered phony rules.
-# We have to do some pretty insane escaping to get those backslashes
-# and dollar signs past make, the shell, and sed at the same time.
-# Doesn't work with spaces, but that's fine: .d files have spaces in
-# their names replaced with other characters.
-define fixup_dep
-# The depfile may not exist if the input file didn't have any #includes.
-touch $(depfile).raw
-# Fixup path as in (1).
-sed -e "s|^$(notdir $@)|$@|" $(depfile).raw >> $(depfile)
-# Add extra rules as in (2).
-# We remove slashes and replace spaces with new lines;
-# remove blank lines;
-# delete the first line and append a colon to the remaining lines.
-sed -e 's|\\||' -e 'y| |\n|' $(depfile).raw |\
- grep -v '^$$' |\
- sed -e 1d -e 's|$$|:|' \
- >> $(depfile)
-rm $(depfile).raw
-endef
-
-# Command definitions:
-# - cmd_foo is the actual command to run;
-# - quiet_cmd_foo is the brief-output summary of the command.
-
-quiet_cmd_cc = CC($(TOOLSET)) $@
-cmd_cc = $(CC.$(TOOLSET)) $(GYP_CFLAGS) $(DEPFLAGS) $(CFLAGS.$(TOOLSET)) -c -o $@ $<
-
-quiet_cmd_cxx = CXX($(TOOLSET)) $@
-cmd_cxx = $(CXX.$(TOOLSET)) $(GYP_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-
-quiet_cmd_objc = CXX($(TOOLSET)) $@
-cmd_objc = $(CC.$(TOOLSET)) $(GYP_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-quiet_cmd_objcxx = CXX($(TOOLSET)) $@
-cmd_objcxx = $(CXX.$(TOOLSET)) $(GYP_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-# Commands for precompiled header files.
-quiet_cmd_pch_c = CXX($(TOOLSET)) $@
-cmd_pch_c = $(CC.$(TOOLSET)) $(GYP_PCH_CFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-quiet_cmd_pch_cc = CXX($(TOOLSET)) $@
-cmd_pch_cc = $(CC.$(TOOLSET)) $(GYP_PCH_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-quiet_cmd_pch_m = CXX($(TOOLSET)) $@
-cmd_pch_m = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $<
-quiet_cmd_pch_mm = CXX($(TOOLSET)) $@
-cmd_pch_mm = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-# gyp-mac-tool is written next to the root Makefile by gyp.
-# Use $(4) for the command, since $(2) and $(3) are used as flag by do_cmd
-# already.
-quiet_cmd_mac_tool = MACTOOL $(4) $<
-cmd_mac_tool = ./gyp-mac-tool $(4) $< "$@"
-
-quiet_cmd_mac_package_framework = PACKAGE FRAMEWORK $@
-cmd_mac_package_framework = ./gyp-mac-tool package-framework "$@" $(4)
-
-quiet_cmd_infoplist = INFOPLIST $@
-cmd_infoplist = $(CC.$(TOOLSET)) -E -P -Wno-trigraphs -x c $(INFOPLIST_DEFINES) "$<" -o "$@"
-
-quiet_cmd_touch = TOUCH $@
-cmd_touch = touch $@
-
-quiet_cmd_copy = COPY $@
-# send stderr to /dev/null to ignore messages when linking directories.
-cmd_copy = rm -rf "$@" && cp -af "$<" "$@"
-
-quiet_cmd_alink = LIBTOOL-STATIC $@
-cmd_alink = rm -f $@ && ./gyp-mac-tool filter-libtool libtool $(GYP_LIBTOOLFLAGS) -static -o $@ $(filter %.o,$^)
-
-quiet_cmd_link = LINK($(TOOLSET)) $@
-cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS)
-
-quiet_cmd_solink = SOLINK($(TOOLSET)) $@
-cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS)
-
-quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@
-cmd_solink_module = $(LINK.$(TOOLSET)) -bundle $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS)
-
-
-# Define an escape_quotes function to escape single quotes.
-# This allows us to handle quotes properly as long as we always use
-# single quotes and escape_quotes.
-escape_quotes = $(subst ','\'',$(1))
-# This comment is here just to include a ' to unconfuse syntax highlighting.
-# Define an escape_vars function to escape '$' variable syntax.
-# This allows us to read/write command lines with shell variables (e.g.
-# $LD_LIBRARY_PATH), without triggering make substitution.
-escape_vars = $(subst $$,$$$$,$(1))
-# Helper that expands to a shell command to echo a string exactly as it is in
-# make. This uses printf instead of echo because printf's behaviour with respect
-# to escape sequences is more portable than echo's across different shells
-# (e.g., dash, bash).
-exact_echo = printf '%s\n' '$(call escape_quotes,$(1))'
-
-# Helper to compare the command we're about to run against the command
-# we logged the last time we ran the command. Produces an empty
-# string (false) when the commands match.
-# Tricky point: Make has no string-equality test function.
-# The kernel uses the following, but it seems like it would have false
-# positives, where one string reordered its arguments.
-# arg_check = $(strip $(filter-out $(cmd_$(1)), $(cmd_$@)) \
-# $(filter-out $(cmd_$@), $(cmd_$(1))))
-# We instead substitute each for the empty string into the other, and
-# say they're equal if both substitutions produce the empty string.
-# .d files contain ? instead of spaces, take that into account.
-command_changed = $(or $(subst $(cmd_$(1)),,$(cmd_$(call replace_spaces,$@))),\
- $(subst $(cmd_$(call replace_spaces,$@)),,$(cmd_$(1))))
-
-# Helper that is non-empty when a prerequisite changes.
-# Normally make does this implicitly, but we force rules to always run
-# so we can check their command lines.
-# $? -- new prerequisites
-# $| -- order-only dependencies
-prereq_changed = $(filter-out FORCE_DO_CMD,$(filter-out $|,$?))
-
-# Helper that executes all postbuilds until one fails.
-define do_postbuilds
- @E=0;\
- for p in $(POSTBUILDS); do\
- eval $$p;\
- E=$$?;\
- if [ $$E -ne 0 ]; then\
- break;\
- fi;\
- done;\
- if [ $$E -ne 0 ]; then\
- rm -rf "$@";\
- exit $$E;\
- fi
-endef
-
-# do_cmd: run a command via the above cmd_foo names, if necessary.
-# Should always run for a given target to handle command-line changes.
-# Second argument, if non-zero, makes it do asm/C/C++ dependency munging.
-# Third argument, if non-zero, makes it do POSTBUILDS processing.
-# Note: We intentionally do NOT call dirx for depfile, since it contains ? for
-# spaces already and dirx strips the ? characters.
-define do_cmd
-$(if $(or $(command_changed),$(prereq_changed)),
- @$(call exact_echo, $($(quiet)cmd_$(1)))
- @mkdir -p "$(call dirx,$@)" "$(dir $(depfile))"
- $(if $(findstring flock,$(word 2,$(cmd_$1))),
- @$(cmd_$(1))
- @echo " $(quiet_cmd_$(1)): Finished",
- @$(cmd_$(1))
- )
- @$(call exact_echo,$(call escape_vars,cmd_$(call replace_spaces,$@) := $(cmd_$(1)))) > $(depfile)
- @$(if $(2),$(fixup_dep))
- $(if $(and $(3), $(POSTBUILDS)),
- $(call do_postbuilds)
- )
-)
-endef
-
-# Declare the "all" target first so it is the default,
-# even though we don't have the deps yet.
-.PHONY: all
-all:
-
-# make looks for ways to re-generate included makefiles, but in our case, we
-# don't have a direct way. Explicitly telling make that it has nothing to do
-# for them makes it go faster.
-%.d: ;
-
-# Use FORCE_DO_CMD to force a target to run. Should be coupled with
-# do_cmd.
-.PHONY: FORCE_DO_CMD
-FORCE_DO_CMD:
-
-TOOLSET := target
-# Suffix rules, putting all outputs into $(obj).
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cpp FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cxx FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.m FORCE_DO_CMD
- @$(call do_cmd,objc,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.mm FORCE_DO_CMD
- @$(call do_cmd,objcxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.S FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.s FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-# Try building from generated source, too.
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cpp FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cxx FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.m FORCE_DO_CMD
- @$(call do_cmd,objc,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.mm FORCE_DO_CMD
- @$(call do_cmd,objcxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.S FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.s FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-$(obj).$(TOOLSET)/%.o: $(obj)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.cpp FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.cxx FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.m FORCE_DO_CMD
- @$(call do_cmd,objc,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.mm FORCE_DO_CMD
- @$(call do_cmd,objcxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.S FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.s FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-
-ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
- $(findstring $(join ^,$(prefix)),\
- $(join ^,test_object.target.mk)))),)
- include test_object.target.mk
-endif
-
-quiet_cmd_regen_makefile = ACTION Regenerating $@
-cmd_regen_makefile = cd $(srcdir); /Users/trott/io.js/deps/npm/node_modules/node-gyp/gyp/gyp_main.py -fmake --ignore-environment "--toplevel-dir=." -I/Users/trott/io.js/test/addons-napi/test_object/build/config.gypi -I/Users/trott/io.js/deps/npm/node_modules/node-gyp/addon.gypi -I/Users/trott/io.js/common.gypi "--depth=." "-Goutput_dir=." "--generator-output=build" "-Dlibrary=shared_library" "-Dvisibility=default" "-Dnode_root_dir=/Users/trott/io.js" "-Dnode_gyp_dir=/Users/trott/io.js/deps/npm/node_modules/node-gyp" "-Dnode_lib_file=/Users/trott/io.js/$(Configuration)/node.lib" "-Dmodule_root_dir=/Users/trott/io.js/test/addons-napi/test_object" "-Dnode_engine=v8" binding.gyp
-Makefile: $(srcdir)/../../../deps/npm/node_modules/node-gyp/addon.gypi $(srcdir)/build/config.gypi $(srcdir)/binding.gyp $(srcdir)/../../../common.gypi
- $(call do_cmd,regen_makefile)
-
-# "all" is a concatenation of the "all" targets from all the included
-# sub-makefiles. This is just here to clarify.
-all:
-
-# Add in dependency-tracking rules. $(all_deps) is the list of every single
-# target in our tree. Only consider the ones with .d (dependency) info:
-d_files := $(wildcard $(foreach f,$(all_deps),$(depsdir)/$(f).d))
-ifneq ($(d_files),)
- include $(d_files)
-endif
diff --git a/test/addons-napi/test_object/build/binding.Makefile b/test/addons-napi/test_object/build/binding.Makefile
deleted file mode 100644
index 1e087a29868db5..00000000000000
--- a/test/addons-napi/test_object/build/binding.Makefile
+++ /dev/null
@@ -1,6 +0,0 @@
-# This file is generated by gyp; do not edit.
-
-export builddir_name ?= ./build/.
-.PHONY: all
-all:
- $(MAKE) test_object
diff --git a/test/addons-napi/test_object/build/config.gypi b/test/addons-napi/test_object/build/config.gypi
deleted file mode 100644
index 6a35cfccbb772b..00000000000000
--- a/test/addons-napi/test_object/build/config.gypi
+++ /dev/null
@@ -1,70 +0,0 @@
-# Do not edit. File was generated by node-gyp's "configure" step
-{
- "target_defaults": {
- "cflags": [],
- "default_configuration": "Release",
- "defines": [],
- "include_dirs": [],
- "libraries": []
- },
- "variables": {
- "asan": 0,
- "coverage": "false",
- "debug_devtools": "node",
- "debug_http2": "false",
- "debug_nghttp2": "false",
- "force_dynamic_crt": 0,
- "host_arch": "x64",
- "icu_data_file": "icudt59l.dat",
- "icu_data_in": "../../deps/icu-small/source/data/in/icudt59l.dat",
- "icu_endianness": "l",
- "icu_gyp_path": "tools/icu/icu-generic.gyp",
- "icu_locales": "en,root",
- "icu_path": "deps/icu-small",
- "icu_small": "true",
- "icu_ver_major": "59",
- "llvm_version": 0,
- "node_byteorder": "little",
- "node_enable_d8": "false",
- "node_enable_v8_vtunejit": "false",
- "node_install_npm": "true",
- "node_module_version": 58,
- "node_no_browser_globals": "false",
- "node_prefix": "/usr/local",
- "node_release_urlbase": "",
- "node_shared": "false",
- "node_shared_cares": "false",
- "node_shared_http_parser": "false",
- "node_shared_libuv": "false",
- "node_shared_openssl": "false",
- "node_shared_zlib": "false",
- "node_tag": "",
- "node_use_bundled_v8": "true",
- "node_use_dtrace": "true",
- "node_use_etw": "false",
- "node_use_lttng": "false",
- "node_use_openssl": "true",
- "node_use_perfctr": "false",
- "node_use_v8_platform": "true",
- "node_without_node_options": "false",
- "openssl_fips": "",
- "openssl_no_asm": 0,
- "shlib_suffix": "58.dylib",
- "target_arch": "x64",
- "uv_parent_path": "/deps/uv/",
- "uv_use_dtrace": "true",
- "v8_enable_gdbjit": 0,
- "v8_enable_i18n_support": 1,
- "v8_enable_inspector": 1,
- "v8_no_strict_aliasing": 1,
- "v8_optimized_debug": 0,
- "v8_promise_internal_field_count": 1,
- "v8_random_seed": 0,
- "v8_trace_maps": 0,
- "v8_use_snapshot": "true",
- "want_separate_host_toolset": 0,
- "xcode_version": "8.0",
- "nodedir": "/Users/trott/io.js",
- "standalone_static_library": 1
- }
-}
diff --git a/test/addons-napi/test_object/build/gyp-mac-tool b/test/addons-napi/test_object/build/gyp-mac-tool
deleted file mode 100755
index 8ef02b0493a003..00000000000000
--- a/test/addons-napi/test_object/build/gyp-mac-tool
+++ /dev/null
@@ -1,611 +0,0 @@
-#!/usr/bin/env python
-# Generated by gyp. Do not edit.
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Utility functions to perform Xcode-style build steps.
-
-These functions are executed via gyp-mac-tool when using the Makefile generator.
-"""
-
-import fcntl
-import fnmatch
-import glob
-import json
-import os
-import plistlib
-import re
-import shutil
-import string
-import subprocess
-import sys
-import tempfile
-
-
-def main(args):
- executor = MacTool()
- exit_code = executor.Dispatch(args)
- if exit_code is not None:
- sys.exit(exit_code)
-
-
-class MacTool(object):
- """This class performs all the Mac tooling steps. The methods can either be
- executed directly, or dispatched from an argument list."""
-
- def Dispatch(self, args):
- """Dispatches a string command to a method."""
- if len(args) < 1:
- raise Exception("Not enough arguments")
-
- method = "Exec%s" % self._CommandifyName(args[0])
- return getattr(self, method)(*args[1:])
-
- def _CommandifyName(self, name_string):
- """Transforms a tool name like copy-info-plist to CopyInfoPlist"""
- return name_string.title().replace('-', '')
-
- def ExecCopyBundleResource(self, source, dest, convert_to_binary):
- """Copies a resource file to the bundle/Resources directory, performing any
- necessary compilation on each resource."""
- extension = os.path.splitext(source)[1].lower()
- if os.path.isdir(source):
- # Copy tree.
- # TODO(thakis): This copies file attributes like mtime, while the
- # single-file branch below doesn't. This should probably be changed to
- # be consistent with the single-file branch.
- if os.path.exists(dest):
- shutil.rmtree(dest)
- shutil.copytree(source, dest)
- elif extension == '.xib':
- return self._CopyXIBFile(source, dest)
- elif extension == '.storyboard':
- return self._CopyXIBFile(source, dest)
- elif extension == '.strings':
- self._CopyStringsFile(source, dest, convert_to_binary)
- else:
- shutil.copy(source, dest)
-
- def _CopyXIBFile(self, source, dest):
- """Compiles a XIB file with ibtool into a binary plist in the bundle."""
-
- # ibtool sometimes crashes with relative paths. See crbug.com/314728.
- base = os.path.dirname(os.path.realpath(__file__))
- if os.path.relpath(source):
- source = os.path.join(base, source)
- if os.path.relpath(dest):
- dest = os.path.join(base, dest)
-
- args = ['xcrun', 'ibtool', '--errors', '--warnings', '--notices',
- '--output-format', 'human-readable-text', '--compile', dest, source]
- ibtool_section_re = re.compile(r'/\*.*\*/')
- ibtool_re = re.compile(r'.*note:.*is clipping its content')
- ibtoolout = subprocess.Popen(args, stdout=subprocess.PIPE)
- current_section_header = None
- for line in ibtoolout.stdout:
- if ibtool_section_re.match(line):
- current_section_header = line
- elif not ibtool_re.match(line):
- if current_section_header:
- sys.stdout.write(current_section_header)
- current_section_header = None
- sys.stdout.write(line)
- return ibtoolout.returncode
-
- def _ConvertToBinary(self, dest):
- subprocess.check_call([
- 'xcrun', 'plutil', '-convert', 'binary1', '-o', dest, dest])
-
- def _CopyStringsFile(self, source, dest, convert_to_binary):
- """Copies a .strings file using iconv to reconvert the input into UTF-16."""
- input_code = self._DetectInputEncoding(source) or "UTF-8"
-
- # Xcode's CpyCopyStringsFile / builtin-copyStrings seems to call
- # CFPropertyListCreateFromXMLData() behind the scenes; at least it prints
- # CFPropertyListCreateFromXMLData(): Old-style plist parser: missing
- # semicolon in dictionary.
- # on invalid files. Do the same kind of validation.
- import CoreFoundation
- s = open(source, 'rb').read()
- d = CoreFoundation.CFDataCreate(None, s, len(s))
- _, error = CoreFoundation.CFPropertyListCreateFromXMLData(None, d, 0, None)
- if error:
- return
-
- fp = open(dest, 'wb')
- fp.write(s.decode(input_code).encode('UTF-16'))
- fp.close()
-
- if convert_to_binary == 'True':
- self._ConvertToBinary(dest)
-
- def _DetectInputEncoding(self, file_name):
- """Reads the first few bytes from file_name and tries to guess the text
- encoding. Returns None as a guess if it can't detect it."""
- fp = open(file_name, 'rb')
- try:
- header = fp.read(3)
-    except Exception:
- fp.close()
- return None
- fp.close()
- if header.startswith("\xFE\xFF"):
- return "UTF-16"
- elif header.startswith("\xFF\xFE"):
- return "UTF-16"
- elif header.startswith("\xEF\xBB\xBF"):
- return "UTF-8"
- else:
- return None
-
- def ExecCopyInfoPlist(self, source, dest, convert_to_binary, *keys):
- """Copies the |source| Info.plist to the destination directory |dest|."""
- # Read the source Info.plist into memory.
- fd = open(source, 'r')
- lines = fd.read()
- fd.close()
-
- # Insert synthesized key/value pairs (e.g. BuildMachineOSBuild).
- plist = plistlib.readPlistFromString(lines)
- if keys:
- plist = dict(plist.items() + json.loads(keys[0]).items())
- lines = plistlib.writePlistToString(plist)
-
- # Go through all the environment variables and replace them as variables in
- # the file.
- IDENT_RE = re.compile(r'[/\s]')
- for key in os.environ:
- if key.startswith('_'):
- continue
- evar = '${%s}' % key
- evalue = os.environ[key]
- lines = string.replace(lines, evar, evalue)
-
-    # Xcode supports various suffixes on environment variables, which are
- # all undocumented. :rfc1034identifier is used in the standard project
- # template these days, and :identifier was used earlier. They are used to
- # convert non-url characters into things that look like valid urls --
- # except that the replacement character for :identifier, '_' isn't valid
- # in a URL either -- oops, hence :rfc1034identifier was born.
- evar = '${%s:identifier}' % key
- evalue = IDENT_RE.sub('_', os.environ[key])
- lines = string.replace(lines, evar, evalue)
-
- evar = '${%s:rfc1034identifier}' % key
- evalue = IDENT_RE.sub('-', os.environ[key])
- lines = string.replace(lines, evar, evalue)
-
- # Remove any keys with values that haven't been replaced.
- lines = lines.split('\n')
- for i in range(len(lines)):
- if lines[i].strip().startswith("${"):
- lines[i] = None
- lines[i - 1] = None
- lines = '\n'.join(filter(lambda x: x is not None, lines))
-
- # Write out the file with variables replaced.
- fd = open(dest, 'w')
- fd.write(lines)
- fd.close()
-
-    # Write out the PkgInfo file now that the Info.plist file has been
- # "compiled".
- self._WritePkgInfo(dest)
-
- if convert_to_binary == 'True':
- self._ConvertToBinary(dest)
-
- def _WritePkgInfo(self, info_plist):
- """This writes the PkgInfo file from the data stored in Info.plist."""
- plist = plistlib.readPlist(info_plist)
- if not plist:
- return
-
- # Only create PkgInfo for executable types.
- package_type = plist['CFBundlePackageType']
- if package_type != 'APPL':
- return
-
- # The format of PkgInfo is eight characters, representing the bundle type
- # and bundle signature, each four characters. If that is missing, four
- # '?' characters are used instead.
- signature_code = plist.get('CFBundleSignature', '????')
- if len(signature_code) != 4: # Wrong length resets everything, too.
- signature_code = '?' * 4
-
- dest = os.path.join(os.path.dirname(info_plist), 'PkgInfo')
- fp = open(dest, 'w')
- fp.write('%s%s' % (package_type, signature_code))
- fp.close()
-
- def ExecFlock(self, lockfile, *cmd_list):
- """Emulates the most basic behavior of Linux's flock(1)."""
- # Rely on exception handling to report errors.
- fd = os.open(lockfile, os.O_RDONLY|os.O_NOCTTY|os.O_CREAT, 0o666)
- fcntl.flock(fd, fcntl.LOCK_EX)
- return subprocess.call(cmd_list)
-
- def ExecFilterLibtool(self, *cmd_list):
- """Calls libtool and filters out '/path/to/libtool: file: foo.o has no
- symbols'."""
- libtool_re = re.compile(r'^.*libtool: file: .* has no symbols$')
- libtool_re5 = re.compile(
- r'^.*libtool: warning for library: ' +
- r'.* the table of contents is empty ' +
- r'\(no object file members in the library define global symbols\)$')
- env = os.environ.copy()
- # Ref:
- # http://www.opensource.apple.com/source/cctools/cctools-809/misc/libtool.c
-    # The problem with this flag is that it resets the mtime of the output file to
- # epoch=0, e.g. 1970-1-1 or 1969-12-31 depending on timezone.
- env['ZERO_AR_DATE'] = '1'
- libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env)
- _, err = libtoolout.communicate()
- for line in err.splitlines():
- if not libtool_re.match(line) and not libtool_re5.match(line):
- print >>sys.stderr, line
- # Unconditionally touch the output .a file on the command line if present
- # and the command succeeded. A bit hacky.
- if not libtoolout.returncode:
- for i in range(len(cmd_list) - 1):
- if cmd_list[i] == "-o" and cmd_list[i+1].endswith('.a'):
- os.utime(cmd_list[i+1], None)
- break
- return libtoolout.returncode
-
- def ExecPackageFramework(self, framework, version):
- """Takes a path to Something.framework and the Current version of that and
- sets up all the symlinks."""
- # Find the name of the binary based on the part before the ".framework".
- binary = os.path.basename(framework).split('.')[0]
-
- CURRENT = 'Current'
- RESOURCES = 'Resources'
- VERSIONS = 'Versions'
-
- if not os.path.exists(os.path.join(framework, VERSIONS, version, binary)):
- # Binary-less frameworks don't seem to contain symlinks (see e.g.
- # chromium's out/Debug/org.chromium.Chromium.manifest/ bundle).
- return
-
- # Move into the framework directory to set the symlinks correctly.
- pwd = os.getcwd()
- os.chdir(framework)
-
- # Set up the Current version.
- self._Relink(version, os.path.join(VERSIONS, CURRENT))
-
- # Set up the root symlinks.
- self._Relink(os.path.join(VERSIONS, CURRENT, binary), binary)
- self._Relink(os.path.join(VERSIONS, CURRENT, RESOURCES), RESOURCES)
-
- # Back to where we were before!
- os.chdir(pwd)
-
- def _Relink(self, dest, link):
- """Creates a symlink to |dest| named |link|. If |link| already exists,
- it is overwritten."""
- if os.path.lexists(link):
- os.remove(link)
- os.symlink(dest, link)
-
- def ExecCompileXcassets(self, keys, *inputs):
- """Compiles multiple .xcassets files into a single .car file.
-
-    This invokes 'actool' to compile all the input .xcassets files. The
-    |keys| argument is a json-encoded dictionary of extra arguments to
-    pass to 'actool' when the asset catalogs contain an application icon
- or a launch image.
-
- Note that 'actool' does not create the Assets.car file if the asset
-    catalogs do not contain an imageset.
- """
- command_line = [
- 'xcrun', 'actool', '--output-format', 'human-readable-text',
- '--compress-pngs', '--notices', '--warnings', '--errors',
- ]
- is_iphone_target = 'IPHONEOS_DEPLOYMENT_TARGET' in os.environ
- if is_iphone_target:
- platform = os.environ['CONFIGURATION'].split('-')[-1]
- if platform not in ('iphoneos', 'iphonesimulator'):
- platform = 'iphonesimulator'
- command_line.extend([
- '--platform', platform, '--target-device', 'iphone',
- '--target-device', 'ipad', '--minimum-deployment-target',
- os.environ['IPHONEOS_DEPLOYMENT_TARGET'], '--compile',
- os.path.abspath(os.environ['CONTENTS_FOLDER_PATH']),
- ])
- else:
- command_line.extend([
- '--platform', 'macosx', '--target-device', 'mac',
- '--minimum-deployment-target', os.environ['MACOSX_DEPLOYMENT_TARGET'],
- '--compile',
- os.path.abspath(os.environ['UNLOCALIZED_RESOURCES_FOLDER_PATH']),
- ])
- if keys:
- keys = json.loads(keys)
- for key, value in keys.iteritems():
- arg_name = '--' + key
- if isinstance(value, bool):
- if value:
- command_line.append(arg_name)
- elif isinstance(value, list):
- for v in value:
- command_line.append(arg_name)
- command_line.append(str(v))
- else:
- command_line.append(arg_name)
- command_line.append(str(value))
-    # Note: actool crashes if input paths are relative, so use os.path.abspath
-    #       to get absolute path names for inputs.
- command_line.extend(map(os.path.abspath, inputs))
- subprocess.check_call(command_line)
-
- def ExecMergeInfoPlist(self, output, *inputs):
- """Merge multiple .plist files into a single .plist file."""
- merged_plist = {}
- for path in inputs:
- plist = self._LoadPlistMaybeBinary(path)
- self._MergePlist(merged_plist, plist)
- plistlib.writePlist(merged_plist, output)
-
- def ExecCodeSignBundle(self, key, resource_rules, entitlements, provisioning):
- """Code sign a bundle.
-
- This function tries to code sign an iOS bundle, following the same
- algorithm as Xcode:
- 1. copy ResourceRules.plist from the user or the SDK into the bundle,
-      2. pick the provisioning profile that best matches the bundle identifier,
- and copy it into the bundle as embedded.mobileprovision,
- 3. copy Entitlements.plist from user or SDK next to the bundle,
- 4. code sign the bundle.
- """
- resource_rules_path = self._InstallResourceRules(resource_rules)
- substitutions, overrides = self._InstallProvisioningProfile(
- provisioning, self._GetCFBundleIdentifier())
- entitlements_path = self._InstallEntitlements(
- entitlements, substitutions, overrides)
- subprocess.check_call([
- 'codesign', '--force', '--sign', key, '--resource-rules',
- resource_rules_path, '--entitlements', entitlements_path,
- os.path.join(
- os.environ['TARGET_BUILD_DIR'],
- os.environ['FULL_PRODUCT_NAME'])])
-
- def _InstallResourceRules(self, resource_rules):
- """Installs ResourceRules.plist from user or SDK into the bundle.
-
- Args:
- resource_rules: string, optional, path to the ResourceRules.plist file
-        to use, defaults to "${SDKROOT}/ResourceRules.plist"
-
- Returns:
-      Path to the copy of ResourceRules.plist in the bundle.
- """
- source_path = resource_rules
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['CONTENTS_FOLDER_PATH'],
- 'ResourceRules.plist')
- if not source_path:
- source_path = os.path.join(
- os.environ['SDKROOT'], 'ResourceRules.plist')
- shutil.copy2(source_path, target_path)
- return target_path
-
- def _InstallProvisioningProfile(self, profile, bundle_identifier):
- """Installs embedded.mobileprovision into the bundle.
-
- Args:
- profile: string, optional, short name of the .mobileprovision file
- to use, if empty or the file is missing, the best file installed
- will be used
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
-
- Returns:
-      A tuple containing two dictionaries: variable substitutions and values
-      to override when generating the entitlements file.
- """
- source_path, provisioning_data, team_id = self._FindProvisioningProfile(
- profile, bundle_identifier)
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['CONTENTS_FOLDER_PATH'],
- 'embedded.mobileprovision')
- shutil.copy2(source_path, target_path)
- substitutions = self._GetSubstitutions(bundle_identifier, team_id + '.')
- return substitutions, provisioning_data['Entitlements']
-
- def _FindProvisioningProfile(self, profile, bundle_identifier):
- """Finds the .mobileprovision file to use for signing the bundle.
-
-    Checks all the installed provisioning profiles (or, if the user specified
-    the PROVISIONING_PROFILE variable, only consults it) and selects the most
-    specific one that corresponds to the bundle identifier.
-
- Args:
- profile: string, optional, short name of the .mobileprovision file
- to use, if empty or the file is missing, the best file installed
- will be used
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
-
- Returns:
- A tuple of the path to the selected provisioning profile, the data of
- the embedded plist in the provisioning profile and the team identifier
- to use for code signing.
-
- Raises:
- SystemExit: if no .mobileprovision can be used to sign the bundle.
- """
- profiles_dir = os.path.join(
- os.environ['HOME'], 'Library', 'MobileDevice', 'Provisioning Profiles')
- if not os.path.isdir(profiles_dir):
- print >>sys.stderr, (
- 'cannot find mobile provisioning for %s' % bundle_identifier)
- sys.exit(1)
- provisioning_profiles = None
- if profile:
- profile_path = os.path.join(profiles_dir, profile + '.mobileprovision')
- if os.path.exists(profile_path):
- provisioning_profiles = [profile_path]
- if not provisioning_profiles:
- provisioning_profiles = glob.glob(
- os.path.join(profiles_dir, '*.mobileprovision'))
- valid_provisioning_profiles = {}
- for profile_path in provisioning_profiles:
- profile_data = self._LoadProvisioningProfile(profile_path)
- app_id_pattern = profile_data.get(
- 'Entitlements', {}).get('application-identifier', '')
- for team_identifier in profile_data.get('TeamIdentifier', []):
- app_id = '%s.%s' % (team_identifier, bundle_identifier)
- if fnmatch.fnmatch(app_id, app_id_pattern):
- valid_provisioning_profiles[app_id_pattern] = (
- profile_path, profile_data, team_identifier)
- if not valid_provisioning_profiles:
- print >>sys.stderr, (
- 'cannot find mobile provisioning for %s' % bundle_identifier)
- sys.exit(1)
- # If the user has multiple provisioning profiles installed that can be
-    # used for ${bundle_identifier}, pick the most specific one (i.e. the
- # provisioning profile whose pattern is the longest).
- selected_key = max(valid_provisioning_profiles, key=lambda v: len(v))
- return valid_provisioning_profiles[selected_key]
-
- def _LoadProvisioningProfile(self, profile_path):
- """Extracts the plist embedded in a provisioning profile.
-
- Args:
- profile_path: string, path to the .mobileprovision file
-
- Returns:
- Content of the plist embedded in the provisioning profile as a dictionary.
- """
- with tempfile.NamedTemporaryFile() as temp:
- subprocess.check_call([
- 'security', 'cms', '-D', '-i', profile_path, '-o', temp.name])
- return self._LoadPlistMaybeBinary(temp.name)
-
- def _MergePlist(self, merged_plist, plist):
- """Merge |plist| into |merged_plist|."""
- for key, value in plist.iteritems():
- if isinstance(value, dict):
- merged_value = merged_plist.get(key, {})
- if isinstance(merged_value, dict):
- self._MergePlist(merged_value, value)
- merged_plist[key] = merged_value
- else:
- merged_plist[key] = value
- else:
- merged_plist[key] = value
-
- def _LoadPlistMaybeBinary(self, plist_path):
- """Loads into a memory a plist possibly encoded in binary format.
-
- This is a wrapper around plistlib.readPlist that tries to convert the
- plist to the XML format if it can't be parsed (assuming that it is in
- the binary format).
-
- Args:
- plist_path: string, path to a plist file, in XML or binary format
-
- Returns:
- Content of the plist as a dictionary.
- """
- try:
-      # First, try to read the file using plistlib, which only supports XML,
- # and if an exception is raised, convert a temporary copy to XML and
- # load that copy.
- return plistlib.readPlist(plist_path)
- except:
- pass
- with tempfile.NamedTemporaryFile() as temp:
- shutil.copy2(plist_path, temp.name)
- subprocess.check_call(['plutil', '-convert', 'xml1', temp.name])
- return plistlib.readPlist(temp.name)
-
- def _GetSubstitutions(self, bundle_identifier, app_identifier_prefix):
- """Constructs a dictionary of variable substitutions for Entitlements.plist.
-
- Args:
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
- app_identifier_prefix: string, value for AppIdentifierPrefix
-
- Returns:
- Dictionary of substitutions to apply when generating Entitlements.plist.
- """
- return {
- 'CFBundleIdentifier': bundle_identifier,
- 'AppIdentifierPrefix': app_identifier_prefix,
- }
-
- def _GetCFBundleIdentifier(self):
- """Extracts CFBundleIdentifier value from Info.plist in the bundle.
-
- Returns:
- Value of CFBundleIdentifier in the Info.plist located in the bundle.
- """
- info_plist_path = os.path.join(
- os.environ['TARGET_BUILD_DIR'],
- os.environ['INFOPLIST_PATH'])
- info_plist_data = self._LoadPlistMaybeBinary(info_plist_path)
- return info_plist_data['CFBundleIdentifier']
-
- def _InstallEntitlements(self, entitlements, substitutions, overrides):
- """Generates and install the ${BundleName}.xcent entitlements file.
-
-    Expands the "$(variable)" patterns in the source entitlements file, adds
-    extra entitlements defined in the .mobileprovision file and copies the
-    generated plist to "${BundlePath}.xcent".
-
- Args:
- entitlements: string, optional, path to the Entitlements.plist template
- to use, defaults to "${SDKROOT}/Entitlements.plist"
- substitutions: dictionary, variable substitutions
- overrides: dictionary, values to add to the entitlements
-
- Returns:
- Path to the generated entitlements file.
- """
- source_path = entitlements
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['PRODUCT_NAME'] + '.xcent')
- if not source_path:
- source_path = os.path.join(
- os.environ['SDKROOT'],
- 'Entitlements.plist')
- shutil.copy2(source_path, target_path)
- data = self._LoadPlistMaybeBinary(target_path)
- data = self._ExpandVariables(data, substitutions)
- if overrides:
- for key in overrides:
- if key not in data:
- data[key] = overrides[key]
- plistlib.writePlist(data, target_path)
- return target_path
-
- def _ExpandVariables(self, data, substitutions):
- """Expands variables "$(variable)" in data.
-
- Args:
- data: object, can be either string, list or dictionary
- substitutions: dictionary, variable substitutions to perform
-
- Returns:
-      Copy of data where each reference to "$(variable)" has been replaced
- by the corresponding value found in substitutions, or left intact if
- the key was not found.
- """
- if isinstance(data, str):
- for key, value in substitutions.iteritems():
- data = data.replace('$(%s)' % key, value)
- return data
- if isinstance(data, list):
- return [self._ExpandVariables(v, substitutions) for v in data]
- if isinstance(data, dict):
- return {k: self._ExpandVariables(data[k], substitutions) for k in data}
- return data
-
-if __name__ == '__main__':
- sys.exit(main(sys.argv[1:]))
diff --git a/test/addons-napi/test_object/build/test_object.target.mk b/test/addons-napi/test_object/build/test_object.target.mk
deleted file mode 100644
index 511a91b3b58d5f..00000000000000
--- a/test/addons-napi/test_object/build/test_object.target.mk
+++ /dev/null
@@ -1,178 +0,0 @@
-# This file is generated by gyp; do not edit.
-
-TOOLSET := target
-TARGET := test_object
-DEFS_Debug := \
- '-DNODE_GYP_MODULE_NAME=test_object' \
- '-DUSING_UV_SHARED=1' \
- '-DUSING_V8_SHARED=1' \
- '-DV8_DEPRECATION_WARNINGS=1' \
- '-D_DARWIN_USE_64_BIT_INODE=1' \
- '-D_LARGEFILE_SOURCE' \
- '-D_FILE_OFFSET_BITS=64' \
- '-DBUILDING_NODE_EXTENSION' \
- '-DDEBUG' \
- '-D_DEBUG' \
- '-DV8_ENABLE_CHECKS'
-
-# Flags passed to all source files.
-CFLAGS_Debug := \
- -O0 \
- -gdwarf-2 \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -Wall \
- -Wendif-labels \
- -W \
- -Wno-unused-parameter
-
-# Flags passed to only C files.
-CFLAGS_C_Debug := \
- -fno-strict-aliasing
-
-# Flags passed to only C++ files.
-CFLAGS_CC_Debug := \
- -std=gnu++0x \
- -stdlib=libc++ \
- -fno-rtti \
- -fno-exceptions \
- -fno-threadsafe-statics \
- -fno-strict-aliasing
-
-# Flags passed to only ObjC files.
-CFLAGS_OBJC_Debug :=
-
-# Flags passed to only ObjC++ files.
-CFLAGS_OBJCC_Debug :=
-
-INCS_Debug := \
- -I/Users/trott/io.js/include/node \
- -I/Users/trott/io.js/src \
- -I/Users/trott/io.js/deps/uv/include \
- -I/Users/trott/io.js/deps/v8/include
-
-DEFS_Release := \
- '-DNODE_GYP_MODULE_NAME=test_object' \
- '-DUSING_UV_SHARED=1' \
- '-DUSING_V8_SHARED=1' \
- '-DV8_DEPRECATION_WARNINGS=1' \
- '-D_DARWIN_USE_64_BIT_INODE=1' \
- '-D_LARGEFILE_SOURCE' \
- '-D_FILE_OFFSET_BITS=64' \
- '-DBUILDING_NODE_EXTENSION'
-
-# Flags passed to all source files.
-CFLAGS_Release := \
- -Os \
- -gdwarf-2 \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -Wall \
- -Wendif-labels \
- -W \
- -Wno-unused-parameter
-
-# Flags passed to only C files.
-CFLAGS_C_Release := \
- -fno-strict-aliasing
-
-# Flags passed to only C++ files.
-CFLAGS_CC_Release := \
- -std=gnu++0x \
- -stdlib=libc++ \
- -fno-rtti \
- -fno-exceptions \
- -fno-threadsafe-statics \
- -fno-strict-aliasing
-
-# Flags passed to only ObjC files.
-CFLAGS_OBJC_Release :=
-
-# Flags passed to only ObjC++ files.
-CFLAGS_OBJCC_Release :=
-
-INCS_Release := \
- -I/Users/trott/io.js/include/node \
- -I/Users/trott/io.js/src \
- -I/Users/trott/io.js/deps/uv/include \
- -I/Users/trott/io.js/deps/v8/include
-
-OBJS := \
- $(obj).target/$(TARGET)/test_object.o
-
-# Add to the list of files we specially track dependencies for.
-all_deps += $(OBJS)
-
-# CFLAGS et al overrides must be target-local.
-# See "Target-specific Variable Values" in the GNU Make manual.
-$(OBJS): TOOLSET := $(TOOLSET)
-$(OBJS): GYP_CFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE))
-$(OBJS): GYP_CXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE))
-$(OBJS): GYP_OBJCFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) $(CFLAGS_OBJC_$(BUILDTYPE))
-$(OBJS): GYP_OBJCXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) $(CFLAGS_OBJCC_$(BUILDTYPE))
-
-# Suffix rules, putting all outputs into $(obj).
-
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-# Try building from generated source, too.
-
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-# End of this set of suffix rules
-### Rules for final target.
-LDFLAGS_Debug := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -L$(builddir) \
- -stdlib=libc++
-
-LIBTOOLFLAGS_Debug := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first
-
-LDFLAGS_Release := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -L$(builddir) \
- -stdlib=libc++
-
-LIBTOOLFLAGS_Release := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first
-
-LIBS :=
-
-$(builddir)/test_object.node: GYP_LDFLAGS := $(LDFLAGS_$(BUILDTYPE))
-$(builddir)/test_object.node: LIBS := $(LIBS)
-$(builddir)/test_object.node: GYP_LIBTOOLFLAGS := $(LIBTOOLFLAGS_$(BUILDTYPE))
-$(builddir)/test_object.node: TOOLSET := $(TOOLSET)
-$(builddir)/test_object.node: $(OBJS) FORCE_DO_CMD
- $(call do_cmd,solink_module)
-
-all_deps += $(builddir)/test_object.node
-# Add target alias
-.PHONY: test_object
-test_object: $(builddir)/test_object.node
-
-# Short alias for building this executable.
-.PHONY: test_object.node
-test_object.node: $(builddir)/test_object.node
-
-# Add executable to "all" target.
-.PHONY: all
-all: $(builddir)/test_object.node
-
diff --git a/test/addons-napi/test_promise/build/Makefile b/test/addons-napi/test_promise/build/Makefile
deleted file mode 100644
index f48fdd3f6616bd..00000000000000
--- a/test/addons-napi/test_promise/build/Makefile
+++ /dev/null
@@ -1,342 +0,0 @@
-# We borrow heavily from the kernel build setup, though we are simpler since
-# we don't have Kconfig tweaking settings on us.
-
-# The implicit make rules have it looking for RCS files, among other things.
-# We instead explicitly write all the rules we care about.
-# It's even quicker (saves ~200ms) to pass -r on the command line.
-MAKEFLAGS=-r
-
-# The source directory tree.
-srcdir := ..
-abs_srcdir := $(abspath $(srcdir))
-
-# The name of the builddir.
-builddir_name ?= .
-
-# The V=1 flag on command line makes us verbosely print command lines.
-ifdef V
- quiet=
-else
- quiet=quiet_
-endif
-
-# Specify BUILDTYPE=Release on the command line for a release build.
-BUILDTYPE ?= Release
-
-# Directory all our build output goes into.
-# Note that this must be two directories beneath src/ for unit tests to pass,
-# as they reach into the src/ directory for data with relative paths.
-builddir ?= $(builddir_name)/$(BUILDTYPE)
-abs_builddir := $(abspath $(builddir))
-depsdir := $(builddir)/.deps
-
-# Object output directory.
-obj := $(builddir)/obj
-abs_obj := $(abspath $(obj))
-
-# We build up a list of every single one of the targets so we can slurp in the
-# generated dependency rule Makefiles in one pass.
-all_deps :=
-
-
-
-CC.target ?= $(CC)
-CFLAGS.target ?= $(CPPFLAGS) $(CFLAGS)
-CXX.target ?= $(CXX)
-CXXFLAGS.target ?= $(CPPFLAGS) $(CXXFLAGS)
-LINK.target ?= $(LINK)
-LDFLAGS.target ?= $(LDFLAGS)
-AR.target ?= $(AR)
-
-# C++ apps need to be linked with g++.
-LINK ?= $(CXX.target)
-
-# TODO(evan): move all cross-compilation logic to gyp-time so we don't need
-# to replicate this environment fallback in make as well.
-CC.host ?= gcc
-CFLAGS.host ?= $(CPPFLAGS_host) $(CFLAGS_host)
-CXX.host ?= g++
-CXXFLAGS.host ?= $(CPPFLAGS_host) $(CXXFLAGS_host)
-LINK.host ?= $(CXX.host)
-LDFLAGS.host ?=
-AR.host ?= ar
-
-# Define a dir function that can handle spaces.
-# http://www.gnu.org/software/make/manual/make.html#Syntax-of-Functions
-# "leading spaces cannot appear in the text of the first argument as written.
-# These characters can be put into the argument value by variable substitution."
-empty :=
-space := $(empty) $(empty)
-
-# http://stackoverflow.com/questions/1189781/using-make-dir-or-notdir-on-a-path-with-spaces
-replace_spaces = $(subst $(space),?,$1)
-unreplace_spaces = $(subst ?,$(space),$1)
-dirx = $(call unreplace_spaces,$(dir $(call replace_spaces,$1)))
-
-# Flags to make gcc output dependency info. Note that you need to be
-# careful here to use the flags that ccache and distcc can understand.
-# We write to a dep file on the side first and then rename at the end
-# so we can't end up with a broken dep file.
-depfile = $(depsdir)/$(call replace_spaces,$@).d
-DEPFLAGS = -MMD -MF $(depfile).raw
-
-# We have to fixup the deps output in a few ways.
-# (1) the file output should mention the proper .o file.
-# ccache or distcc lose the path to the target, so we convert a rule of
-# the form:
-# foobar.o: DEP1 DEP2
-# into
-# path/to/foobar.o: DEP1 DEP2
-# (2) we want missing files not to cause us to fail to build.
-# We want to rewrite
-# foobar.o: DEP1 DEP2 \
-# DEP3
-# to
-# DEP1:
-# DEP2:
-# DEP3:
-# so if the files are missing, they're just considered phony rules.
-# We have to do some pretty insane escaping to get those backslashes
-# and dollar signs past make, the shell, and sed at the same time.
-# Doesn't work with spaces, but that's fine: .d files have spaces in
-# their names replaced with other characters.
-define fixup_dep
-# The depfile may not exist if the input file didn't have any #includes.
-touch $(depfile).raw
-# Fixup path as in (1).
-sed -e "s|^$(notdir $@)|$@|" $(depfile).raw >> $(depfile)
-# Add extra rules as in (2).
-# We remove slashes and replace spaces with new lines;
-# remove blank lines;
-# delete the first line and append a colon to the remaining lines.
-sed -e 's|\\||' -e 'y| |\n|' $(depfile).raw |\
- grep -v '^$$' |\
- sed -e 1d -e 's|$$|:|' \
- >> $(depfile)
-rm $(depfile).raw
-endef
-
-# Command definitions:
-# - cmd_foo is the actual command to run;
-# - quiet_cmd_foo is the brief-output summary of the command.
-
-quiet_cmd_cc = CC($(TOOLSET)) $@
-cmd_cc = $(CC.$(TOOLSET)) $(GYP_CFLAGS) $(DEPFLAGS) $(CFLAGS.$(TOOLSET)) -c -o $@ $<
-
-quiet_cmd_cxx = CXX($(TOOLSET)) $@
-cmd_cxx = $(CXX.$(TOOLSET)) $(GYP_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-
-quiet_cmd_objc = CXX($(TOOLSET)) $@
-cmd_objc = $(CC.$(TOOLSET)) $(GYP_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-quiet_cmd_objcxx = CXX($(TOOLSET)) $@
-cmd_objcxx = $(CXX.$(TOOLSET)) $(GYP_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-# Commands for precompiled header files.
-quiet_cmd_pch_c = CXX($(TOOLSET)) $@
-cmd_pch_c = $(CC.$(TOOLSET)) $(GYP_PCH_CFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-quiet_cmd_pch_cc = CXX($(TOOLSET)) $@
-cmd_pch_cc = $(CC.$(TOOLSET)) $(GYP_PCH_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-quiet_cmd_pch_m = CXX($(TOOLSET)) $@
-cmd_pch_m = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $<
-quiet_cmd_pch_mm = CXX($(TOOLSET)) $@
-cmd_pch_mm = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-# gyp-mac-tool is written next to the root Makefile by gyp.
-# Use $(4) for the command, since $(2) and $(3) are used as flags by do_cmd
-# already.
-quiet_cmd_mac_tool = MACTOOL $(4) $<
-cmd_mac_tool = ./gyp-mac-tool $(4) $< "$@"
-
-quiet_cmd_mac_package_framework = PACKAGE FRAMEWORK $@
-cmd_mac_package_framework = ./gyp-mac-tool package-framework "$@" $(4)
-
-quiet_cmd_infoplist = INFOPLIST $@
-cmd_infoplist = $(CC.$(TOOLSET)) -E -P -Wno-trigraphs -x c $(INFOPLIST_DEFINES) "$<" -o "$@"
-
-quiet_cmd_touch = TOUCH $@
-cmd_touch = touch $@
-
-quiet_cmd_copy = COPY $@
-# send stderr to /dev/null to ignore messages when linking directories.
-cmd_copy = rm -rf "$@" && cp -af "$<" "$@"
-
-quiet_cmd_alink = LIBTOOL-STATIC $@
-cmd_alink = rm -f $@ && ./gyp-mac-tool filter-libtool libtool $(GYP_LIBTOOLFLAGS) -static -o $@ $(filter %.o,$^)
-
-quiet_cmd_link = LINK($(TOOLSET)) $@
-cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS)
-
-quiet_cmd_solink = SOLINK($(TOOLSET)) $@
-cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS)
-
-quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@
-cmd_solink_module = $(LINK.$(TOOLSET)) -bundle $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS)
-
-
-# Define an escape_quotes function to escape single quotes.
-# This allows us to handle quotes properly as long as we always
-# use single quotes and escape_quotes.
-escape_quotes = $(subst ','\'',$(1))
-# This comment is here just to include a ' to unconfuse syntax highlighting.
-# Define an escape_vars function to escape '$' variable syntax.
-# This allows us to read/write command lines with shell variables (e.g.
-# $LD_LIBRARY_PATH), without triggering make substitution.
-escape_vars = $(subst $$,$$$$,$(1))
-# Helper that expands to a shell command to echo a string exactly as it is in
-# make. This uses printf instead of echo because printf's behaviour with respect
-# to escape sequences is more portable than echo's across different shells
-# (e.g., dash, bash).
-exact_echo = printf '%s\n' '$(call escape_quotes,$(1))'
-
-# Helper to compare the command we're about to run against the command
-# we logged the last time we ran the command. Produces an empty
-# string (false) when the commands match.
-# Tricky point: Make has no string-equality test function.
-# The kernel uses the following, but it seems like it would have false
-# positives, where one string reordered its arguments.
-# arg_check = $(strip $(filter-out $(cmd_$(1)), $(cmd_$@)) \
-# $(filter-out $(cmd_$@), $(cmd_$(1))))
-# We instead substitute each for the empty string into the other, and
-# say they're equal if both substitutions produce the empty string.
-# .d files contain ? instead of spaces, take that into account.
-command_changed = $(or $(subst $(cmd_$(1)),,$(cmd_$(call replace_spaces,$@))),\
- $(subst $(cmd_$(call replace_spaces,$@)),,$(cmd_$(1))))
-
-# Helper that is non-empty when a prerequisite changes.
-# Normally make does this implicitly, but we force rules to always run
-# so we can check their command lines.
-# $? -- new prerequisites
-# $| -- order-only dependencies
-prereq_changed = $(filter-out FORCE_DO_CMD,$(filter-out $|,$?))
-
-# Helper that executes all postbuilds until one fails.
-define do_postbuilds
- @E=0;\
- for p in $(POSTBUILDS); do\
- eval $$p;\
- E=$$?;\
- if [ $$E -ne 0 ]; then\
- break;\
- fi;\
- done;\
- if [ $$E -ne 0 ]; then\
- rm -rf "$@";\
- exit $$E;\
- fi
-endef
-
-# do_cmd: run a command via the above cmd_foo names, if necessary.
-# Should always run for a given target to handle command-line changes.
-# Second argument, if non-zero, makes it do asm/C/C++ dependency munging.
-# Third argument, if non-zero, makes it do POSTBUILDS processing.
-# Note: We intentionally do NOT call dirx for depfile, since it contains ? for
-# spaces already and dirx strips the ? characters.
-define do_cmd
-$(if $(or $(command_changed),$(prereq_changed)),
- @$(call exact_echo, $($(quiet)cmd_$(1)))
- @mkdir -p "$(call dirx,$@)" "$(dir $(depfile))"
- $(if $(findstring flock,$(word 2,$(cmd_$1))),
- @$(cmd_$(1))
- @echo " $(quiet_cmd_$(1)): Finished",
- @$(cmd_$(1))
- )
- @$(call exact_echo,$(call escape_vars,cmd_$(call replace_spaces,$@) := $(cmd_$(1)))) > $(depfile)
- @$(if $(2),$(fixup_dep))
- $(if $(and $(3), $(POSTBUILDS)),
- $(call do_postbuilds)
- )
-)
-endef
-
-# Declare the "all" target first so it is the default,
-# even though we don't have the deps yet.
-.PHONY: all
-all:
-
-# make looks for ways to re-generate included makefiles, but in our case, we
-# don't have a direct way. Explicitly telling make that it has nothing to do
-# for them makes it go faster.
-%.d: ;
-
-# Use FORCE_DO_CMD to force a target to run. Should be coupled with
-# do_cmd.
-.PHONY: FORCE_DO_CMD
-FORCE_DO_CMD:
-
-TOOLSET := target
-# Suffix rules, putting all outputs into $(obj).
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cpp FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cxx FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.m FORCE_DO_CMD
- @$(call do_cmd,objc,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.mm FORCE_DO_CMD
- @$(call do_cmd,objcxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.S FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.s FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-# Try building from generated source, too.
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cpp FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cxx FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.m FORCE_DO_CMD
- @$(call do_cmd,objc,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.mm FORCE_DO_CMD
- @$(call do_cmd,objcxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.S FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.s FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-$(obj).$(TOOLSET)/%.o: $(obj)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.cpp FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.cxx FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.m FORCE_DO_CMD
- @$(call do_cmd,objc,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.mm FORCE_DO_CMD
- @$(call do_cmd,objcxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.S FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.s FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-
-ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
- $(findstring $(join ^,$(prefix)),\
- $(join ^,test_promise.target.mk)))),)
- include test_promise.target.mk
-endif
-
-quiet_cmd_regen_makefile = ACTION Regenerating $@
-cmd_regen_makefile = cd $(srcdir); /Users/trott/io.js/deps/npm/node_modules/node-gyp/gyp/gyp_main.py -fmake --ignore-environment "--toplevel-dir=." -I/Users/trott/io.js/test/addons-napi/test_promise/build/config.gypi -I/Users/trott/io.js/deps/npm/node_modules/node-gyp/addon.gypi -I/Users/trott/io.js/common.gypi "--depth=." "-Goutput_dir=." "--generator-output=build" "-Dlibrary=shared_library" "-Dvisibility=default" "-Dnode_root_dir=/Users/trott/io.js" "-Dnode_gyp_dir=/Users/trott/io.js/deps/npm/node_modules/node-gyp" "-Dnode_lib_file=/Users/trott/io.js/$(Configuration)/node.lib" "-Dmodule_root_dir=/Users/trott/io.js/test/addons-napi/test_promise" "-Dnode_engine=v8" binding.gyp
-Makefile: $(srcdir)/../../../deps/npm/node_modules/node-gyp/addon.gypi $(srcdir)/build/config.gypi $(srcdir)/binding.gyp $(srcdir)/../../../common.gypi
- $(call do_cmd,regen_makefile)
-
-# "all" is a concatenation of the "all" targets from all the included
-# sub-makefiles. This is just here to clarify.
-all:
-
-# Add in dependency-tracking rules. $(all_deps) is the list of every single
-# target in our tree. Only consider the ones with .d (dependency) info:
-d_files := $(wildcard $(foreach f,$(all_deps),$(depsdir)/$(f).d))
-ifneq ($(d_files),)
- include $(d_files)
-endif
diff --git a/test/addons-napi/test_promise/build/binding.Makefile b/test/addons-napi/test_promise/build/binding.Makefile
deleted file mode 100644
index 8070d2e1136429..00000000000000
--- a/test/addons-napi/test_promise/build/binding.Makefile
+++ /dev/null
@@ -1,6 +0,0 @@
-# This file is generated by gyp; do not edit.
-
-export builddir_name ?= ./build/.
-.PHONY: all
-all:
- $(MAKE) test_promise
diff --git a/test/addons-napi/test_promise/build/config.gypi b/test/addons-napi/test_promise/build/config.gypi
deleted file mode 100644
index 6a35cfccbb772b..00000000000000
--- a/test/addons-napi/test_promise/build/config.gypi
+++ /dev/null
@@ -1,70 +0,0 @@
-# Do not edit. File was generated by node-gyp's "configure" step
-{
- "target_defaults": {
- "cflags": [],
- "default_configuration": "Release",
- "defines": [],
- "include_dirs": [],
- "libraries": []
- },
- "variables": {
- "asan": 0,
- "coverage": "false",
- "debug_devtools": "node",
- "debug_http2": "false",
- "debug_nghttp2": "false",
- "force_dynamic_crt": 0,
- "host_arch": "x64",
- "icu_data_file": "icudt59l.dat",
- "icu_data_in": "../../deps/icu-small/source/data/in/icudt59l.dat",
- "icu_endianness": "l",
- "icu_gyp_path": "tools/icu/icu-generic.gyp",
- "icu_locales": "en,root",
- "icu_path": "deps/icu-small",
- "icu_small": "true",
- "icu_ver_major": "59",
- "llvm_version": 0,
- "node_byteorder": "little",
- "node_enable_d8": "false",
- "node_enable_v8_vtunejit": "false",
- "node_install_npm": "true",
- "node_module_version": 58,
- "node_no_browser_globals": "false",
- "node_prefix": "/usr/local",
- "node_release_urlbase": "",
- "node_shared": "false",
- "node_shared_cares": "false",
- "node_shared_http_parser": "false",
- "node_shared_libuv": "false",
- "node_shared_openssl": "false",
- "node_shared_zlib": "false",
- "node_tag": "",
- "node_use_bundled_v8": "true",
- "node_use_dtrace": "true",
- "node_use_etw": "false",
- "node_use_lttng": "false",
- "node_use_openssl": "true",
- "node_use_perfctr": "false",
- "node_use_v8_platform": "true",
- "node_without_node_options": "false",
- "openssl_fips": "",
- "openssl_no_asm": 0,
- "shlib_suffix": "58.dylib",
- "target_arch": "x64",
- "uv_parent_path": "/deps/uv/",
- "uv_use_dtrace": "true",
- "v8_enable_gdbjit": 0,
- "v8_enable_i18n_support": 1,
- "v8_enable_inspector": 1,
- "v8_no_strict_aliasing": 1,
- "v8_optimized_debug": 0,
- "v8_promise_internal_field_count": 1,
- "v8_random_seed": 0,
- "v8_trace_maps": 0,
- "v8_use_snapshot": "true",
- "want_separate_host_toolset": 0,
- "xcode_version": "8.0",
- "nodedir": "/Users/trott/io.js",
- "standalone_static_library": 1
- }
-}
diff --git a/test/addons-napi/test_promise/build/gyp-mac-tool b/test/addons-napi/test_promise/build/gyp-mac-tool
deleted file mode 100755
index 8ef02b0493a003..00000000000000
--- a/test/addons-napi/test_promise/build/gyp-mac-tool
+++ /dev/null
@@ -1,611 +0,0 @@
-#!/usr/bin/env python
-# Generated by gyp. Do not edit.
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Utility functions to perform Xcode-style build steps.
-
-These functions are executed via gyp-mac-tool when using the Makefile generator.
-"""
-
-import fcntl
-import fnmatch
-import glob
-import json
-import os
-import plistlib
-import re
-import shutil
-import string
-import subprocess
-import sys
-import tempfile
-
-
-def main(args):
- executor = MacTool()
- exit_code = executor.Dispatch(args)
- if exit_code is not None:
- sys.exit(exit_code)
-
-
-class MacTool(object):
- """This class performs all the Mac tooling steps. The methods can either be
- executed directly, or dispatched from an argument list."""
-
- def Dispatch(self, args):
- """Dispatches a string command to a method."""
- if len(args) < 1:
- raise Exception("Not enough arguments")
-
- method = "Exec%s" % self._CommandifyName(args[0])
- return getattr(self, method)(*args[1:])
-
- def _CommandifyName(self, name_string):
- """Transforms a tool name like copy-info-plist to CopyInfoPlist"""
- return name_string.title().replace('-', '')
-
- def ExecCopyBundleResource(self, source, dest, convert_to_binary):
- """Copies a resource file to the bundle/Resources directory, performing any
- necessary compilation on each resource."""
- extension = os.path.splitext(source)[1].lower()
- if os.path.isdir(source):
- # Copy tree.
- # TODO(thakis): This copies file attributes like mtime, while the
- # single-file branch below doesn't. This should probably be changed to
- # be consistent with the single-file branch.
- if os.path.exists(dest):
- shutil.rmtree(dest)
- shutil.copytree(source, dest)
- elif extension == '.xib':
- return self._CopyXIBFile(source, dest)
- elif extension == '.storyboard':
- return self._CopyXIBFile(source, dest)
- elif extension == '.strings':
- self._CopyStringsFile(source, dest, convert_to_binary)
- else:
- shutil.copy(source, dest)
-
- def _CopyXIBFile(self, source, dest):
- """Compiles a XIB file with ibtool into a binary plist in the bundle."""
-
- # ibtool sometimes crashes with relative paths. See crbug.com/314728.
- base = os.path.dirname(os.path.realpath(__file__))
- if os.path.relpath(source):
- source = os.path.join(base, source)
- if os.path.relpath(dest):
- dest = os.path.join(base, dest)
-
- args = ['xcrun', 'ibtool', '--errors', '--warnings', '--notices',
- '--output-format', 'human-readable-text', '--compile', dest, source]
- ibtool_section_re = re.compile(r'/\*.*\*/')
- ibtool_re = re.compile(r'.*note:.*is clipping its content')
- ibtoolout = subprocess.Popen(args, stdout=subprocess.PIPE)
- current_section_header = None
- for line in ibtoolout.stdout:
- if ibtool_section_re.match(line):
- current_section_header = line
- elif not ibtool_re.match(line):
- if current_section_header:
- sys.stdout.write(current_section_header)
- current_section_header = None
- sys.stdout.write(line)
- return ibtoolout.returncode
-
- def _ConvertToBinary(self, dest):
- subprocess.check_call([
- 'xcrun', 'plutil', '-convert', 'binary1', '-o', dest, dest])
-
- def _CopyStringsFile(self, source, dest, convert_to_binary):
- """Copies a .strings file using iconv to reconvert the input into UTF-16."""
- input_code = self._DetectInputEncoding(source) or "UTF-8"
-
- # Xcode's CpyCopyStringsFile / builtin-copyStrings seems to call
- # CFPropertyListCreateFromXMLData() behind the scenes; at least it prints
- # CFPropertyListCreateFromXMLData(): Old-style plist parser: missing
- # semicolon in dictionary.
- # on invalid files. Do the same kind of validation.
- import CoreFoundation
- s = open(source, 'rb').read()
- d = CoreFoundation.CFDataCreate(None, s, len(s))
- _, error = CoreFoundation.CFPropertyListCreateFromXMLData(None, d, 0, None)
- if error:
- return
-
- fp = open(dest, 'wb')
- fp.write(s.decode(input_code).encode('UTF-16'))
- fp.close()
-
- if convert_to_binary == 'True':
- self._ConvertToBinary(dest)
-
- def _DetectInputEncoding(self, file_name):
- """Reads the first few bytes from file_name and tries to guess the text
- encoding. Returns None as a guess if it can't detect it."""
- fp = open(file_name, 'rb')
- try:
- header = fp.read(3)
-    except Exception:
- fp.close()
- return None
- fp.close()
- if header.startswith("\xFE\xFF"):
- return "UTF-16"
- elif header.startswith("\xFF\xFE"):
- return "UTF-16"
- elif header.startswith("\xEF\xBB\xBF"):
- return "UTF-8"
- else:
- return None
-
- def ExecCopyInfoPlist(self, source, dest, convert_to_binary, *keys):
- """Copies the |source| Info.plist to the destination directory |dest|."""
- # Read the source Info.plist into memory.
- fd = open(source, 'r')
- lines = fd.read()
- fd.close()
-
- # Insert synthesized key/value pairs (e.g. BuildMachineOSBuild).
- plist = plistlib.readPlistFromString(lines)
- if keys:
- plist = dict(plist.items() + json.loads(keys[0]).items())
- lines = plistlib.writePlistToString(plist)
-
- # Go through all the environment variables and replace them as variables in
- # the file.
- IDENT_RE = re.compile(r'[/\s]')
- for key in os.environ:
- if key.startswith('_'):
- continue
- evar = '${%s}' % key
- evalue = os.environ[key]
- lines = string.replace(lines, evar, evalue)
-
-    # Xcode supports various suffixes on environment variables, which are
- # all undocumented. :rfc1034identifier is used in the standard project
- # template these days, and :identifier was used earlier. They are used to
- # convert non-url characters into things that look like valid urls --
- # except that the replacement character for :identifier, '_' isn't valid
- # in a URL either -- oops, hence :rfc1034identifier was born.
- evar = '${%s:identifier}' % key
- evalue = IDENT_RE.sub('_', os.environ[key])
- lines = string.replace(lines, evar, evalue)
-
- evar = '${%s:rfc1034identifier}' % key
- evalue = IDENT_RE.sub('-', os.environ[key])
- lines = string.replace(lines, evar, evalue)
-
- # Remove any keys with values that haven't been replaced.
- lines = lines.split('\n')
- for i in range(len(lines)):
- if lines[i].strip().startswith("${"):
- lines[i] = None
- lines[i - 1] = None
- lines = '\n'.join(filter(lambda x: x is not None, lines))
-
- # Write out the file with variables replaced.
- fd = open(dest, 'w')
- fd.write(lines)
- fd.close()
-
-    # Now write out the PkgInfo file, since the Info.plist file has been
-    # "compiled".
- self._WritePkgInfo(dest)
-
- if convert_to_binary == 'True':
- self._ConvertToBinary(dest)
-
- def _WritePkgInfo(self, info_plist):
- """This writes the PkgInfo file from the data stored in Info.plist."""
- plist = plistlib.readPlist(info_plist)
- if not plist:
- return
-
- # Only create PkgInfo for executable types.
- package_type = plist['CFBundlePackageType']
- if package_type != 'APPL':
- return
-
- # The format of PkgInfo is eight characters, representing the bundle type
- # and bundle signature, each four characters. If that is missing, four
- # '?' characters are used instead.
- signature_code = plist.get('CFBundleSignature', '????')
- if len(signature_code) != 4: # Wrong length resets everything, too.
- signature_code = '?' * 4
-
- dest = os.path.join(os.path.dirname(info_plist), 'PkgInfo')
- fp = open(dest, 'w')
- fp.write('%s%s' % (package_type, signature_code))
- fp.close()
-
- def ExecFlock(self, lockfile, *cmd_list):
- """Emulates the most basic behavior of Linux's flock(1)."""
- # Rely on exception handling to report errors.
- fd = os.open(lockfile, os.O_RDONLY|os.O_NOCTTY|os.O_CREAT, 0o666)
- fcntl.flock(fd, fcntl.LOCK_EX)
- return subprocess.call(cmd_list)
-
- def ExecFilterLibtool(self, *cmd_list):
- """Calls libtool and filters out '/path/to/libtool: file: foo.o has no
- symbols'."""
- libtool_re = re.compile(r'^.*libtool: file: .* has no symbols$')
- libtool_re5 = re.compile(
- r'^.*libtool: warning for library: ' +
- r'.* the table of contents is empty ' +
- r'\(no object file members in the library define global symbols\)$')
- env = os.environ.copy()
- # Ref:
- # http://www.opensource.apple.com/source/cctools/cctools-809/misc/libtool.c
- # The problem with this flag is that it resets the file mtime on the file to
- # epoch=0, e.g. 1970-1-1 or 1969-12-31 depending on timezone.
- env['ZERO_AR_DATE'] = '1'
- libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env)
- _, err = libtoolout.communicate()
- for line in err.splitlines():
- if not libtool_re.match(line) and not libtool_re5.match(line):
- print >>sys.stderr, line
- # Unconditionally touch the output .a file on the command line if present
- # and the command succeeded. A bit hacky.
- if not libtoolout.returncode:
- for i in range(len(cmd_list) - 1):
- if cmd_list[i] == "-o" and cmd_list[i+1].endswith('.a'):
- os.utime(cmd_list[i+1], None)
- break
- return libtoolout.returncode
-
- def ExecPackageFramework(self, framework, version):
- """Takes a path to Something.framework and the Current version of that and
- sets up all the symlinks."""
- # Find the name of the binary based on the part before the ".framework".
- binary = os.path.basename(framework).split('.')[0]
-
- CURRENT = 'Current'
- RESOURCES = 'Resources'
- VERSIONS = 'Versions'
-
- if not os.path.exists(os.path.join(framework, VERSIONS, version, binary)):
- # Binary-less frameworks don't seem to contain symlinks (see e.g.
- # chromium's out/Debug/org.chromium.Chromium.manifest/ bundle).
- return
-
- # Move into the framework directory to set the symlinks correctly.
- pwd = os.getcwd()
- os.chdir(framework)
-
- # Set up the Current version.
- self._Relink(version, os.path.join(VERSIONS, CURRENT))
-
- # Set up the root symlinks.
- self._Relink(os.path.join(VERSIONS, CURRENT, binary), binary)
- self._Relink(os.path.join(VERSIONS, CURRENT, RESOURCES), RESOURCES)
-
- # Back to where we were before!
- os.chdir(pwd)
-
- def _Relink(self, dest, link):
- """Creates a symlink to |dest| named |link|. If |link| already exists,
- it is overwritten."""
- if os.path.lexists(link):
- os.remove(link)
- os.symlink(dest, link)
-
- def ExecCompileXcassets(self, keys, *inputs):
- """Compiles multiple .xcassets files into a single .car file.
-
-    This invokes 'actool' to compile all the input .xcassets files. The
-    |keys| argument is a json-encoded dictionary of extra arguments to
-    pass to 'actool' when the asset catalogs contain an application icon
-    or a launch image.
-
-    Note that 'actool' does not create the Assets.car file if the asset
-    catalogs do not contain any imageset.
- """
- command_line = [
- 'xcrun', 'actool', '--output-format', 'human-readable-text',
- '--compress-pngs', '--notices', '--warnings', '--errors',
- ]
- is_iphone_target = 'IPHONEOS_DEPLOYMENT_TARGET' in os.environ
- if is_iphone_target:
- platform = os.environ['CONFIGURATION'].split('-')[-1]
- if platform not in ('iphoneos', 'iphonesimulator'):
- platform = 'iphonesimulator'
- command_line.extend([
- '--platform', platform, '--target-device', 'iphone',
- '--target-device', 'ipad', '--minimum-deployment-target',
- os.environ['IPHONEOS_DEPLOYMENT_TARGET'], '--compile',
- os.path.abspath(os.environ['CONTENTS_FOLDER_PATH']),
- ])
- else:
- command_line.extend([
- '--platform', 'macosx', '--target-device', 'mac',
- '--minimum-deployment-target', os.environ['MACOSX_DEPLOYMENT_TARGET'],
- '--compile',
- os.path.abspath(os.environ['UNLOCALIZED_RESOURCES_FOLDER_PATH']),
- ])
- if keys:
- keys = json.loads(keys)
- for key, value in keys.iteritems():
- arg_name = '--' + key
- if isinstance(value, bool):
- if value:
- command_line.append(arg_name)
- elif isinstance(value, list):
- for v in value:
- command_line.append(arg_name)
- command_line.append(str(v))
- else:
- command_line.append(arg_name)
- command_line.append(str(value))
- # Note: actool crashes if inputs path are relative, so use os.path.abspath
- # to get absolute path name for inputs.
- command_line.extend(map(os.path.abspath, inputs))
- subprocess.check_call(command_line)
-
- def ExecMergeInfoPlist(self, output, *inputs):
- """Merge multiple .plist files into a single .plist file."""
- merged_plist = {}
- for path in inputs:
- plist = self._LoadPlistMaybeBinary(path)
- self._MergePlist(merged_plist, plist)
- plistlib.writePlist(merged_plist, output)
-
- def ExecCodeSignBundle(self, key, resource_rules, entitlements, provisioning):
- """Code sign a bundle.
-
- This function tries to code sign an iOS bundle, following the same
- algorithm as Xcode:
- 1. copy ResourceRules.plist from the user or the SDK into the bundle,
-      2. pick the provisioning profile that best matches the bundle identifier,
-         and copy it into the bundle as embedded.mobileprovision,
-      3. copy Entitlements.plist from the user or the SDK next to the bundle,
- 4. code sign the bundle.
- """
- resource_rules_path = self._InstallResourceRules(resource_rules)
- substitutions, overrides = self._InstallProvisioningProfile(
- provisioning, self._GetCFBundleIdentifier())
- entitlements_path = self._InstallEntitlements(
- entitlements, substitutions, overrides)
- subprocess.check_call([
- 'codesign', '--force', '--sign', key, '--resource-rules',
- resource_rules_path, '--entitlements', entitlements_path,
- os.path.join(
- os.environ['TARGET_BUILD_DIR'],
- os.environ['FULL_PRODUCT_NAME'])])
-
- def _InstallResourceRules(self, resource_rules):
- """Installs ResourceRules.plist from user or SDK into the bundle.
-
- Args:
- resource_rules: string, optional, path to the ResourceRules.plist file
-        to use, defaults to "${SDKROOT}/ResourceRules.plist"
-
- Returns:
-      Path to the copy of ResourceRules.plist in the bundle.
- """
- source_path = resource_rules
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['CONTENTS_FOLDER_PATH'],
- 'ResourceRules.plist')
- if not source_path:
- source_path = os.path.join(
- os.environ['SDKROOT'], 'ResourceRules.plist')
- shutil.copy2(source_path, target_path)
- return target_path
-
- def _InstallProvisioningProfile(self, profile, bundle_identifier):
- """Installs embedded.mobileprovision into the bundle.
-
- Args:
-      profile: string, optional, short name of the .mobileprovision file
-        to use; if empty or the file is missing, the best installed file
-        will be used
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
-
- Returns:
-      A tuple containing two dictionaries: variable substitutions and values
-      to override when generating the entitlements file.
- """
- source_path, provisioning_data, team_id = self._FindProvisioningProfile(
- profile, bundle_identifier)
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['CONTENTS_FOLDER_PATH'],
- 'embedded.mobileprovision')
- shutil.copy2(source_path, target_path)
- substitutions = self._GetSubstitutions(bundle_identifier, team_id + '.')
- return substitutions, provisioning_data['Entitlements']
-
- def _FindProvisioningProfile(self, profile, bundle_identifier):
- """Finds the .mobileprovision file to use for signing the bundle.
-
-    Checks all the installed provisioning profiles (or, if the user specified
-    the PROVISIONING_PROFILE variable, only consults that one) and selects the
-    most specific one that corresponds to the bundle identifier.
-
- Args:
-      profile: string, optional, short name of the .mobileprovision file
-        to use; if empty or the file is missing, the best installed file
-        will be used
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
-
- Returns:
- A tuple of the path to the selected provisioning profile, the data of
- the embedded plist in the provisioning profile and the team identifier
- to use for code signing.
-
- Raises:
- SystemExit: if no .mobileprovision can be used to sign the bundle.
- """
- profiles_dir = os.path.join(
- os.environ['HOME'], 'Library', 'MobileDevice', 'Provisioning Profiles')
- if not os.path.isdir(profiles_dir):
- print >>sys.stderr, (
- 'cannot find mobile provisioning for %s' % bundle_identifier)
- sys.exit(1)
- provisioning_profiles = None
- if profile:
- profile_path = os.path.join(profiles_dir, profile + '.mobileprovision')
- if os.path.exists(profile_path):
- provisioning_profiles = [profile_path]
- if not provisioning_profiles:
- provisioning_profiles = glob.glob(
- os.path.join(profiles_dir, '*.mobileprovision'))
- valid_provisioning_profiles = {}
- for profile_path in provisioning_profiles:
- profile_data = self._LoadProvisioningProfile(profile_path)
- app_id_pattern = profile_data.get(
- 'Entitlements', {}).get('application-identifier', '')
- for team_identifier in profile_data.get('TeamIdentifier', []):
- app_id = '%s.%s' % (team_identifier, bundle_identifier)
- if fnmatch.fnmatch(app_id, app_id_pattern):
- valid_provisioning_profiles[app_id_pattern] = (
- profile_path, profile_data, team_identifier)
- if not valid_provisioning_profiles:
- print >>sys.stderr, (
- 'cannot find mobile provisioning for %s' % bundle_identifier)
- sys.exit(1)
- # If the user has multiple provisioning profiles installed that can be
- # used for ${bundle_identifier}, pick the most specific one (ie. the
- # provisioning profile whose pattern is the longest).
- selected_key = max(valid_provisioning_profiles, key=lambda v: len(v))
- return valid_provisioning_profiles[selected_key]
-
- def _LoadProvisioningProfile(self, profile_path):
- """Extracts the plist embedded in a provisioning profile.
-
- Args:
- profile_path: string, path to the .mobileprovision file
-
- Returns:
- Content of the plist embedded in the provisioning profile as a dictionary.
- """
- with tempfile.NamedTemporaryFile() as temp:
- subprocess.check_call([
- 'security', 'cms', '-D', '-i', profile_path, '-o', temp.name])
- return self._LoadPlistMaybeBinary(temp.name)
-
- def _MergePlist(self, merged_plist, plist):
- """Merge |plist| into |merged_plist|."""
- for key, value in plist.iteritems():
- if isinstance(value, dict):
- merged_value = merged_plist.get(key, {})
- if isinstance(merged_value, dict):
- self._MergePlist(merged_value, value)
- merged_plist[key] = merged_value
- else:
- merged_plist[key] = value
- else:
- merged_plist[key] = value
-
- def _LoadPlistMaybeBinary(self, plist_path):
- """Loads into a memory a plist possibly encoded in binary format.
-
- This is a wrapper around plistlib.readPlist that tries to convert the
- plist to the XML format if it can't be parsed (assuming that it is in
- the binary format).
-
- Args:
- plist_path: string, path to a plist file, in XML or binary format
-
- Returns:
- Content of the plist as a dictionary.
- """
- try:
- # First, try to read the file using plistlib that only supports XML,
- # and if an exception is raised, convert a temporary copy to XML and
- # load that copy.
- return plistlib.readPlist(plist_path)
- except:
- pass
- with tempfile.NamedTemporaryFile() as temp:
- shutil.copy2(plist_path, temp.name)
- subprocess.check_call(['plutil', '-convert', 'xml1', temp.name])
- return plistlib.readPlist(temp.name)
-
- def _GetSubstitutions(self, bundle_identifier, app_identifier_prefix):
- """Constructs a dictionary of variable substitutions for Entitlements.plist.
-
- Args:
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
- app_identifier_prefix: string, value for AppIdentifierPrefix
-
- Returns:
- Dictionary of substitutions to apply when generating Entitlements.plist.
- """
- return {
- 'CFBundleIdentifier': bundle_identifier,
- 'AppIdentifierPrefix': app_identifier_prefix,
- }
-
- def _GetCFBundleIdentifier(self):
- """Extracts CFBundleIdentifier value from Info.plist in the bundle.
-
- Returns:
- Value of CFBundleIdentifier in the Info.plist located in the bundle.
- """
- info_plist_path = os.path.join(
- os.environ['TARGET_BUILD_DIR'],
- os.environ['INFOPLIST_PATH'])
- info_plist_data = self._LoadPlistMaybeBinary(info_plist_path)
- return info_plist_data['CFBundleIdentifier']
-
- def _InstallEntitlements(self, entitlements, substitutions, overrides):
- """Generates and install the ${BundleName}.xcent entitlements file.
-
- Expands variables "$(variable)" pattern in the source entitlements file,
- add extra entitlements defined in the .mobileprovision file and the copy
- the generated plist to "${BundlePath}.xcent".
-
- Args:
- entitlements: string, optional, path to the Entitlements.plist template
- to use, defaults to "${SDKROOT}/Entitlements.plist"
- substitutions: dictionary, variable substitutions
- overrides: dictionary, values to add to the entitlements
-
- Returns:
- Path to the generated entitlements file.
- """
- source_path = entitlements
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['PRODUCT_NAME'] + '.xcent')
- if not source_path:
- source_path = os.path.join(
- os.environ['SDKROOT'],
- 'Entitlements.plist')
- shutil.copy2(source_path, target_path)
- data = self._LoadPlistMaybeBinary(target_path)
- data = self._ExpandVariables(data, substitutions)
- if overrides:
- for key in overrides:
- if key not in data:
- data[key] = overrides[key]
- plistlib.writePlist(data, target_path)
- return target_path
-
- def _ExpandVariables(self, data, substitutions):
- """Expands variables "$(variable)" in data.
-
- Args:
- data: object, can be either string, list or dictionary
- substitutions: dictionary, variable substitutions to perform
-
- Returns:
-      Copy of data where each reference to "$(variable)" has been replaced
-      by the corresponding value found in substitutions, or left intact if
-      the key was not found.
- """
- if isinstance(data, str):
- for key, value in substitutions.iteritems():
- data = data.replace('$(%s)' % key, value)
- return data
- if isinstance(data, list):
- return [self._ExpandVariables(v, substitutions) for v in data]
- if isinstance(data, dict):
- return {k: self._ExpandVariables(data[k], substitutions) for k in data}
- return data
-
-if __name__ == '__main__':
- sys.exit(main(sys.argv[1:]))
diff --git a/test/addons-napi/test_promise/build/test_promise.target.mk b/test/addons-napi/test_promise/build/test_promise.target.mk
deleted file mode 100644
index ba1dcc23a34873..00000000000000
--- a/test/addons-napi/test_promise/build/test_promise.target.mk
+++ /dev/null
@@ -1,178 +0,0 @@
-# This file is generated by gyp; do not edit.
-
-TOOLSET := target
-TARGET := test_promise
-DEFS_Debug := \
- '-DNODE_GYP_MODULE_NAME=test_promise' \
- '-DUSING_UV_SHARED=1' \
- '-DUSING_V8_SHARED=1' \
- '-DV8_DEPRECATION_WARNINGS=1' \
- '-D_DARWIN_USE_64_BIT_INODE=1' \
- '-D_LARGEFILE_SOURCE' \
- '-D_FILE_OFFSET_BITS=64' \
- '-DBUILDING_NODE_EXTENSION' \
- '-DDEBUG' \
- '-D_DEBUG' \
- '-DV8_ENABLE_CHECKS'
-
-# Flags passed to all source files.
-CFLAGS_Debug := \
- -O0 \
- -gdwarf-2 \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -Wall \
- -Wendif-labels \
- -W \
- -Wno-unused-parameter
-
-# Flags passed to only C files.
-CFLAGS_C_Debug := \
- -fno-strict-aliasing
-
-# Flags passed to only C++ files.
-CFLAGS_CC_Debug := \
- -std=gnu++0x \
- -stdlib=libc++ \
- -fno-rtti \
- -fno-exceptions \
- -fno-threadsafe-statics \
- -fno-strict-aliasing
-
-# Flags passed to only ObjC files.
-CFLAGS_OBJC_Debug :=
-
-# Flags passed to only ObjC++ files.
-CFLAGS_OBJCC_Debug :=
-
-INCS_Debug := \
- -I/Users/trott/io.js/include/node \
- -I/Users/trott/io.js/src \
- -I/Users/trott/io.js/deps/uv/include \
- -I/Users/trott/io.js/deps/v8/include
-
-DEFS_Release := \
- '-DNODE_GYP_MODULE_NAME=test_promise' \
- '-DUSING_UV_SHARED=1' \
- '-DUSING_V8_SHARED=1' \
- '-DV8_DEPRECATION_WARNINGS=1' \
- '-D_DARWIN_USE_64_BIT_INODE=1' \
- '-D_LARGEFILE_SOURCE' \
- '-D_FILE_OFFSET_BITS=64' \
- '-DBUILDING_NODE_EXTENSION'
-
-# Flags passed to all source files.
-CFLAGS_Release := \
- -Os \
- -gdwarf-2 \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -Wall \
- -Wendif-labels \
- -W \
- -Wno-unused-parameter
-
-# Flags passed to only C files.
-CFLAGS_C_Release := \
- -fno-strict-aliasing
-
-# Flags passed to only C++ files.
-CFLAGS_CC_Release := \
- -std=gnu++0x \
- -stdlib=libc++ \
- -fno-rtti \
- -fno-exceptions \
- -fno-threadsafe-statics \
- -fno-strict-aliasing
-
-# Flags passed to only ObjC files.
-CFLAGS_OBJC_Release :=
-
-# Flags passed to only ObjC++ files.
-CFLAGS_OBJCC_Release :=
-
-INCS_Release := \
- -I/Users/trott/io.js/include/node \
- -I/Users/trott/io.js/src \
- -I/Users/trott/io.js/deps/uv/include \
- -I/Users/trott/io.js/deps/v8/include
-
-OBJS := \
- $(obj).target/$(TARGET)/test_promise.o
-
-# Add to the list of files we specially track dependencies for.
-all_deps += $(OBJS)
-
-# CFLAGS et al overrides must be target-local.
-# See "Target-specific Variable Values" in the GNU Make manual.
-$(OBJS): TOOLSET := $(TOOLSET)
-$(OBJS): GYP_CFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE))
-$(OBJS): GYP_CXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE))
-$(OBJS): GYP_OBJCFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) $(CFLAGS_OBJC_$(BUILDTYPE))
-$(OBJS): GYP_OBJCXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) $(CFLAGS_OBJCC_$(BUILDTYPE))
-
-# Suffix rules, putting all outputs into $(obj).
-
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-# Try building from generated source, too.
-
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-# End of this set of suffix rules
-### Rules for final target.
-LDFLAGS_Debug := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -L$(builddir) \
- -stdlib=libc++
-
-LIBTOOLFLAGS_Debug := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first
-
-LDFLAGS_Release := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -L$(builddir) \
- -stdlib=libc++
-
-LIBTOOLFLAGS_Release := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first
-
-LIBS :=
-
-$(builddir)/test_promise.node: GYP_LDFLAGS := $(LDFLAGS_$(BUILDTYPE))
-$(builddir)/test_promise.node: LIBS := $(LIBS)
-$(builddir)/test_promise.node: GYP_LIBTOOLFLAGS := $(LIBTOOLFLAGS_$(BUILDTYPE))
-$(builddir)/test_promise.node: TOOLSET := $(TOOLSET)
-$(builddir)/test_promise.node: $(OBJS) FORCE_DO_CMD
- $(call do_cmd,solink_module)
-
-all_deps += $(builddir)/test_promise.node
-# Add target alias
-.PHONY: test_promise
-test_promise: $(builddir)/test_promise.node
-
-# Short alias for building this executable.
-.PHONY: test_promise.node
-test_promise.node: $(builddir)/test_promise.node
-
-# Add executable to "all" target.
-.PHONY: all
-all: $(builddir)/test_promise.node
-
diff --git a/test/addons-napi/test_properties/build/Makefile b/test/addons-napi/test_properties/build/Makefile
deleted file mode 100644
index ff0ada5980ca4f..00000000000000
--- a/test/addons-napi/test_properties/build/Makefile
+++ /dev/null
@@ -1,342 +0,0 @@
-# We borrow heavily from the kernel build setup, though we are simpler since
-# we don't have Kconfig tweaking settings on us.
-
-# The implicit make rules have it looking for RCS files, among other things.
-# We instead explicitly write all the rules we care about.
-# It's even quicker (saves ~200ms) to pass -r on the command line.
-MAKEFLAGS=-r
-
-# The source directory tree.
-srcdir := ..
-abs_srcdir := $(abspath $(srcdir))
-
-# The name of the builddir.
-builddir_name ?= .
-
-# The V=1 flag on command line makes us verbosely print command lines.
-ifdef V
- quiet=
-else
- quiet=quiet_
-endif
-
-# Specify BUILDTYPE=Release on the command line for a release build.
-BUILDTYPE ?= Release
-
-# Directory all our build output goes into.
-# Note that this must be two directories beneath src/ for unit tests to pass,
-# as they reach into the src/ directory for data with relative paths.
-builddir ?= $(builddir_name)/$(BUILDTYPE)
-abs_builddir := $(abspath $(builddir))
-depsdir := $(builddir)/.deps
-
-# Object output directory.
-obj := $(builddir)/obj
-abs_obj := $(abspath $(obj))
-
-# We build up a list of every single one of the targets so we can slurp in the
-# generated dependency rule Makefiles in one pass.
-all_deps :=
-
-
-
-CC.target ?= $(CC)
-CFLAGS.target ?= $(CPPFLAGS) $(CFLAGS)
-CXX.target ?= $(CXX)
-CXXFLAGS.target ?= $(CPPFLAGS) $(CXXFLAGS)
-LINK.target ?= $(LINK)
-LDFLAGS.target ?= $(LDFLAGS)
-AR.target ?= $(AR)
-
-# C++ apps need to be linked with g++.
-LINK ?= $(CXX.target)
-
-# TODO(evan): move all cross-compilation logic to gyp-time so we don't need
-# to replicate this environment fallback in make as well.
-CC.host ?= gcc
-CFLAGS.host ?= $(CPPFLAGS_host) $(CFLAGS_host)
-CXX.host ?= g++
-CXXFLAGS.host ?= $(CPPFLAGS_host) $(CXXFLAGS_host)
-LINK.host ?= $(CXX.host)
-LDFLAGS.host ?=
-AR.host ?= ar
-
-# Define a dir function that can handle spaces.
-# http://www.gnu.org/software/make/manual/make.html#Syntax-of-Functions
-# "leading spaces cannot appear in the text of the first argument as written.
-# These characters can be put into the argument value by variable substitution."
-empty :=
-space := $(empty) $(empty)
-
-# http://stackoverflow.com/questions/1189781/using-make-dir-or-notdir-on-a-path-with-spaces
-replace_spaces = $(subst $(space),?,$1)
-unreplace_spaces = $(subst ?,$(space),$1)
-dirx = $(call unreplace_spaces,$(dir $(call replace_spaces,$1)))
-
-# Flags to make gcc output dependency info. Note that you need to be
-# careful here to use the flags that ccache and distcc can understand.
-# We write to a dep file on the side first and then rename at the end
-# so we can't end up with a broken dep file.
-depfile = $(depsdir)/$(call replace_spaces,$@).d
-DEPFLAGS = -MMD -MF $(depfile).raw
-
-# We have to fixup the deps output in a few ways.
-# (1) the file output should mention the proper .o file.
-# ccache or distcc lose the path to the target, so we convert a rule of
-# the form:
-# foobar.o: DEP1 DEP2
-# into
-# path/to/foobar.o: DEP1 DEP2
-# (2) we want missing files not to cause us to fail to build.
-# We want to rewrite
-# foobar.o: DEP1 DEP2 \
-# DEP3
-# to
-# DEP1:
-# DEP2:
-# DEP3:
-# so if the files are missing, they're just considered phony rules.
-# We have to do some pretty insane escaping to get those backslashes
-# and dollar signs past make, the shell, and sed at the same time.
-# Doesn't work with spaces, but that's fine: .d files have spaces in
-# their names replaced with other characters.
-define fixup_dep
-# The depfile may not exist if the input file didn't have any #includes.
-touch $(depfile).raw
-# Fixup path as in (1).
-sed -e "s|^$(notdir $@)|$@|" $(depfile).raw >> $(depfile)
-# Add extra rules as in (2).
-# We remove slashes and replace spaces with new lines;
-# remove blank lines;
-# delete the first line and append a colon to the remaining lines.
-sed -e 's|\\||' -e 'y| |\n|' $(depfile).raw |\
- grep -v '^$$' |\
- sed -e 1d -e 's|$$|:|' \
- >> $(depfile)
-rm $(depfile).raw
-endef
-
-# Command definitions:
-# - cmd_foo is the actual command to run;
-# - quiet_cmd_foo is the brief-output summary of the command.
-
-quiet_cmd_cc = CC($(TOOLSET)) $@
-cmd_cc = $(CC.$(TOOLSET)) $(GYP_CFLAGS) $(DEPFLAGS) $(CFLAGS.$(TOOLSET)) -c -o $@ $<
-
-quiet_cmd_cxx = CXX($(TOOLSET)) $@
-cmd_cxx = $(CXX.$(TOOLSET)) $(GYP_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-
-quiet_cmd_objc = CXX($(TOOLSET)) $@
-cmd_objc = $(CC.$(TOOLSET)) $(GYP_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-quiet_cmd_objcxx = CXX($(TOOLSET)) $@
-cmd_objcxx = $(CXX.$(TOOLSET)) $(GYP_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-# Commands for precompiled header files.
-quiet_cmd_pch_c = CXX($(TOOLSET)) $@
-cmd_pch_c = $(CC.$(TOOLSET)) $(GYP_PCH_CFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-quiet_cmd_pch_cc = CXX($(TOOLSET)) $@
-cmd_pch_cc = $(CC.$(TOOLSET)) $(GYP_PCH_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-quiet_cmd_pch_m = CXX($(TOOLSET)) $@
-cmd_pch_m = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $<
-quiet_cmd_pch_mm = CXX($(TOOLSET)) $@
-cmd_pch_mm = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-# gyp-mac-tool is written next to the root Makefile by gyp.
-# Use $(4) for the command, since $(2) and $(3) are used as flags by do_cmd
-# already.
-quiet_cmd_mac_tool = MACTOOL $(4) $<
-cmd_mac_tool = ./gyp-mac-tool $(4) $< "$@"
-
-quiet_cmd_mac_package_framework = PACKAGE FRAMEWORK $@
-cmd_mac_package_framework = ./gyp-mac-tool package-framework "$@" $(4)
-
-quiet_cmd_infoplist = INFOPLIST $@
-cmd_infoplist = $(CC.$(TOOLSET)) -E -P -Wno-trigraphs -x c $(INFOPLIST_DEFINES) "$<" -o "$@"
-
-quiet_cmd_touch = TOUCH $@
-cmd_touch = touch $@
-
-quiet_cmd_copy = COPY $@
-# send stderr to /dev/null to ignore messages when linking directories.
-cmd_copy = rm -rf "$@" && cp -af "$<" "$@"
-
-quiet_cmd_alink = LIBTOOL-STATIC $@
-cmd_alink = rm -f $@ && ./gyp-mac-tool filter-libtool libtool $(GYP_LIBTOOLFLAGS) -static -o $@ $(filter %.o,$^)
-
-quiet_cmd_link = LINK($(TOOLSET)) $@
-cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS)
-
-quiet_cmd_solink = SOLINK($(TOOLSET)) $@
-cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS)
-
-quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@
-cmd_solink_module = $(LINK.$(TOOLSET)) -bundle $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS)
-
-
-# Define an escape_quotes function to escape single quotes.
-# This allows us to handle quotes properly as long as we always
-# use single quotes and escape_quotes.
-escape_quotes = $(subst ','\'',$(1))
-# This comment is here just to include a ' to unconfuse syntax highlighting.
-# Define an escape_vars function to escape '$' variable syntax.
-# This allows us to read/write command lines with shell variables (e.g.
-# $LD_LIBRARY_PATH), without triggering make substitution.
-escape_vars = $(subst $$,$$$$,$(1))
-# Helper that expands to a shell command to echo a string exactly as it is in
-# make. This uses printf instead of echo because printf's behaviour with respect
-# to escape sequences is more portable than echo's across different shells
-# (e.g., dash, bash).
-exact_echo = printf '%s\n' '$(call escape_quotes,$(1))'
-
-# Helper to compare the command we're about to run against the command
-# we logged the last time we ran the command. Produces an empty
-# string (false) when the commands match.
-# Tricky point: Make has no string-equality test function.
-# The kernel uses the following, but it seems like it would have false
-# positives, where one string reordered its arguments.
-# arg_check = $(strip $(filter-out $(cmd_$(1)), $(cmd_$@)) \
-# $(filter-out $(cmd_$@), $(cmd_$(1))))
-# We instead substitute each for the empty string into the other, and
-# say they're equal if both substitutions produce the empty string.
-# .d files contain ? instead of spaces, take that into account.
-command_changed = $(or $(subst $(cmd_$(1)),,$(cmd_$(call replace_spaces,$@))),\
- $(subst $(cmd_$(call replace_spaces,$@)),,$(cmd_$(1))))
-
-# Helper that is non-empty when a prerequisite changes.
-# Normally make does this implicitly, but we force rules to always run
-# so we can check their command lines.
-# $? -- new prerequisites
-# $| -- order-only dependencies
-prereq_changed = $(filter-out FORCE_DO_CMD,$(filter-out $|,$?))
-
-# Helper that executes all postbuilds until one fails.
-define do_postbuilds
- @E=0;\
- for p in $(POSTBUILDS); do\
- eval $$p;\
- E=$$?;\
- if [ $$E -ne 0 ]; then\
- break;\
- fi;\
- done;\
- if [ $$E -ne 0 ]; then\
- rm -rf "$@";\
- exit $$E;\
- fi
-endef
-
-# do_cmd: run a command via the above cmd_foo names, if necessary.
-# Should always run for a given target to handle command-line changes.
-# Second argument, if non-zero, makes it do asm/C/C++ dependency munging.
-# Third argument, if non-zero, makes it do POSTBUILDS processing.
-# Note: We intentionally do NOT call dirx for depfile, since it contains ? for
-# spaces already and dirx strips the ? characters.
-define do_cmd
-$(if $(or $(command_changed),$(prereq_changed)),
- @$(call exact_echo, $($(quiet)cmd_$(1)))
- @mkdir -p "$(call dirx,$@)" "$(dir $(depfile))"
- $(if $(findstring flock,$(word 2,$(cmd_$1))),
- @$(cmd_$(1))
- @echo " $(quiet_cmd_$(1)): Finished",
- @$(cmd_$(1))
- )
- @$(call exact_echo,$(call escape_vars,cmd_$(call replace_spaces,$@) := $(cmd_$(1)))) > $(depfile)
- @$(if $(2),$(fixup_dep))
- $(if $(and $(3), $(POSTBUILDS)),
- $(call do_postbuilds)
- )
-)
-endef
-
-# Declare the "all" target first so it is the default,
-# even though we don't have the deps yet.
-.PHONY: all
-all:
-
-# make looks for ways to re-generate included makefiles, but in our case, we
-# don't have a direct way. Explicitly telling make that it has nothing to do
-# for them makes it go faster.
-%.d: ;
-
-# Use FORCE_DO_CMD to force a target to run. Should be coupled with
-# do_cmd.
-.PHONY: FORCE_DO_CMD
-FORCE_DO_CMD:
-
-TOOLSET := target
-# Suffix rules, putting all outputs into $(obj).
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cpp FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cxx FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.m FORCE_DO_CMD
- @$(call do_cmd,objc,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.mm FORCE_DO_CMD
- @$(call do_cmd,objcxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.S FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.s FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-# Try building from generated source, too.
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cpp FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cxx FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.m FORCE_DO_CMD
- @$(call do_cmd,objc,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.mm FORCE_DO_CMD
- @$(call do_cmd,objcxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.S FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.s FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-$(obj).$(TOOLSET)/%.o: $(obj)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.cpp FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.cxx FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.m FORCE_DO_CMD
- @$(call do_cmd,objc,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.mm FORCE_DO_CMD
- @$(call do_cmd,objcxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.S FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.s FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-
-ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
- $(findstring $(join ^,$(prefix)),\
- $(join ^,test_properties.target.mk)))),)
- include test_properties.target.mk
-endif
-
-quiet_cmd_regen_makefile = ACTION Regenerating $@
-cmd_regen_makefile = cd $(srcdir); /Users/trott/io.js/deps/npm/node_modules/node-gyp/gyp/gyp_main.py -fmake --ignore-environment "--toplevel-dir=." -I/Users/trott/io.js/test/addons-napi/test_properties/build/config.gypi -I/Users/trott/io.js/deps/npm/node_modules/node-gyp/addon.gypi -I/Users/trott/io.js/common.gypi "--depth=." "-Goutput_dir=." "--generator-output=build" "-Dlibrary=shared_library" "-Dvisibility=default" "-Dnode_root_dir=/Users/trott/io.js" "-Dnode_gyp_dir=/Users/trott/io.js/deps/npm/node_modules/node-gyp" "-Dnode_lib_file=/Users/trott/io.js/$(Configuration)/node.lib" "-Dmodule_root_dir=/Users/trott/io.js/test/addons-napi/test_properties" "-Dnode_engine=v8" binding.gyp
-Makefile: $(srcdir)/../../../deps/npm/node_modules/node-gyp/addon.gypi $(srcdir)/build/config.gypi $(srcdir)/binding.gyp $(srcdir)/../../../common.gypi
- $(call do_cmd,regen_makefile)
-
-# "all" is a concatenation of the "all" targets from all the included
-# sub-makefiles. This is just here to clarify.
-all:
-
-# Add in dependency-tracking rules. $(all_deps) is the list of every single
-# target in our tree. Only consider the ones with .d (dependency) info:
-d_files := $(wildcard $(foreach f,$(all_deps),$(depsdir)/$(f).d))
-ifneq ($(d_files),)
- include $(d_files)
-endif
diff --git a/test/addons-napi/test_properties/build/binding.Makefile b/test/addons-napi/test_properties/build/binding.Makefile
deleted file mode 100644
index 7e506c1667b9e4..00000000000000
--- a/test/addons-napi/test_properties/build/binding.Makefile
+++ /dev/null
@@ -1,6 +0,0 @@
-# This file is generated by gyp; do not edit.
-
-export builddir_name ?= ./build/.
-.PHONY: all
-all:
- $(MAKE) test_properties
diff --git a/test/addons-napi/test_properties/build/config.gypi b/test/addons-napi/test_properties/build/config.gypi
deleted file mode 100644
index 6a35cfccbb772b..00000000000000
--- a/test/addons-napi/test_properties/build/config.gypi
+++ /dev/null
@@ -1,70 +0,0 @@
-# Do not edit. File was generated by node-gyp's "configure" step
-{
- "target_defaults": {
- "cflags": [],
- "default_configuration": "Release",
- "defines": [],
- "include_dirs": [],
- "libraries": []
- },
- "variables": {
- "asan": 0,
- "coverage": "false",
- "debug_devtools": "node",
- "debug_http2": "false",
- "debug_nghttp2": "false",
- "force_dynamic_crt": 0,
- "host_arch": "x64",
- "icu_data_file": "icudt59l.dat",
- "icu_data_in": "../../deps/icu-small/source/data/in/icudt59l.dat",
- "icu_endianness": "l",
- "icu_gyp_path": "tools/icu/icu-generic.gyp",
- "icu_locales": "en,root",
- "icu_path": "deps/icu-small",
- "icu_small": "true",
- "icu_ver_major": "59",
- "llvm_version": 0,
- "node_byteorder": "little",
- "node_enable_d8": "false",
- "node_enable_v8_vtunejit": "false",
- "node_install_npm": "true",
- "node_module_version": 58,
- "node_no_browser_globals": "false",
- "node_prefix": "/usr/local",
- "node_release_urlbase": "",
- "node_shared": "false",
- "node_shared_cares": "false",
- "node_shared_http_parser": "false",
- "node_shared_libuv": "false",
- "node_shared_openssl": "false",
- "node_shared_zlib": "false",
- "node_tag": "",
- "node_use_bundled_v8": "true",
- "node_use_dtrace": "true",
- "node_use_etw": "false",
- "node_use_lttng": "false",
- "node_use_openssl": "true",
- "node_use_perfctr": "false",
- "node_use_v8_platform": "true",
- "node_without_node_options": "false",
- "openssl_fips": "",
- "openssl_no_asm": 0,
- "shlib_suffix": "58.dylib",
- "target_arch": "x64",
- "uv_parent_path": "/deps/uv/",
- "uv_use_dtrace": "true",
- "v8_enable_gdbjit": 0,
- "v8_enable_i18n_support": 1,
- "v8_enable_inspector": 1,
- "v8_no_strict_aliasing": 1,
- "v8_optimized_debug": 0,
- "v8_promise_internal_field_count": 1,
- "v8_random_seed": 0,
- "v8_trace_maps": 0,
- "v8_use_snapshot": "true",
- "want_separate_host_toolset": 0,
- "xcode_version": "8.0",
- "nodedir": "/Users/trott/io.js",
- "standalone_static_library": 1
- }
-}
diff --git a/test/addons-napi/test_properties/build/gyp-mac-tool b/test/addons-napi/test_properties/build/gyp-mac-tool
deleted file mode 100755
index 8ef02b0493a003..00000000000000
--- a/test/addons-napi/test_properties/build/gyp-mac-tool
+++ /dev/null
@@ -1,611 +0,0 @@
-#!/usr/bin/env python
-# Generated by gyp. Do not edit.
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Utility functions to perform Xcode-style build steps.
-
-These functions are executed via gyp-mac-tool when using the Makefile generator.
-"""
-
-import fcntl
-import fnmatch
-import glob
-import json
-import os
-import plistlib
-import re
-import shutil
-import string
-import subprocess
-import sys
-import tempfile
-
-
-def main(args):
- executor = MacTool()
- exit_code = executor.Dispatch(args)
- if exit_code is not None:
- sys.exit(exit_code)
-
-
-class MacTool(object):
- """This class performs all the Mac tooling steps. The methods can either be
- executed directly, or dispatched from an argument list."""
-
- def Dispatch(self, args):
- """Dispatches a string command to a method."""
- if len(args) < 1:
- raise Exception("Not enough arguments")
-
- method = "Exec%s" % self._CommandifyName(args[0])
- return getattr(self, method)(*args[1:])
-
- def _CommandifyName(self, name_string):
- """Transforms a tool name like copy-info-plist to CopyInfoPlist"""
- return name_string.title().replace('-', '')
-
- def ExecCopyBundleResource(self, source, dest, convert_to_binary):
- """Copies a resource file to the bundle/Resources directory, performing any
- necessary compilation on each resource."""
- extension = os.path.splitext(source)[1].lower()
- if os.path.isdir(source):
- # Copy tree.
- # TODO(thakis): This copies file attributes like mtime, while the
- # single-file branch below doesn't. This should probably be changed to
- # be consistent with the single-file branch.
- if os.path.exists(dest):
- shutil.rmtree(dest)
- shutil.copytree(source, dest)
- elif extension == '.xib':
- return self._CopyXIBFile(source, dest)
- elif extension == '.storyboard':
- return self._CopyXIBFile(source, dest)
- elif extension == '.strings':
- self._CopyStringsFile(source, dest, convert_to_binary)
- else:
- shutil.copy(source, dest)
-
- def _CopyXIBFile(self, source, dest):
- """Compiles a XIB file with ibtool into a binary plist in the bundle."""
-
- # ibtool sometimes crashes with relative paths. See crbug.com/314728.
- base = os.path.dirname(os.path.realpath(__file__))
- if os.path.relpath(source):
- source = os.path.join(base, source)
- if os.path.relpath(dest):
- dest = os.path.join(base, dest)
-
- args = ['xcrun', 'ibtool', '--errors', '--warnings', '--notices',
- '--output-format', 'human-readable-text', '--compile', dest, source]
- ibtool_section_re = re.compile(r'/\*.*\*/')
- ibtool_re = re.compile(r'.*note:.*is clipping its content')
- ibtoolout = subprocess.Popen(args, stdout=subprocess.PIPE)
- current_section_header = None
- for line in ibtoolout.stdout:
- if ibtool_section_re.match(line):
- current_section_header = line
- elif not ibtool_re.match(line):
- if current_section_header:
- sys.stdout.write(current_section_header)
- current_section_header = None
- sys.stdout.write(line)
- return ibtoolout.returncode
-
- def _ConvertToBinary(self, dest):
- subprocess.check_call([
- 'xcrun', 'plutil', '-convert', 'binary1', '-o', dest, dest])
-
- def _CopyStringsFile(self, source, dest, convert_to_binary):
- """Copies a .strings file using iconv to reconvert the input into UTF-16."""
- input_code = self._DetectInputEncoding(source) or "UTF-8"
-
- # Xcode's CpyCopyStringsFile / builtin-copyStrings seems to call
- # CFPropertyListCreateFromXMLData() behind the scenes; at least it prints
- # CFPropertyListCreateFromXMLData(): Old-style plist parser: missing
- # semicolon in dictionary.
- # on invalid files. Do the same kind of validation.
- import CoreFoundation
- s = open(source, 'rb').read()
- d = CoreFoundation.CFDataCreate(None, s, len(s))
- _, error = CoreFoundation.CFPropertyListCreateFromXMLData(None, d, 0, None)
- if error:
- return
-
- fp = open(dest, 'wb')
- fp.write(s.decode(input_code).encode('UTF-16'))
- fp.close()
-
- if convert_to_binary == 'True':
- self._ConvertToBinary(dest)
-
- def _DetectInputEncoding(self, file_name):
- """Reads the first few bytes from file_name and tries to guess the text
- encoding. Returns None as a guess if it can't detect it."""
- fp = open(file_name, 'rb')
- try:
- header = fp.read(3)
- except e:
- fp.close()
- return None
- fp.close()
- if header.startswith("\xFE\xFF"):
- return "UTF-16"
- elif header.startswith("\xFF\xFE"):
- return "UTF-16"
- elif header.startswith("\xEF\xBB\xBF"):
- return "UTF-8"
- else:
- return None
-
- def ExecCopyInfoPlist(self, source, dest, convert_to_binary, *keys):
- """Copies the |source| Info.plist to the destination directory |dest|."""
- # Read the source Info.plist into memory.
- fd = open(source, 'r')
- lines = fd.read()
- fd.close()
-
- # Insert synthesized key/value pairs (e.g. BuildMachineOSBuild).
- plist = plistlib.readPlistFromString(lines)
- if keys:
- plist = dict(plist.items() + json.loads(keys[0]).items())
- lines = plistlib.writePlistToString(plist)
-
- # Go through all the environment variables and replace them as variables in
- # the file.
- IDENT_RE = re.compile(r'[/\s]')
- for key in os.environ:
- if key.startswith('_'):
- continue
- evar = '${%s}' % key
- evalue = os.environ[key]
- lines = string.replace(lines, evar, evalue)
-
- # Xcode supports various suffices on environment variables, which are
- # all undocumented. :rfc1034identifier is used in the standard project
- # template these days, and :identifier was used earlier. They are used to
- # convert non-url characters into things that look like valid urls --
- # except that the replacement character for :identifier, '_' isn't valid
- # in a URL either -- oops, hence :rfc1034identifier was born.
- evar = '${%s:identifier}' % key
- evalue = IDENT_RE.sub('_', os.environ[key])
- lines = string.replace(lines, evar, evalue)
-
- evar = '${%s:rfc1034identifier}' % key
- evalue = IDENT_RE.sub('-', os.environ[key])
- lines = string.replace(lines, evar, evalue)
-
- # Remove any keys with values that haven't been replaced.
- lines = lines.split('\n')
- for i in range(len(lines)):
- if lines[i].strip().startswith("${"):
- lines[i] = None
- lines[i - 1] = None
- lines = '\n'.join(filter(lambda x: x is not None, lines))
-
- # Write out the file with variables replaced.
- fd = open(dest, 'w')
- fd.write(lines)
- fd.close()
-
- # Now write out PkgInfo file now that the Info.plist file has been
- # "compiled".
- self._WritePkgInfo(dest)
-
- if convert_to_binary == 'True':
- self._ConvertToBinary(dest)
-
- def _WritePkgInfo(self, info_plist):
- """This writes the PkgInfo file from the data stored in Info.plist."""
- plist = plistlib.readPlist(info_plist)
- if not plist:
- return
-
- # Only create PkgInfo for executable types.
- package_type = plist['CFBundlePackageType']
- if package_type != 'APPL':
- return
-
- # The format of PkgInfo is eight characters, representing the bundle type
- # and bundle signature, each four characters. If that is missing, four
- # '?' characters are used instead.
- signature_code = plist.get('CFBundleSignature', '????')
- if len(signature_code) != 4: # Wrong length resets everything, too.
- signature_code = '?' * 4
-
- dest = os.path.join(os.path.dirname(info_plist), 'PkgInfo')
- fp = open(dest, 'w')
- fp.write('%s%s' % (package_type, signature_code))
- fp.close()
-
- def ExecFlock(self, lockfile, *cmd_list):
- """Emulates the most basic behavior of Linux's flock(1)."""
- # Rely on exception handling to report errors.
- fd = os.open(lockfile, os.O_RDONLY|os.O_NOCTTY|os.O_CREAT, 0o666)
- fcntl.flock(fd, fcntl.LOCK_EX)
- return subprocess.call(cmd_list)
-
- def ExecFilterLibtool(self, *cmd_list):
- """Calls libtool and filters out '/path/to/libtool: file: foo.o has no
- symbols'."""
- libtool_re = re.compile(r'^.*libtool: file: .* has no symbols$')
- libtool_re5 = re.compile(
- r'^.*libtool: warning for library: ' +
- r'.* the table of contents is empty ' +
- r'\(no object file members in the library define global symbols\)$')
- env = os.environ.copy()
- # Ref:
- # http://www.opensource.apple.com/source/cctools/cctools-809/misc/libtool.c
- # The problem with this flag is that it resets the file mtime on the file to
- # epoch=0, e.g. 1970-1-1 or 1969-12-31 depending on timezone.
- env['ZERO_AR_DATE'] = '1'
- libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env)
- _, err = libtoolout.communicate()
- for line in err.splitlines():
- if not libtool_re.match(line) and not libtool_re5.match(line):
- print >>sys.stderr, line
- # Unconditionally touch the output .a file on the command line if present
- # and the command succeeded. A bit hacky.
- if not libtoolout.returncode:
- for i in range(len(cmd_list) - 1):
- if cmd_list[i] == "-o" and cmd_list[i+1].endswith('.a'):
- os.utime(cmd_list[i+1], None)
- break
- return libtoolout.returncode
-
- def ExecPackageFramework(self, framework, version):
- """Takes a path to Something.framework and the Current version of that and
- sets up all the symlinks."""
- # Find the name of the binary based on the part before the ".framework".
- binary = os.path.basename(framework).split('.')[0]
-
- CURRENT = 'Current'
- RESOURCES = 'Resources'
- VERSIONS = 'Versions'
-
- if not os.path.exists(os.path.join(framework, VERSIONS, version, binary)):
- # Binary-less frameworks don't seem to contain symlinks (see e.g.
- # chromium's out/Debug/org.chromium.Chromium.manifest/ bundle).
- return
-
- # Move into the framework directory to set the symlinks correctly.
- pwd = os.getcwd()
- os.chdir(framework)
-
- # Set up the Current version.
- self._Relink(version, os.path.join(VERSIONS, CURRENT))
-
- # Set up the root symlinks.
- self._Relink(os.path.join(VERSIONS, CURRENT, binary), binary)
- self._Relink(os.path.join(VERSIONS, CURRENT, RESOURCES), RESOURCES)
-
- # Back to where we were before!
- os.chdir(pwd)
-
- def _Relink(self, dest, link):
- """Creates a symlink to |dest| named |link|. If |link| already exists,
- it is overwritten."""
- if os.path.lexists(link):
- os.remove(link)
- os.symlink(dest, link)
-
- def ExecCompileXcassets(self, keys, *inputs):
- """Compiles multiple .xcassets files into a single .car file.
-
- This invokes 'actool' to compile all the inputs .xcassets files. The
- |keys| arguments is a json-encoded dictionary of extra arguments to
- pass to 'actool' when the asset catalogs contains an application icon
- or a launch image.
-
- Note that 'actool' does not create the Assets.car file if the asset
- catalogs does not contains imageset.
- """
- command_line = [
- 'xcrun', 'actool', '--output-format', 'human-readable-text',
- '--compress-pngs', '--notices', '--warnings', '--errors',
- ]
- is_iphone_target = 'IPHONEOS_DEPLOYMENT_TARGET' in os.environ
- if is_iphone_target:
- platform = os.environ['CONFIGURATION'].split('-')[-1]
- if platform not in ('iphoneos', 'iphonesimulator'):
- platform = 'iphonesimulator'
- command_line.extend([
- '--platform', platform, '--target-device', 'iphone',
- '--target-device', 'ipad', '--minimum-deployment-target',
- os.environ['IPHONEOS_DEPLOYMENT_TARGET'], '--compile',
- os.path.abspath(os.environ['CONTENTS_FOLDER_PATH']),
- ])
- else:
- command_line.extend([
- '--platform', 'macosx', '--target-device', 'mac',
- '--minimum-deployment-target', os.environ['MACOSX_DEPLOYMENT_TARGET'],
- '--compile',
- os.path.abspath(os.environ['UNLOCALIZED_RESOURCES_FOLDER_PATH']),
- ])
- if keys:
- keys = json.loads(keys)
- for key, value in keys.iteritems():
- arg_name = '--' + key
- if isinstance(value, bool):
- if value:
- command_line.append(arg_name)
- elif isinstance(value, list):
- for v in value:
- command_line.append(arg_name)
- command_line.append(str(v))
- else:
- command_line.append(arg_name)
- command_line.append(str(value))
- # Note: actool crashes if inputs path are relative, so use os.path.abspath
- # to get absolute path name for inputs.
- command_line.extend(map(os.path.abspath, inputs))
- subprocess.check_call(command_line)
-
- def ExecMergeInfoPlist(self, output, *inputs):
- """Merge multiple .plist files into a single .plist file."""
- merged_plist = {}
- for path in inputs:
- plist = self._LoadPlistMaybeBinary(path)
- self._MergePlist(merged_plist, plist)
- plistlib.writePlist(merged_plist, output)
-
- def ExecCodeSignBundle(self, key, resource_rules, entitlements, provisioning):
- """Code sign a bundle.
-
- This function tries to code sign an iOS bundle, following the same
- algorithm as Xcode:
- 1. copy ResourceRules.plist from the user or the SDK into the bundle,
- 2. pick the provisioning profile that best match the bundle identifier,
- and copy it into the bundle as embedded.mobileprovision,
- 3. copy Entitlements.plist from user or SDK next to the bundle,
- 4. code sign the bundle.
- """
- resource_rules_path = self._InstallResourceRules(resource_rules)
- substitutions, overrides = self._InstallProvisioningProfile(
- provisioning, self._GetCFBundleIdentifier())
- entitlements_path = self._InstallEntitlements(
- entitlements, substitutions, overrides)
- subprocess.check_call([
- 'codesign', '--force', '--sign', key, '--resource-rules',
- resource_rules_path, '--entitlements', entitlements_path,
- os.path.join(
- os.environ['TARGET_BUILD_DIR'],
- os.environ['FULL_PRODUCT_NAME'])])
-
- def _InstallResourceRules(self, resource_rules):
- """Installs ResourceRules.plist from user or SDK into the bundle.
-
- Args:
- resource_rules: string, optional, path to the ResourceRules.plist file
- to use, default to "${SDKROOT}/ResourceRules.plist"
-
- Returns:
- Path to the copy of ResourceRules.plist into the bundle.
- """
- source_path = resource_rules
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['CONTENTS_FOLDER_PATH'],
- 'ResourceRules.plist')
- if not source_path:
- source_path = os.path.join(
- os.environ['SDKROOT'], 'ResourceRules.plist')
- shutil.copy2(source_path, target_path)
- return target_path
-
- def _InstallProvisioningProfile(self, profile, bundle_identifier):
- """Installs embedded.mobileprovision into the bundle.
-
- Args:
- profile: string, optional, short name of the .mobileprovision file
- to use, if empty or the file is missing, the best file installed
- will be used
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
-
- Returns:
- A tuple containing two dictionary: variables substitutions and values
- to overrides when generating the entitlements file.
- """
- source_path, provisioning_data, team_id = self._FindProvisioningProfile(
- profile, bundle_identifier)
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['CONTENTS_FOLDER_PATH'],
- 'embedded.mobileprovision')
- shutil.copy2(source_path, target_path)
- substitutions = self._GetSubstitutions(bundle_identifier, team_id + '.')
- return substitutions, provisioning_data['Entitlements']
-
- def _FindProvisioningProfile(self, profile, bundle_identifier):
- """Finds the .mobileprovision file to use for signing the bundle.
-
- Checks all the installed provisioning profiles (or if the user specified
- the PROVISIONING_PROFILE variable, only consult it) and select the most
- specific that correspond to the bundle identifier.
-
- Args:
- profile: string, optional, short name of the .mobileprovision file
- to use, if empty or the file is missing, the best file installed
- will be used
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
-
- Returns:
- A tuple of the path to the selected provisioning profile, the data of
- the embedded plist in the provisioning profile and the team identifier
- to use for code signing.
-
- Raises:
- SystemExit: if no .mobileprovision can be used to sign the bundle.
- """
- profiles_dir = os.path.join(
- os.environ['HOME'], 'Library', 'MobileDevice', 'Provisioning Profiles')
- if not os.path.isdir(profiles_dir):
- print >>sys.stderr, (
- 'cannot find mobile provisioning for %s' % bundle_identifier)
- sys.exit(1)
- provisioning_profiles = None
- if profile:
- profile_path = os.path.join(profiles_dir, profile + '.mobileprovision')
- if os.path.exists(profile_path):
- provisioning_profiles = [profile_path]
- if not provisioning_profiles:
- provisioning_profiles = glob.glob(
- os.path.join(profiles_dir, '*.mobileprovision'))
- valid_provisioning_profiles = {}
- for profile_path in provisioning_profiles:
- profile_data = self._LoadProvisioningProfile(profile_path)
- app_id_pattern = profile_data.get(
- 'Entitlements', {}).get('application-identifier', '')
- for team_identifier in profile_data.get('TeamIdentifier', []):
- app_id = '%s.%s' % (team_identifier, bundle_identifier)
- if fnmatch.fnmatch(app_id, app_id_pattern):
- valid_provisioning_profiles[app_id_pattern] = (
- profile_path, profile_data, team_identifier)
- if not valid_provisioning_profiles:
- print >>sys.stderr, (
- 'cannot find mobile provisioning for %s' % bundle_identifier)
- sys.exit(1)
- # If the user has multiple provisioning profiles installed that can be
- # used for ${bundle_identifier}, pick the most specific one (ie. the
- # provisioning profile whose pattern is the longest).
- selected_key = max(valid_provisioning_profiles, key=lambda v: len(v))
- return valid_provisioning_profiles[selected_key]
-
- def _LoadProvisioningProfile(self, profile_path):
- """Extracts the plist embedded in a provisioning profile.
-
- Args:
- profile_path: string, path to the .mobileprovision file
-
- Returns:
- Content of the plist embedded in the provisioning profile as a dictionary.
- """
- with tempfile.NamedTemporaryFile() as temp:
- subprocess.check_call([
- 'security', 'cms', '-D', '-i', profile_path, '-o', temp.name])
- return self._LoadPlistMaybeBinary(temp.name)
-
- def _MergePlist(self, merged_plist, plist):
- """Merge |plist| into |merged_plist|."""
- for key, value in plist.iteritems():
- if isinstance(value, dict):
- merged_value = merged_plist.get(key, {})
- if isinstance(merged_value, dict):
- self._MergePlist(merged_value, value)
- merged_plist[key] = merged_value
- else:
- merged_plist[key] = value
- else:
- merged_plist[key] = value
-
- def _LoadPlistMaybeBinary(self, plist_path):
- """Loads into a memory a plist possibly encoded in binary format.
-
- This is a wrapper around plistlib.readPlist that tries to convert the
- plist to the XML format if it can't be parsed (assuming that it is in
- the binary format).
-
- Args:
- plist_path: string, path to a plist file, in XML or binary format
-
- Returns:
- Content of the plist as a dictionary.
- """
- try:
- # First, try to read the file using plistlib that only supports XML,
- # and if an exception is raised, convert a temporary copy to XML and
- # load that copy.
- return plistlib.readPlist(plist_path)
- except:
- pass
- with tempfile.NamedTemporaryFile() as temp:
- shutil.copy2(plist_path, temp.name)
- subprocess.check_call(['plutil', '-convert', 'xml1', temp.name])
- return plistlib.readPlist(temp.name)
-
- def _GetSubstitutions(self, bundle_identifier, app_identifier_prefix):
- """Constructs a dictionary of variable substitutions for Entitlements.plist.
-
- Args:
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
- app_identifier_prefix: string, value for AppIdentifierPrefix
-
- Returns:
- Dictionary of substitutions to apply when generating Entitlements.plist.
- """
- return {
- 'CFBundleIdentifier': bundle_identifier,
- 'AppIdentifierPrefix': app_identifier_prefix,
- }
-
- def _GetCFBundleIdentifier(self):
- """Extracts CFBundleIdentifier value from Info.plist in the bundle.
-
- Returns:
- Value of CFBundleIdentifier in the Info.plist located in the bundle.
- """
- info_plist_path = os.path.join(
- os.environ['TARGET_BUILD_DIR'],
- os.environ['INFOPLIST_PATH'])
- info_plist_data = self._LoadPlistMaybeBinary(info_plist_path)
- return info_plist_data['CFBundleIdentifier']
-
- def _InstallEntitlements(self, entitlements, substitutions, overrides):
- """Generates and install the ${BundleName}.xcent entitlements file.
-
- Expands variables "$(variable)" pattern in the source entitlements file,
- add extra entitlements defined in the .mobileprovision file and the copy
- the generated plist to "${BundlePath}.xcent".
-
- Args:
- entitlements: string, optional, path to the Entitlements.plist template
- to use, defaults to "${SDKROOT}/Entitlements.plist"
- substitutions: dictionary, variable substitutions
- overrides: dictionary, values to add to the entitlements
-
- Returns:
- Path to the generated entitlements file.
- """
- source_path = entitlements
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['PRODUCT_NAME'] + '.xcent')
- if not source_path:
- source_path = os.path.join(
- os.environ['SDKROOT'],
- 'Entitlements.plist')
- shutil.copy2(source_path, target_path)
- data = self._LoadPlistMaybeBinary(target_path)
- data = self._ExpandVariables(data, substitutions)
- if overrides:
- for key in overrides:
- if key not in data:
- data[key] = overrides[key]
- plistlib.writePlist(data, target_path)
- return target_path
-
- def _ExpandVariables(self, data, substitutions):
- """Expands variables "$(variable)" in data.
-
- Args:
- data: object, can be either string, list or dictionary
- substitutions: dictionary, variable substitutions to perform
-
- Returns:
- Copy of data where each references to "$(variable)" has been replaced
- by the corresponding value found in substitutions, or left intact if
- the key was not found.
- """
- if isinstance(data, str):
- for key, value in substitutions.iteritems():
- data = data.replace('$(%s)' % key, value)
- return data
- if isinstance(data, list):
- return [self._ExpandVariables(v, substitutions) for v in data]
- if isinstance(data, dict):
- return {k: self._ExpandVariables(data[k], substitutions) for k in data}
- return data
-
-if __name__ == '__main__':
- sys.exit(main(sys.argv[1:]))
diff --git a/test/addons-napi/test_properties/build/test_properties.target.mk b/test/addons-napi/test_properties/build/test_properties.target.mk
deleted file mode 100644
index da754422d29287..00000000000000
--- a/test/addons-napi/test_properties/build/test_properties.target.mk
+++ /dev/null
@@ -1,178 +0,0 @@
-# This file is generated by gyp; do not edit.
-
-TOOLSET := target
-TARGET := test_properties
-DEFS_Debug := \
- '-DNODE_GYP_MODULE_NAME=test_properties' \
- '-DUSING_UV_SHARED=1' \
- '-DUSING_V8_SHARED=1' \
- '-DV8_DEPRECATION_WARNINGS=1' \
- '-D_DARWIN_USE_64_BIT_INODE=1' \
- '-D_LARGEFILE_SOURCE' \
- '-D_FILE_OFFSET_BITS=64' \
- '-DBUILDING_NODE_EXTENSION' \
- '-DDEBUG' \
- '-D_DEBUG' \
- '-DV8_ENABLE_CHECKS'
-
-# Flags passed to all source files.
-CFLAGS_Debug := \
- -O0 \
- -gdwarf-2 \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -Wall \
- -Wendif-labels \
- -W \
- -Wno-unused-parameter
-
-# Flags passed to only C files.
-CFLAGS_C_Debug := \
- -fno-strict-aliasing
-
-# Flags passed to only C++ files.
-CFLAGS_CC_Debug := \
- -std=gnu++0x \
- -stdlib=libc++ \
- -fno-rtti \
- -fno-exceptions \
- -fno-threadsafe-statics \
- -fno-strict-aliasing
-
-# Flags passed to only ObjC files.
-CFLAGS_OBJC_Debug :=
-
-# Flags passed to only ObjC++ files.
-CFLAGS_OBJCC_Debug :=
-
-INCS_Debug := \
- -I/Users/trott/io.js/include/node \
- -I/Users/trott/io.js/src \
- -I/Users/trott/io.js/deps/uv/include \
- -I/Users/trott/io.js/deps/v8/include
-
-DEFS_Release := \
- '-DNODE_GYP_MODULE_NAME=test_properties' \
- '-DUSING_UV_SHARED=1' \
- '-DUSING_V8_SHARED=1' \
- '-DV8_DEPRECATION_WARNINGS=1' \
- '-D_DARWIN_USE_64_BIT_INODE=1' \
- '-D_LARGEFILE_SOURCE' \
- '-D_FILE_OFFSET_BITS=64' \
- '-DBUILDING_NODE_EXTENSION'
-
-# Flags passed to all source files.
-CFLAGS_Release := \
- -Os \
- -gdwarf-2 \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -Wall \
- -Wendif-labels \
- -W \
- -Wno-unused-parameter
-
-# Flags passed to only C files.
-CFLAGS_C_Release := \
- -fno-strict-aliasing
-
-# Flags passed to only C++ files.
-CFLAGS_CC_Release := \
- -std=gnu++0x \
- -stdlib=libc++ \
- -fno-rtti \
- -fno-exceptions \
- -fno-threadsafe-statics \
- -fno-strict-aliasing
-
-# Flags passed to only ObjC files.
-CFLAGS_OBJC_Release :=
-
-# Flags passed to only ObjC++ files.
-CFLAGS_OBJCC_Release :=
-
-INCS_Release := \
- -I/Users/trott/io.js/include/node \
- -I/Users/trott/io.js/src \
- -I/Users/trott/io.js/deps/uv/include \
- -I/Users/trott/io.js/deps/v8/include
-
-OBJS := \
- $(obj).target/$(TARGET)/test_properties.o
-
-# Add to the list of files we specially track dependencies for.
-all_deps += $(OBJS)
-
-# CFLAGS et al overrides must be target-local.
-# See "Target-specific Variable Values" in the GNU Make manual.
-$(OBJS): TOOLSET := $(TOOLSET)
-$(OBJS): GYP_CFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE))
-$(OBJS): GYP_CXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE))
-$(OBJS): GYP_OBJCFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) $(CFLAGS_OBJC_$(BUILDTYPE))
-$(OBJS): GYP_OBJCXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) $(CFLAGS_OBJCC_$(BUILDTYPE))
-
-# Suffix rules, putting all outputs into $(obj).
-
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-# Try building from generated source, too.
-
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-# End of this set of suffix rules
-### Rules for final target.
-LDFLAGS_Debug := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -L$(builddir) \
- -stdlib=libc++
-
-LIBTOOLFLAGS_Debug := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first
-
-LDFLAGS_Release := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -L$(builddir) \
- -stdlib=libc++
-
-LIBTOOLFLAGS_Release := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first
-
-LIBS :=
-
-$(builddir)/test_properties.node: GYP_LDFLAGS := $(LDFLAGS_$(BUILDTYPE))
-$(builddir)/test_properties.node: LIBS := $(LIBS)
-$(builddir)/test_properties.node: GYP_LIBTOOLFLAGS := $(LIBTOOLFLAGS_$(BUILDTYPE))
-$(builddir)/test_properties.node: TOOLSET := $(TOOLSET)
-$(builddir)/test_properties.node: $(OBJS) FORCE_DO_CMD
- $(call do_cmd,solink_module)
-
-all_deps += $(builddir)/test_properties.node
-# Add target alias
-.PHONY: test_properties
-test_properties: $(builddir)/test_properties.node
-
-# Short alias for building this executable.
-.PHONY: test_properties.node
-test_properties.node: $(builddir)/test_properties.node
-
-# Add executable to "all" target.
-.PHONY: all
-all: $(builddir)/test_properties.node
-
diff --git a/test/addons-napi/test_reference/build/Makefile b/test/addons-napi/test_reference/build/Makefile
deleted file mode 100644
index 820fb750ac1f48..00000000000000
--- a/test/addons-napi/test_reference/build/Makefile
+++ /dev/null
@@ -1,342 +0,0 @@
-# We borrow heavily from the kernel build setup, though we are simpler since
-# we don't have Kconfig tweaking settings on us.
-
-# The implicit make rules have it looking for RCS files, among other things.
-# We instead explicitly write all the rules we care about.
-# It's even quicker (saves ~200ms) to pass -r on the command line.
-MAKEFLAGS=-r
-
-# The source directory tree.
-srcdir := ..
-abs_srcdir := $(abspath $(srcdir))
-
-# The name of the builddir.
-builddir_name ?= .
-
-# The V=1 flag on command line makes us verbosely print command lines.
-ifdef V
- quiet=
-else
- quiet=quiet_
-endif
-
-# Specify BUILDTYPE=Release on the command line for a release build.
-BUILDTYPE ?= Release
-
-# Directory all our build output goes into.
-# Note that this must be two directories beneath src/ for unit tests to pass,
-# as they reach into the src/ directory for data with relative paths.
-builddir ?= $(builddir_name)/$(BUILDTYPE)
-abs_builddir := $(abspath $(builddir))
-depsdir := $(builddir)/.deps
-
-# Object output directory.
-obj := $(builddir)/obj
-abs_obj := $(abspath $(obj))
-
-# We build up a list of every single one of the targets so we can slurp in the
-# generated dependency rule Makefiles in one pass.
-all_deps :=
-
-
-
-CC.target ?= $(CC)
-CFLAGS.target ?= $(CPPFLAGS) $(CFLAGS)
-CXX.target ?= $(CXX)
-CXXFLAGS.target ?= $(CPPFLAGS) $(CXXFLAGS)
-LINK.target ?= $(LINK)
-LDFLAGS.target ?= $(LDFLAGS)
-AR.target ?= $(AR)
-
-# C++ apps need to be linked with g++.
-LINK ?= $(CXX.target)
-
-# TODO(evan): move all cross-compilation logic to gyp-time so we don't need
-# to replicate this environment fallback in make as well.
-CC.host ?= gcc
-CFLAGS.host ?= $(CPPFLAGS_host) $(CFLAGS_host)
-CXX.host ?= g++
-CXXFLAGS.host ?= $(CPPFLAGS_host) $(CXXFLAGS_host)
-LINK.host ?= $(CXX.host)
-LDFLAGS.host ?=
-AR.host ?= ar
-
-# Define a dir function that can handle spaces.
-# http://www.gnu.org/software/make/manual/make.html#Syntax-of-Functions
-# "leading spaces cannot appear in the text of the first argument as written.
-# These characters can be put into the argument value by variable substitution."
-empty :=
-space := $(empty) $(empty)
-
-# http://stackoverflow.com/questions/1189781/using-make-dir-or-notdir-on-a-path-with-spaces
-replace_spaces = $(subst $(space),?,$1)
-unreplace_spaces = $(subst ?,$(space),$1)
-dirx = $(call unreplace_spaces,$(dir $(call replace_spaces,$1)))
-
-# Flags to make gcc output dependency info. Note that you need to be
-# careful here to use the flags that ccache and distcc can understand.
-# We write to a dep file on the side first and then rename at the end
-# so we can't end up with a broken dep file.
-depfile = $(depsdir)/$(call replace_spaces,$@).d
-DEPFLAGS = -MMD -MF $(depfile).raw
-
-# We have to fixup the deps output in a few ways.
-# (1) the file output should mention the proper .o file.
-# ccache or distcc lose the path to the target, so we convert a rule of
-# the form:
-# foobar.o: DEP1 DEP2
-# into
-# path/to/foobar.o: DEP1 DEP2
-# (2) we want missing files not to cause us to fail to build.
-# We want to rewrite
-# foobar.o: DEP1 DEP2 \
-# DEP3
-# to
-# DEP1:
-# DEP2:
-# DEP3:
-# so if the files are missing, they're just considered phony rules.
-# We have to do some pretty insane escaping to get those backslashes
-# and dollar signs past make, the shell, and sed at the same time.
-# Doesn't work with spaces, but that's fine: .d files have spaces in
-# their names replaced with other characters.
-define fixup_dep
-# The depfile may not exist if the input file didn't have any #includes.
-touch $(depfile).raw
-# Fixup path as in (1).
-sed -e "s|^$(notdir $@)|$@|" $(depfile).raw >> $(depfile)
-# Add extra rules as in (2).
-# We remove slashes and replace spaces with new lines;
-# remove blank lines;
-# delete the first line and append a colon to the remaining lines.
-sed -e 's|\\||' -e 'y| |\n|' $(depfile).raw |\
- grep -v '^$$' |\
- sed -e 1d -e 's|$$|:|' \
- >> $(depfile)
-rm $(depfile).raw
-endef
-
-# Command definitions:
-# - cmd_foo is the actual command to run;
-# - quiet_cmd_foo is the brief-output summary of the command.
-
-quiet_cmd_cc = CC($(TOOLSET)) $@
-cmd_cc = $(CC.$(TOOLSET)) $(GYP_CFLAGS) $(DEPFLAGS) $(CFLAGS.$(TOOLSET)) -c -o $@ $<
-
-quiet_cmd_cxx = CXX($(TOOLSET)) $@
-cmd_cxx = $(CXX.$(TOOLSET)) $(GYP_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-
-quiet_cmd_objc = CXX($(TOOLSET)) $@
-cmd_objc = $(CC.$(TOOLSET)) $(GYP_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-quiet_cmd_objcxx = CXX($(TOOLSET)) $@
-cmd_objcxx = $(CXX.$(TOOLSET)) $(GYP_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-# Commands for precompiled header files.
-quiet_cmd_pch_c = CXX($(TOOLSET)) $@
-cmd_pch_c = $(CC.$(TOOLSET)) $(GYP_PCH_CFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-quiet_cmd_pch_cc = CXX($(TOOLSET)) $@
-cmd_pch_cc = $(CC.$(TOOLSET)) $(GYP_PCH_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-quiet_cmd_pch_m = CXX($(TOOLSET)) $@
-cmd_pch_m = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $<
-quiet_cmd_pch_mm = CXX($(TOOLSET)) $@
-cmd_pch_mm = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-# gyp-mac-tool is written next to the root Makefile by gyp.
-# Use $(4) for the command, since $(2) and $(3) are used as flag by do_cmd
-# already.
-quiet_cmd_mac_tool = MACTOOL $(4) $<
-cmd_mac_tool = ./gyp-mac-tool $(4) $< "$@"
-
-quiet_cmd_mac_package_framework = PACKAGE FRAMEWORK $@
-cmd_mac_package_framework = ./gyp-mac-tool package-framework "$@" $(4)
-
-quiet_cmd_infoplist = INFOPLIST $@
-cmd_infoplist = $(CC.$(TOOLSET)) -E -P -Wno-trigraphs -x c $(INFOPLIST_DEFINES) "$<" -o "$@"
-
-quiet_cmd_touch = TOUCH $@
-cmd_touch = touch $@
-
-quiet_cmd_copy = COPY $@
-# send stderr to /dev/null to ignore messages when linking directories.
-cmd_copy = rm -rf "$@" && cp -af "$<" "$@"
-
-quiet_cmd_alink = LIBTOOL-STATIC $@
-cmd_alink = rm -f $@ && ./gyp-mac-tool filter-libtool libtool $(GYP_LIBTOOLFLAGS) -static -o $@ $(filter %.o,$^)
-
-quiet_cmd_link = LINK($(TOOLSET)) $@
-cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS)
-
-quiet_cmd_solink = SOLINK($(TOOLSET)) $@
-cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS)
-
-quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@
-cmd_solink_module = $(LINK.$(TOOLSET)) -bundle $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS)
-
-
-# Define an escape_quotes function to escape single quotes.
-# This allows us to handle quotes properly as long as we always use
-# use single quotes and escape_quotes.
-escape_quotes = $(subst ','\'',$(1))
-# This comment is here just to include a ' to unconfuse syntax highlighting.
-# Define an escape_vars function to escape '$' variable syntax.
-# This allows us to read/write command lines with shell variables (e.g.
-# $LD_LIBRARY_PATH), without triggering make substitution.
-escape_vars = $(subst $$,$$$$,$(1))
-# Helper that expands to a shell command to echo a string exactly as it is in
-# make. This uses printf instead of echo because printf's behaviour with respect
-# to escape sequences is more portable than echo's across different shells
-# (e.g., dash, bash).
-exact_echo = printf '%s\n' '$(call escape_quotes,$(1))'
-
-# Helper to compare the command we're about to run against the command
-# we logged the last time we ran the command. Produces an empty
-# string (false) when the commands match.
-# Tricky point: Make has no string-equality test function.
-# The kernel uses the following, but it seems like it would have false
-# positives, where one string reordered its arguments.
-# arg_check = $(strip $(filter-out $(cmd_$(1)), $(cmd_$@)) \
-# $(filter-out $(cmd_$@), $(cmd_$(1))))
-# We instead substitute each for the empty string into the other, and
-# say they're equal if both substitutions produce the empty string.
-# .d files contain ? instead of spaces, take that into account.
-command_changed = $(or $(subst $(cmd_$(1)),,$(cmd_$(call replace_spaces,$@))),\
- $(subst $(cmd_$(call replace_spaces,$@)),,$(cmd_$(1))))
-
-# Helper that is non-empty when a prerequisite changes.
-# Normally make does this implicitly, but we force rules to always run
-# so we can check their command lines.
-# $? -- new prerequisites
-# $| -- order-only dependencies
-prereq_changed = $(filter-out FORCE_DO_CMD,$(filter-out $|,$?))
-
-# Helper that executes all postbuilds until one fails.
-define do_postbuilds
- @E=0;\
- for p in $(POSTBUILDS); do\
- eval $$p;\
- E=$$?;\
- if [ $$E -ne 0 ]; then\
- break;\
- fi;\
- done;\
- if [ $$E -ne 0 ]; then\
- rm -rf "$@";\
- exit $$E;\
- fi
-endef
-
-# do_cmd: run a command via the above cmd_foo names, if necessary.
-# Should always run for a given target to handle command-line changes.
-# Second argument, if non-zero, makes it do asm/C/C++ dependency munging.
-# Third argument, if non-zero, makes it do POSTBUILDS processing.
-# Note: We intentionally do NOT call dirx for depfile, since it contains ? for
-# spaces already and dirx strips the ? characters.
-define do_cmd
-$(if $(or $(command_changed),$(prereq_changed)),
- @$(call exact_echo, $($(quiet)cmd_$(1)))
- @mkdir -p "$(call dirx,$@)" "$(dir $(depfile))"
- $(if $(findstring flock,$(word 2,$(cmd_$1))),
- @$(cmd_$(1))
- @echo " $(quiet_cmd_$(1)): Finished",
- @$(cmd_$(1))
- )
- @$(call exact_echo,$(call escape_vars,cmd_$(call replace_spaces,$@) := $(cmd_$(1)))) > $(depfile)
- @$(if $(2),$(fixup_dep))
- $(if $(and $(3), $(POSTBUILDS)),
- $(call do_postbuilds)
- )
-)
-endef
-
-# Declare the "all" target first so it is the default,
-# even though we don't have the deps yet.
-.PHONY: all
-all:
-
-# make looks for ways to re-generate included makefiles, but in our case, we
-# don't have a direct way. Explicitly telling make that it has nothing to do
-# for them makes it go faster.
-%.d: ;
-
-# Use FORCE_DO_CMD to force a target to run. Should be coupled with
-# do_cmd.
-.PHONY: FORCE_DO_CMD
-FORCE_DO_CMD:
-
-TOOLSET := target
-# Suffix rules, putting all outputs into $(obj).
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cpp FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cxx FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.m FORCE_DO_CMD
- @$(call do_cmd,objc,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.mm FORCE_DO_CMD
- @$(call do_cmd,objcxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.S FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.s FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-# Try building from generated source, too.
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cpp FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cxx FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.m FORCE_DO_CMD
- @$(call do_cmd,objc,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.mm FORCE_DO_CMD
- @$(call do_cmd,objcxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.S FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.s FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-$(obj).$(TOOLSET)/%.o: $(obj)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.cpp FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.cxx FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.m FORCE_DO_CMD
- @$(call do_cmd,objc,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.mm FORCE_DO_CMD
- @$(call do_cmd,objcxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.S FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.s FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-
-ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
- $(findstring $(join ^,$(prefix)),\
- $(join ^,test_reference.target.mk)))),)
- include test_reference.target.mk
-endif
-
-quiet_cmd_regen_makefile = ACTION Regenerating $@
-cmd_regen_makefile = cd $(srcdir); /Users/trott/io.js/deps/npm/node_modules/node-gyp/gyp/gyp_main.py -fmake --ignore-environment "--toplevel-dir=." -I/Users/trott/io.js/test/addons-napi/test_reference/build/config.gypi -I/Users/trott/io.js/deps/npm/node_modules/node-gyp/addon.gypi -I/Users/trott/io.js/common.gypi "--depth=." "-Goutput_dir=." "--generator-output=build" "-Dlibrary=shared_library" "-Dvisibility=default" "-Dnode_root_dir=/Users/trott/io.js" "-Dnode_gyp_dir=/Users/trott/io.js/deps/npm/node_modules/node-gyp" "-Dnode_lib_file=/Users/trott/io.js/$(Configuration)/node.lib" "-Dmodule_root_dir=/Users/trott/io.js/test/addons-napi/test_reference" "-Dnode_engine=v8" binding.gyp
-Makefile: $(srcdir)/../../../deps/npm/node_modules/node-gyp/addon.gypi $(srcdir)/build/config.gypi $(srcdir)/binding.gyp $(srcdir)/../../../common.gypi
- $(call do_cmd,regen_makefile)
-
-# "all" is a concatenation of the "all" targets from all the included
-# sub-makefiles. This is just here to clarify.
-all:
-
-# Add in dependency-tracking rules. $(all_deps) is the list of every single
-# target in our tree. Only consider the ones with .d (dependency) info:
-d_files := $(wildcard $(foreach f,$(all_deps),$(depsdir)/$(f).d))
-ifneq ($(d_files),)
- include $(d_files)
-endif
diff --git a/test/addons-napi/test_reference/build/binding.Makefile b/test/addons-napi/test_reference/build/binding.Makefile
deleted file mode 100644
index f022b877d237a8..00000000000000
--- a/test/addons-napi/test_reference/build/binding.Makefile
+++ /dev/null
@@ -1,6 +0,0 @@
-# This file is generated by gyp; do not edit.
-
-export builddir_name ?= ./build/.
-.PHONY: all
-all:
- $(MAKE) test_reference
diff --git a/test/addons-napi/test_reference/build/config.gypi b/test/addons-napi/test_reference/build/config.gypi
deleted file mode 100644
index 6a35cfccbb772b..00000000000000
--- a/test/addons-napi/test_reference/build/config.gypi
+++ /dev/null
@@ -1,70 +0,0 @@
-# Do not edit. File was generated by node-gyp's "configure" step
-{
- "target_defaults": {
- "cflags": [],
- "default_configuration": "Release",
- "defines": [],
- "include_dirs": [],
- "libraries": []
- },
- "variables": {
- "asan": 0,
- "coverage": "false",
- "debug_devtools": "node",
- "debug_http2": "false",
- "debug_nghttp2": "false",
- "force_dynamic_crt": 0,
- "host_arch": "x64",
- "icu_data_file": "icudt59l.dat",
- "icu_data_in": "../../deps/icu-small/source/data/in/icudt59l.dat",
- "icu_endianness": "l",
- "icu_gyp_path": "tools/icu/icu-generic.gyp",
- "icu_locales": "en,root",
- "icu_path": "deps/icu-small",
- "icu_small": "true",
- "icu_ver_major": "59",
- "llvm_version": 0,
- "node_byteorder": "little",
- "node_enable_d8": "false",
- "node_enable_v8_vtunejit": "false",
- "node_install_npm": "true",
- "node_module_version": 58,
- "node_no_browser_globals": "false",
- "node_prefix": "/usr/local",
- "node_release_urlbase": "",
- "node_shared": "false",
- "node_shared_cares": "false",
- "node_shared_http_parser": "false",
- "node_shared_libuv": "false",
- "node_shared_openssl": "false",
- "node_shared_zlib": "false",
- "node_tag": "",
- "node_use_bundled_v8": "true",
- "node_use_dtrace": "true",
- "node_use_etw": "false",
- "node_use_lttng": "false",
- "node_use_openssl": "true",
- "node_use_perfctr": "false",
- "node_use_v8_platform": "true",
- "node_without_node_options": "false",
- "openssl_fips": "",
- "openssl_no_asm": 0,
- "shlib_suffix": "58.dylib",
- "target_arch": "x64",
- "uv_parent_path": "/deps/uv/",
- "uv_use_dtrace": "true",
- "v8_enable_gdbjit": 0,
- "v8_enable_i18n_support": 1,
- "v8_enable_inspector": 1,
- "v8_no_strict_aliasing": 1,
- "v8_optimized_debug": 0,
- "v8_promise_internal_field_count": 1,
- "v8_random_seed": 0,
- "v8_trace_maps": 0,
- "v8_use_snapshot": "true",
- "want_separate_host_toolset": 0,
- "xcode_version": "8.0",
- "nodedir": "/Users/trott/io.js",
- "standalone_static_library": 1
- }
-}
diff --git a/test/addons-napi/test_reference/build/gyp-mac-tool b/test/addons-napi/test_reference/build/gyp-mac-tool
deleted file mode 100755
index 8ef02b0493a003..00000000000000
--- a/test/addons-napi/test_reference/build/gyp-mac-tool
+++ /dev/null
@@ -1,611 +0,0 @@
-#!/usr/bin/env python
-# Generated by gyp. Do not edit.
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Utility functions to perform Xcode-style build steps.
-
-These functions are executed via gyp-mac-tool when using the Makefile generator.
-"""
-
-import fcntl
-import fnmatch
-import glob
-import json
-import os
-import plistlib
-import re
-import shutil
-import string
-import subprocess
-import sys
-import tempfile
-
-
-def main(args):
- executor = MacTool()
- exit_code = executor.Dispatch(args)
- if exit_code is not None:
- sys.exit(exit_code)
-
-
-class MacTool(object):
- """This class performs all the Mac tooling steps. The methods can either be
- executed directly, or dispatched from an argument list."""
-
- def Dispatch(self, args):
- """Dispatches a string command to a method."""
- if len(args) < 1:
- raise Exception("Not enough arguments")
-
- method = "Exec%s" % self._CommandifyName(args[0])
- return getattr(self, method)(*args[1:])
-
- def _CommandifyName(self, name_string):
- """Transforms a tool name like copy-info-plist to CopyInfoPlist"""
- return name_string.title().replace('-', '')
-
- def ExecCopyBundleResource(self, source, dest, convert_to_binary):
- """Copies a resource file to the bundle/Resources directory, performing any
- necessary compilation on each resource."""
- extension = os.path.splitext(source)[1].lower()
- if os.path.isdir(source):
- # Copy tree.
- # TODO(thakis): This copies file attributes like mtime, while the
- # single-file branch below doesn't. This should probably be changed to
- # be consistent with the single-file branch.
- if os.path.exists(dest):
- shutil.rmtree(dest)
- shutil.copytree(source, dest)
- elif extension == '.xib':
- return self._CopyXIBFile(source, dest)
- elif extension == '.storyboard':
- return self._CopyXIBFile(source, dest)
- elif extension == '.strings':
- self._CopyStringsFile(source, dest, convert_to_binary)
- else:
- shutil.copy(source, dest)
-
- def _CopyXIBFile(self, source, dest):
- """Compiles a XIB file with ibtool into a binary plist in the bundle."""
-
- # ibtool sometimes crashes with relative paths. See crbug.com/314728.
- base = os.path.dirname(os.path.realpath(__file__))
- if os.path.relpath(source):
- source = os.path.join(base, source)
- if os.path.relpath(dest):
- dest = os.path.join(base, dest)
-
- args = ['xcrun', 'ibtool', '--errors', '--warnings', '--notices',
- '--output-format', 'human-readable-text', '--compile', dest, source]
- ibtool_section_re = re.compile(r'/\*.*\*/')
- ibtool_re = re.compile(r'.*note:.*is clipping its content')
- ibtoolout = subprocess.Popen(args, stdout=subprocess.PIPE)
- current_section_header = None
- for line in ibtoolout.stdout:
- if ibtool_section_re.match(line):
- current_section_header = line
- elif not ibtool_re.match(line):
- if current_section_header:
- sys.stdout.write(current_section_header)
- current_section_header = None
- sys.stdout.write(line)
- return ibtoolout.returncode
-
- def _ConvertToBinary(self, dest):
- subprocess.check_call([
- 'xcrun', 'plutil', '-convert', 'binary1', '-o', dest, dest])
-
- def _CopyStringsFile(self, source, dest, convert_to_binary):
- """Copies a .strings file using iconv to reconvert the input into UTF-16."""
- input_code = self._DetectInputEncoding(source) or "UTF-8"
-
- # Xcode's CpyCopyStringsFile / builtin-copyStrings seems to call
- # CFPropertyListCreateFromXMLData() behind the scenes; at least it prints
- # CFPropertyListCreateFromXMLData(): Old-style plist parser: missing
- # semicolon in dictionary.
- # on invalid files. Do the same kind of validation.
- import CoreFoundation
- s = open(source, 'rb').read()
- d = CoreFoundation.CFDataCreate(None, s, len(s))
- _, error = CoreFoundation.CFPropertyListCreateFromXMLData(None, d, 0, None)
- if error:
- return
-
- fp = open(dest, 'wb')
- fp.write(s.decode(input_code).encode('UTF-16'))
- fp.close()
-
- if convert_to_binary == 'True':
- self._ConvertToBinary(dest)
-
- def _DetectInputEncoding(self, file_name):
- """Reads the first few bytes from file_name and tries to guess the text
- encoding. Returns None as a guess if it can't detect it."""
- fp = open(file_name, 'rb')
- try:
- header = fp.read(3)
- except e:
- fp.close()
- return None
- fp.close()
- if header.startswith("\xFE\xFF"):
- return "UTF-16"
- elif header.startswith("\xFF\xFE"):
- return "UTF-16"
- elif header.startswith("\xEF\xBB\xBF"):
- return "UTF-8"
- else:
- return None
-
- def ExecCopyInfoPlist(self, source, dest, convert_to_binary, *keys):
- """Copies the |source| Info.plist to the destination directory |dest|."""
- # Read the source Info.plist into memory.
- fd = open(source, 'r')
- lines = fd.read()
- fd.close()
-
- # Insert synthesized key/value pairs (e.g. BuildMachineOSBuild).
- plist = plistlib.readPlistFromString(lines)
- if keys:
- plist = dict(plist.items() + json.loads(keys[0]).items())
- lines = plistlib.writePlistToString(plist)
-
- # Go through all the environment variables and replace them as variables in
- # the file.
- IDENT_RE = re.compile(r'[/\s]')
- for key in os.environ:
- if key.startswith('_'):
- continue
- evar = '${%s}' % key
- evalue = os.environ[key]
- lines = string.replace(lines, evar, evalue)
-
- # Xcode supports various suffices on environment variables, which are
- # all undocumented. :rfc1034identifier is used in the standard project
- # template these days, and :identifier was used earlier. They are used to
- # convert non-url characters into things that look like valid urls --
- # except that the replacement character for :identifier, '_' isn't valid
- # in a URL either -- oops, hence :rfc1034identifier was born.
- evar = '${%s:identifier}' % key
- evalue = IDENT_RE.sub('_', os.environ[key])
- lines = string.replace(lines, evar, evalue)
-
- evar = '${%s:rfc1034identifier}' % key
- evalue = IDENT_RE.sub('-', os.environ[key])
- lines = string.replace(lines, evar, evalue)
-
- # Remove any keys with values that haven't been replaced.
- lines = lines.split('\n')
- for i in range(len(lines)):
- if lines[i].strip().startswith("${"):
- lines[i] = None
- lines[i - 1] = None
- lines = '\n'.join(filter(lambda x: x is not None, lines))
-
- # Write out the file with variables replaced.
- fd = open(dest, 'w')
- fd.write(lines)
- fd.close()
-
- # Now write out PkgInfo file now that the Info.plist file has been
- # "compiled".
- self._WritePkgInfo(dest)
-
- if convert_to_binary == 'True':
- self._ConvertToBinary(dest)
-
- def _WritePkgInfo(self, info_plist):
- """This writes the PkgInfo file from the data stored in Info.plist."""
- plist = plistlib.readPlist(info_plist)
- if not plist:
- return
-
- # Only create PkgInfo for executable types.
- package_type = plist['CFBundlePackageType']
- if package_type != 'APPL':
- return
-
- # The format of PkgInfo is eight characters, representing the bundle type
- # and bundle signature, each four characters. If that is missing, four
- # '?' characters are used instead.
- signature_code = plist.get('CFBundleSignature', '????')
- if len(signature_code) != 4: # Wrong length resets everything, too.
- signature_code = '?' * 4
-
- dest = os.path.join(os.path.dirname(info_plist), 'PkgInfo')
- fp = open(dest, 'w')
- fp.write('%s%s' % (package_type, signature_code))
- fp.close()
-
- def ExecFlock(self, lockfile, *cmd_list):
- """Emulates the most basic behavior of Linux's flock(1)."""
- # Rely on exception handling to report errors.
- fd = os.open(lockfile, os.O_RDONLY|os.O_NOCTTY|os.O_CREAT, 0o666)
- fcntl.flock(fd, fcntl.LOCK_EX)
- return subprocess.call(cmd_list)
-
- def ExecFilterLibtool(self, *cmd_list):
- """Calls libtool and filters out '/path/to/libtool: file: foo.o has no
- symbols'."""
- libtool_re = re.compile(r'^.*libtool: file: .* has no symbols$')
- libtool_re5 = re.compile(
- r'^.*libtool: warning for library: ' +
- r'.* the table of contents is empty ' +
- r'\(no object file members in the library define global symbols\)$')
- env = os.environ.copy()
- # Ref:
- # http://www.opensource.apple.com/source/cctools/cctools-809/misc/libtool.c
-    # The problem with this flag is that it resets the mtime on the file to
- # epoch=0, e.g. 1970-1-1 or 1969-12-31 depending on timezone.
- env['ZERO_AR_DATE'] = '1'
- libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env)
- _, err = libtoolout.communicate()
- for line in err.splitlines():
- if not libtool_re.match(line) and not libtool_re5.match(line):
- print >>sys.stderr, line
- # Unconditionally touch the output .a file on the command line if present
- # and the command succeeded. A bit hacky.
- if not libtoolout.returncode:
- for i in range(len(cmd_list) - 1):
- if cmd_list[i] == "-o" and cmd_list[i+1].endswith('.a'):
- os.utime(cmd_list[i+1], None)
- break
- return libtoolout.returncode
-
- def ExecPackageFramework(self, framework, version):
- """Takes a path to Something.framework and the Current version of that and
- sets up all the symlinks."""
- # Find the name of the binary based on the part before the ".framework".
- binary = os.path.basename(framework).split('.')[0]
-
- CURRENT = 'Current'
- RESOURCES = 'Resources'
- VERSIONS = 'Versions'
-
- if not os.path.exists(os.path.join(framework, VERSIONS, version, binary)):
- # Binary-less frameworks don't seem to contain symlinks (see e.g.
- # chromium's out/Debug/org.chromium.Chromium.manifest/ bundle).
- return
-
- # Move into the framework directory to set the symlinks correctly.
- pwd = os.getcwd()
- os.chdir(framework)
-
- # Set up the Current version.
- self._Relink(version, os.path.join(VERSIONS, CURRENT))
-
- # Set up the root symlinks.
- self._Relink(os.path.join(VERSIONS, CURRENT, binary), binary)
- self._Relink(os.path.join(VERSIONS, CURRENT, RESOURCES), RESOURCES)
-
- # Back to where we were before!
- os.chdir(pwd)
-
- def _Relink(self, dest, link):
- """Creates a symlink to |dest| named |link|. If |link| already exists,
- it is overwritten."""
- if os.path.lexists(link):
- os.remove(link)
- os.symlink(dest, link)
-
- def ExecCompileXcassets(self, keys, *inputs):
- """Compiles multiple .xcassets files into a single .car file.
-
-    This invokes 'actool' to compile all the input .xcassets files. The
-    |keys| argument is a JSON-encoded dictionary of extra arguments to
-    pass to 'actool' when the asset catalogs contain an application icon
-    or a launch image.
-
-    Note that 'actool' does not create the Assets.car file if the asset
-    catalogs do not contain an imageset.
- """
- command_line = [
- 'xcrun', 'actool', '--output-format', 'human-readable-text',
- '--compress-pngs', '--notices', '--warnings', '--errors',
- ]
- is_iphone_target = 'IPHONEOS_DEPLOYMENT_TARGET' in os.environ
- if is_iphone_target:
- platform = os.environ['CONFIGURATION'].split('-')[-1]
- if platform not in ('iphoneos', 'iphonesimulator'):
- platform = 'iphonesimulator'
- command_line.extend([
- '--platform', platform, '--target-device', 'iphone',
- '--target-device', 'ipad', '--minimum-deployment-target',
- os.environ['IPHONEOS_DEPLOYMENT_TARGET'], '--compile',
- os.path.abspath(os.environ['CONTENTS_FOLDER_PATH']),
- ])
- else:
- command_line.extend([
- '--platform', 'macosx', '--target-device', 'mac',
- '--minimum-deployment-target', os.environ['MACOSX_DEPLOYMENT_TARGET'],
- '--compile',
- os.path.abspath(os.environ['UNLOCALIZED_RESOURCES_FOLDER_PATH']),
- ])
- if keys:
- keys = json.loads(keys)
- for key, value in keys.iteritems():
- arg_name = '--' + key
- if isinstance(value, bool):
- if value:
- command_line.append(arg_name)
- elif isinstance(value, list):
- for v in value:
- command_line.append(arg_name)
- command_line.append(str(v))
- else:
- command_line.append(arg_name)
- command_line.append(str(value))
-    # Note: actool crashes if input paths are relative, so use os.path.abspath
-    # to get absolute paths for the inputs.
- command_line.extend(map(os.path.abspath, inputs))
- subprocess.check_call(command_line)
-
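The |keys| handling in ExecCompileXcassets above expands a JSON dictionary into repeated actool flags. A short sketch of that expansion; the example dictionary and flag names are made up:

    # Sketch only: the keys JSON below is hypothetical.
    import json

    keys = json.loads(
        '{"app-icon": "AppIcon", "compress-pngs": true, "target-device": ["iphone", "ipad"]}')
    command_line = []
    for key, value in keys.items():
        arg_name = '--' + key
        if isinstance(value, bool):
            if value:
                command_line.append(arg_name)
        elif isinstance(value, list):
            for v in value:
                command_line.extend([arg_name, str(v)])
        else:
            command_line.extend([arg_name, str(value)])
    print(command_line)
    # ['--app-icon', 'AppIcon', '--compress-pngs',
    #  '--target-device', 'iphone', '--target-device', 'ipad']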
- def ExecMergeInfoPlist(self, output, *inputs):
- """Merge multiple .plist files into a single .plist file."""
- merged_plist = {}
- for path in inputs:
- plist = self._LoadPlistMaybeBinary(path)
- self._MergePlist(merged_plist, plist)
- plistlib.writePlist(merged_plist, output)
-
- def ExecCodeSignBundle(self, key, resource_rules, entitlements, provisioning):
- """Code sign a bundle.
-
- This function tries to code sign an iOS bundle, following the same
- algorithm as Xcode:
- 1. copy ResourceRules.plist from the user or the SDK into the bundle,
-    2. pick the provisioning profile that best matches the bundle identifier,
- and copy it into the bundle as embedded.mobileprovision,
- 3. copy Entitlements.plist from user or SDK next to the bundle,
- 4. code sign the bundle.
- """
- resource_rules_path = self._InstallResourceRules(resource_rules)
- substitutions, overrides = self._InstallProvisioningProfile(
- provisioning, self._GetCFBundleIdentifier())
- entitlements_path = self._InstallEntitlements(
- entitlements, substitutions, overrides)
- subprocess.check_call([
- 'codesign', '--force', '--sign', key, '--resource-rules',
- resource_rules_path, '--entitlements', entitlements_path,
- os.path.join(
- os.environ['TARGET_BUILD_DIR'],
- os.environ['FULL_PRODUCT_NAME'])])
-
- def _InstallResourceRules(self, resource_rules):
- """Installs ResourceRules.plist from user or SDK into the bundle.
-
- Args:
- resource_rules: string, optional, path to the ResourceRules.plist file
-        to use, defaults to "${SDKROOT}/ResourceRules.plist"
-
- Returns:
- Path to the copy of ResourceRules.plist into the bundle.
- """
- source_path = resource_rules
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['CONTENTS_FOLDER_PATH'],
- 'ResourceRules.plist')
- if not source_path:
- source_path = os.path.join(
- os.environ['SDKROOT'], 'ResourceRules.plist')
- shutil.copy2(source_path, target_path)
- return target_path
-
- def _InstallProvisioningProfile(self, profile, bundle_identifier):
- """Installs embedded.mobileprovision into the bundle.
-
- Args:
- profile: string, optional, short name of the .mobileprovision file
- to use, if empty or the file is missing, the best file installed
- will be used
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
-
- Returns:
-      A tuple containing two dictionaries: variable substitutions and values
-      to override when generating the entitlements file.
- """
- source_path, provisioning_data, team_id = self._FindProvisioningProfile(
- profile, bundle_identifier)
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['CONTENTS_FOLDER_PATH'],
- 'embedded.mobileprovision')
- shutil.copy2(source_path, target_path)
- substitutions = self._GetSubstitutions(bundle_identifier, team_id + '.')
- return substitutions, provisioning_data['Entitlements']
-
- def _FindProvisioningProfile(self, profile, bundle_identifier):
- """Finds the .mobileprovision file to use for signing the bundle.
-
-    Checks all the installed provisioning profiles (or, if the user specified
-    the PROVISIONING_PROFILE variable, only that one) and selects the most
-    specific one that corresponds to the bundle identifier.
-
- Args:
- profile: string, optional, short name of the .mobileprovision file
- to use, if empty or the file is missing, the best file installed
- will be used
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
-
- Returns:
- A tuple of the path to the selected provisioning profile, the data of
- the embedded plist in the provisioning profile and the team identifier
- to use for code signing.
-
- Raises:
- SystemExit: if no .mobileprovision can be used to sign the bundle.
- """
- profiles_dir = os.path.join(
- os.environ['HOME'], 'Library', 'MobileDevice', 'Provisioning Profiles')
- if not os.path.isdir(profiles_dir):
- print >>sys.stderr, (
- 'cannot find mobile provisioning for %s' % bundle_identifier)
- sys.exit(1)
- provisioning_profiles = None
- if profile:
- profile_path = os.path.join(profiles_dir, profile + '.mobileprovision')
- if os.path.exists(profile_path):
- provisioning_profiles = [profile_path]
- if not provisioning_profiles:
- provisioning_profiles = glob.glob(
- os.path.join(profiles_dir, '*.mobileprovision'))
- valid_provisioning_profiles = {}
- for profile_path in provisioning_profiles:
- profile_data = self._LoadProvisioningProfile(profile_path)
- app_id_pattern = profile_data.get(
- 'Entitlements', {}).get('application-identifier', '')
- for team_identifier in profile_data.get('TeamIdentifier', []):
- app_id = '%s.%s' % (team_identifier, bundle_identifier)
- if fnmatch.fnmatch(app_id, app_id_pattern):
- valid_provisioning_profiles[app_id_pattern] = (
- profile_path, profile_data, team_identifier)
- if not valid_provisioning_profiles:
- print >>sys.stderr, (
- 'cannot find mobile provisioning for %s' % bundle_identifier)
- sys.exit(1)
- # If the user has multiple provisioning profiles installed that can be
- # used for ${bundle_identifier}, pick the most specific one (ie. the
- # provisioning profile whose pattern is the longest).
- selected_key = max(valid_provisioning_profiles, key=lambda v: len(v))
- return valid_provisioning_profiles[selected_key]
-
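_FindProvisioningProfile above keys each usable profile by its application-identifier pattern and picks the longest, i.e. most specific, match. A condensed sketch with hypothetical team, bundle and pattern values:

    # Sketch only: the team ID, bundle ID and patterns are hypothetical.
    import fnmatch

    team_id = 'ABCDE12345'
    bundle_identifier = 'com.example.app'
    app_id = '%s.%s' % (team_id, bundle_identifier)

    patterns = ['ABCDE12345.*', 'ABCDE12345.com.example.*']
    valid = [p for p in patterns if fnmatch.fnmatch(app_id, p)]
    print(max(valid, key=len))  # prints the most specific pattern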
- def _LoadProvisioningProfile(self, profile_path):
- """Extracts the plist embedded in a provisioning profile.
-
- Args:
- profile_path: string, path to the .mobileprovision file
-
- Returns:
- Content of the plist embedded in the provisioning profile as a dictionary.
- """
- with tempfile.NamedTemporaryFile() as temp:
- subprocess.check_call([
- 'security', 'cms', '-D', '-i', profile_path, '-o', temp.name])
- return self._LoadPlistMaybeBinary(temp.name)
-
- def _MergePlist(self, merged_plist, plist):
- """Merge |plist| into |merged_plist|."""
- for key, value in plist.iteritems():
- if isinstance(value, dict):
- merged_value = merged_plist.get(key, {})
- if isinstance(merged_value, dict):
- self._MergePlist(merged_value, value)
- merged_plist[key] = merged_value
- else:
- merged_plist[key] = value
- else:
- merged_plist[key] = value
-
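_MergePlist above merges plists recursively: nested dictionaries are combined key by key, while scalar values from the later plist win. A short usage sketch with made-up plist fragments:

    # Sketch only: the dictionaries below are made up.
    def merge_plist(merged, plist):
        for key, value in plist.items():
            if isinstance(value, dict) and isinstance(merged.get(key, {}), dict):
                merged_value = merged.get(key, {})
                merge_plist(merged_value, value)
                merged[key] = merged_value
            else:
                merged[key] = value

    merged = {'CFBundleName': 'App', 'Extras': {'a': 1}}
    merge_plist(merged, {'Extras': {'b': 2}, 'CFBundleVersion': '1.0'})
    print(merged)
    # {'CFBundleName': 'App', 'Extras': {'a': 1, 'b': 2}, 'CFBundleVersion': '1.0'}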
- def _LoadPlistMaybeBinary(self, plist_path):
- """Loads into a memory a plist possibly encoded in binary format.
-
- This is a wrapper around plistlib.readPlist that tries to convert the
- plist to the XML format if it can't be parsed (assuming that it is in
- the binary format).
-
- Args:
- plist_path: string, path to a plist file, in XML or binary format
-
- Returns:
- Content of the plist as a dictionary.
- """
- try:
- # First, try to read the file using plistlib that only supports XML,
- # and if an exception is raised, convert a temporary copy to XML and
- # load that copy.
- return plistlib.readPlist(plist_path)
- except:
- pass
- with tempfile.NamedTemporaryFile() as temp:
- shutil.copy2(plist_path, temp.name)
- subprocess.check_call(['plutil', '-convert', 'xml1', temp.name])
- return plistlib.readPlist(temp.name)
-
- def _GetSubstitutions(self, bundle_identifier, app_identifier_prefix):
- """Constructs a dictionary of variable substitutions for Entitlements.plist.
-
- Args:
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
- app_identifier_prefix: string, value for AppIdentifierPrefix
-
- Returns:
- Dictionary of substitutions to apply when generating Entitlements.plist.
- """
- return {
- 'CFBundleIdentifier': bundle_identifier,
- 'AppIdentifierPrefix': app_identifier_prefix,
- }
-
- def _GetCFBundleIdentifier(self):
- """Extracts CFBundleIdentifier value from Info.plist in the bundle.
-
- Returns:
- Value of CFBundleIdentifier in the Info.plist located in the bundle.
- """
- info_plist_path = os.path.join(
- os.environ['TARGET_BUILD_DIR'],
- os.environ['INFOPLIST_PATH'])
- info_plist_data = self._LoadPlistMaybeBinary(info_plist_path)
- return info_plist_data['CFBundleIdentifier']
-
- def _InstallEntitlements(self, entitlements, substitutions, overrides):
- """Generates and install the ${BundleName}.xcent entitlements file.
-
- Expands variables "$(variable)" pattern in the source entitlements file,
- add extra entitlements defined in the .mobileprovision file and the copy
- the generated plist to "${BundlePath}.xcent".
-
- Args:
- entitlements: string, optional, path to the Entitlements.plist template
- to use, defaults to "${SDKROOT}/Entitlements.plist"
- substitutions: dictionary, variable substitutions
- overrides: dictionary, values to add to the entitlements
-
- Returns:
- Path to the generated entitlements file.
- """
- source_path = entitlements
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['PRODUCT_NAME'] + '.xcent')
- if not source_path:
- source_path = os.path.join(
- os.environ['SDKROOT'],
- 'Entitlements.plist')
- shutil.copy2(source_path, target_path)
- data = self._LoadPlistMaybeBinary(target_path)
- data = self._ExpandVariables(data, substitutions)
- if overrides:
- for key in overrides:
- if key not in data:
- data[key] = overrides[key]
- plistlib.writePlist(data, target_path)
- return target_path
-
- def _ExpandVariables(self, data, substitutions):
- """Expands variables "$(variable)" in data.
-
- Args:
- data: object, can be either string, list or dictionary
- substitutions: dictionary, variable substitutions to perform
-
- Returns:
-      Copy of data where each reference to "$(variable)" has been replaced
- by the corresponding value found in substitutions, or left intact if
- the key was not found.
- """
- if isinstance(data, str):
- for key, value in substitutions.iteritems():
- data = data.replace('$(%s)' % key, value)
- return data
- if isinstance(data, list):
- return [self._ExpandVariables(v, substitutions) for v in data]
- if isinstance(data, dict):
- return {k: self._ExpandVariables(data[k], substitutions) for k in data}
- return data
-
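_ExpandVariables above walks strings, lists and dictionaries and replaces "$(variable)" references using the substitutions table built by _GetSubstitutions. A short usage sketch; the prefix and bundle identifier are hypothetical:

    # Sketch only: the substitution values below are hypothetical.
    def expand(data, substitutions):
        if isinstance(data, str):
            for key, value in substitutions.items():
                data = data.replace('$(%s)' % key, value)
            return data
        if isinstance(data, list):
            return [expand(v, substitutions) for v in data]
        if isinstance(data, dict):
            return {k: expand(v, substitutions) for k, v in data.items()}
        return data

    subs = {'AppIdentifierPrefix': 'ABCDE12345.', 'CFBundleIdentifier': 'com.example.app'}
    print(expand({'application-identifier': '$(AppIdentifierPrefix)$(CFBundleIdentifier)'}, subs))
    # {'application-identifier': 'ABCDE12345.com.example.app'}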
-if __name__ == '__main__':
- sys.exit(main(sys.argv[1:]))
diff --git a/test/addons-napi/test_reference/build/test_reference.target.mk b/test/addons-napi/test_reference/build/test_reference.target.mk
deleted file mode 100644
index 03ba8d4d99b2ec..00000000000000
--- a/test/addons-napi/test_reference/build/test_reference.target.mk
+++ /dev/null
@@ -1,178 +0,0 @@
-# This file is generated by gyp; do not edit.
-
-TOOLSET := target
-TARGET := test_reference
-DEFS_Debug := \
- '-DNODE_GYP_MODULE_NAME=test_reference' \
- '-DUSING_UV_SHARED=1' \
- '-DUSING_V8_SHARED=1' \
- '-DV8_DEPRECATION_WARNINGS=1' \
- '-D_DARWIN_USE_64_BIT_INODE=1' \
- '-D_LARGEFILE_SOURCE' \
- '-D_FILE_OFFSET_BITS=64' \
- '-DBUILDING_NODE_EXTENSION' \
- '-DDEBUG' \
- '-D_DEBUG' \
- '-DV8_ENABLE_CHECKS'
-
-# Flags passed to all source files.
-CFLAGS_Debug := \
- -O0 \
- -gdwarf-2 \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -Wall \
- -Wendif-labels \
- -W \
- -Wno-unused-parameter
-
-# Flags passed to only C files.
-CFLAGS_C_Debug := \
- -fno-strict-aliasing
-
-# Flags passed to only C++ files.
-CFLAGS_CC_Debug := \
- -std=gnu++0x \
- -stdlib=libc++ \
- -fno-rtti \
- -fno-exceptions \
- -fno-threadsafe-statics \
- -fno-strict-aliasing
-
-# Flags passed to only ObjC files.
-CFLAGS_OBJC_Debug :=
-
-# Flags passed to only ObjC++ files.
-CFLAGS_OBJCC_Debug :=
-
-INCS_Debug := \
- -I/Users/trott/io.js/include/node \
- -I/Users/trott/io.js/src \
- -I/Users/trott/io.js/deps/uv/include \
- -I/Users/trott/io.js/deps/v8/include
-
-DEFS_Release := \
- '-DNODE_GYP_MODULE_NAME=test_reference' \
- '-DUSING_UV_SHARED=1' \
- '-DUSING_V8_SHARED=1' \
- '-DV8_DEPRECATION_WARNINGS=1' \
- '-D_DARWIN_USE_64_BIT_INODE=1' \
- '-D_LARGEFILE_SOURCE' \
- '-D_FILE_OFFSET_BITS=64' \
- '-DBUILDING_NODE_EXTENSION'
-
-# Flags passed to all source files.
-CFLAGS_Release := \
- -Os \
- -gdwarf-2 \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -Wall \
- -Wendif-labels \
- -W \
- -Wno-unused-parameter
-
-# Flags passed to only C files.
-CFLAGS_C_Release := \
- -fno-strict-aliasing
-
-# Flags passed to only C++ files.
-CFLAGS_CC_Release := \
- -std=gnu++0x \
- -stdlib=libc++ \
- -fno-rtti \
- -fno-exceptions \
- -fno-threadsafe-statics \
- -fno-strict-aliasing
-
-# Flags passed to only ObjC files.
-CFLAGS_OBJC_Release :=
-
-# Flags passed to only ObjC++ files.
-CFLAGS_OBJCC_Release :=
-
-INCS_Release := \
- -I/Users/trott/io.js/include/node \
- -I/Users/trott/io.js/src \
- -I/Users/trott/io.js/deps/uv/include \
- -I/Users/trott/io.js/deps/v8/include
-
-OBJS := \
- $(obj).target/$(TARGET)/test_reference.o
-
-# Add to the list of files we specially track dependencies for.
-all_deps += $(OBJS)
-
-# CFLAGS et al overrides must be target-local.
-# See "Target-specific Variable Values" in the GNU Make manual.
-$(OBJS): TOOLSET := $(TOOLSET)
-$(OBJS): GYP_CFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE))
-$(OBJS): GYP_CXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE))
-$(OBJS): GYP_OBJCFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) $(CFLAGS_OBJC_$(BUILDTYPE))
-$(OBJS): GYP_OBJCXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) $(CFLAGS_OBJCC_$(BUILDTYPE))
-
-# Suffix rules, putting all outputs into $(obj).
-
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-# Try building from generated source, too.
-
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-# End of this set of suffix rules
-### Rules for final target.
-LDFLAGS_Debug := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -L$(builddir) \
- -stdlib=libc++
-
-LIBTOOLFLAGS_Debug := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first
-
-LDFLAGS_Release := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -L$(builddir) \
- -stdlib=libc++
-
-LIBTOOLFLAGS_Release := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first
-
-LIBS :=
-
-$(builddir)/test_reference.node: GYP_LDFLAGS := $(LDFLAGS_$(BUILDTYPE))
-$(builddir)/test_reference.node: LIBS := $(LIBS)
-$(builddir)/test_reference.node: GYP_LIBTOOLFLAGS := $(LIBTOOLFLAGS_$(BUILDTYPE))
-$(builddir)/test_reference.node: TOOLSET := $(TOOLSET)
-$(builddir)/test_reference.node: $(OBJS) FORCE_DO_CMD
- $(call do_cmd,solink_module)
-
-all_deps += $(builddir)/test_reference.node
-# Add target alias
-.PHONY: test_reference
-test_reference: $(builddir)/test_reference.node
-
-# Short alias for building this executable.
-.PHONY: test_reference.node
-test_reference.node: $(builddir)/test_reference.node
-
-# Add executable to "all" target.
-.PHONY: all
-all: $(builddir)/test_reference.node
-
diff --git a/test/addons-napi/test_string/build/Makefile b/test/addons-napi/test_string/build/Makefile
deleted file mode 100644
index 63e02566c436f9..00000000000000
--- a/test/addons-napi/test_string/build/Makefile
+++ /dev/null
@@ -1,342 +0,0 @@
-# We borrow heavily from the kernel build setup, though we are simpler since
-# we don't have Kconfig tweaking settings on us.
-
-# The implicit make rules have it looking for RCS files, among other things.
-# We instead explicitly write all the rules we care about.
-# It's even quicker (saves ~200ms) to pass -r on the command line.
-MAKEFLAGS=-r
-
-# The source directory tree.
-srcdir := ..
-abs_srcdir := $(abspath $(srcdir))
-
-# The name of the builddir.
-builddir_name ?= .
-
-# The V=1 flag on command line makes us verbosely print command lines.
-ifdef V
- quiet=
-else
- quiet=quiet_
-endif
-
-# Specify BUILDTYPE=Release on the command line for a release build.
-BUILDTYPE ?= Release
-
-# Directory all our build output goes into.
-# Note that this must be two directories beneath src/ for unit tests to pass,
-# as they reach into the src/ directory for data with relative paths.
-builddir ?= $(builddir_name)/$(BUILDTYPE)
-abs_builddir := $(abspath $(builddir))
-depsdir := $(builddir)/.deps
-
-# Object output directory.
-obj := $(builddir)/obj
-abs_obj := $(abspath $(obj))
-
-# We build up a list of every single one of the targets so we can slurp in the
-# generated dependency rule Makefiles in one pass.
-all_deps :=
-
-
-
-CC.target ?= $(CC)
-CFLAGS.target ?= $(CPPFLAGS) $(CFLAGS)
-CXX.target ?= $(CXX)
-CXXFLAGS.target ?= $(CPPFLAGS) $(CXXFLAGS)
-LINK.target ?= $(LINK)
-LDFLAGS.target ?= $(LDFLAGS)
-AR.target ?= $(AR)
-
-# C++ apps need to be linked with g++.
-LINK ?= $(CXX.target)
-
-# TODO(evan): move all cross-compilation logic to gyp-time so we don't need
-# to replicate this environment fallback in make as well.
-CC.host ?= gcc
-CFLAGS.host ?= $(CPPFLAGS_host) $(CFLAGS_host)
-CXX.host ?= g++
-CXXFLAGS.host ?= $(CPPFLAGS_host) $(CXXFLAGS_host)
-LINK.host ?= $(CXX.host)
-LDFLAGS.host ?=
-AR.host ?= ar
-
-# Define a dir function that can handle spaces.
-# http://www.gnu.org/software/make/manual/make.html#Syntax-of-Functions
-# "leading spaces cannot appear in the text of the first argument as written.
-# These characters can be put into the argument value by variable substitution."
-empty :=
-space := $(empty) $(empty)
-
-# http://stackoverflow.com/questions/1189781/using-make-dir-or-notdir-on-a-path-with-spaces
-replace_spaces = $(subst $(space),?,$1)
-unreplace_spaces = $(subst ?,$(space),$1)
-dirx = $(call unreplace_spaces,$(dir $(call replace_spaces,$1)))
-
-# Flags to make gcc output dependency info. Note that you need to be
-# careful here to use the flags that ccache and distcc can understand.
-# We write to a dep file on the side first and then rename at the end
-# so we can't end up with a broken dep file.
-depfile = $(depsdir)/$(call replace_spaces,$@).d
-DEPFLAGS = -MMD -MF $(depfile).raw
-
-# We have to fixup the deps output in a few ways.
-# (1) the file output should mention the proper .o file.
-# ccache or distcc lose the path to the target, so we convert a rule of
-# the form:
-# foobar.o: DEP1 DEP2
-# into
-# path/to/foobar.o: DEP1 DEP2
-# (2) we want missing files not to cause us to fail to build.
-# We want to rewrite
-# foobar.o: DEP1 DEP2 \
-# DEP3
-# to
-# DEP1:
-# DEP2:
-# DEP3:
-# so if the files are missing, they're just considered phony rules.
-# We have to do some pretty insane escaping to get those backslashes
-# and dollar signs past make, the shell, and sed at the same time.
-# Doesn't work with spaces, but that's fine: .d files have spaces in
-# their names replaced with other characters.
-define fixup_dep
-# The depfile may not exist if the input file didn't have any #includes.
-touch $(depfile).raw
-# Fixup path as in (1).
-sed -e "s|^$(notdir $@)|$@|" $(depfile).raw >> $(depfile)
-# Add extra rules as in (2).
-# We remove slashes and replace spaces with new lines;
-# remove blank lines;
-# delete the first line and append a colon to the remaining lines.
-sed -e 's|\\||' -e 'y| |\n|' $(depfile).raw |\
- grep -v '^$$' |\
- sed -e 1d -e 's|$$|:|' \
- >> $(depfile)
-rm $(depfile).raw
-endef
-
-# Command definitions:
-# - cmd_foo is the actual command to run;
-# - quiet_cmd_foo is the brief-output summary of the command.
-
-quiet_cmd_cc = CC($(TOOLSET)) $@
-cmd_cc = $(CC.$(TOOLSET)) $(GYP_CFLAGS) $(DEPFLAGS) $(CFLAGS.$(TOOLSET)) -c -o $@ $<
-
-quiet_cmd_cxx = CXX($(TOOLSET)) $@
-cmd_cxx = $(CXX.$(TOOLSET)) $(GYP_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-
-quiet_cmd_objc = CXX($(TOOLSET)) $@
-cmd_objc = $(CC.$(TOOLSET)) $(GYP_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-quiet_cmd_objcxx = CXX($(TOOLSET)) $@
-cmd_objcxx = $(CXX.$(TOOLSET)) $(GYP_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-# Commands for precompiled header files.
-quiet_cmd_pch_c = CXX($(TOOLSET)) $@
-cmd_pch_c = $(CC.$(TOOLSET)) $(GYP_PCH_CFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-quiet_cmd_pch_cc = CXX($(TOOLSET)) $@
-cmd_pch_cc = $(CC.$(TOOLSET)) $(GYP_PCH_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-quiet_cmd_pch_m = CXX($(TOOLSET)) $@
-cmd_pch_m = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $<
-quiet_cmd_pch_mm = CXX($(TOOLSET)) $@
-cmd_pch_mm = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-# gyp-mac-tool is written next to the root Makefile by gyp.
-# Use $(4) for the command, since $(2) and $(3) are already used as flags by
-# do_cmd.
-quiet_cmd_mac_tool = MACTOOL $(4) $<
-cmd_mac_tool = ./gyp-mac-tool $(4) $< "$@"
-
-quiet_cmd_mac_package_framework = PACKAGE FRAMEWORK $@
-cmd_mac_package_framework = ./gyp-mac-tool package-framework "$@" $(4)
-
-quiet_cmd_infoplist = INFOPLIST $@
-cmd_infoplist = $(CC.$(TOOLSET)) -E -P -Wno-trigraphs -x c $(INFOPLIST_DEFINES) "$<" -o "$@"
-
-quiet_cmd_touch = TOUCH $@
-cmd_touch = touch $@
-
-quiet_cmd_copy = COPY $@
-# send stderr to /dev/null to ignore messages when linking directories.
-cmd_copy = rm -rf "$@" && cp -af "$<" "$@"
-
-quiet_cmd_alink = LIBTOOL-STATIC $@
-cmd_alink = rm -f $@ && ./gyp-mac-tool filter-libtool libtool $(GYP_LIBTOOLFLAGS) -static -o $@ $(filter %.o,$^)
-
-quiet_cmd_link = LINK($(TOOLSET)) $@
-cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS)
-
-quiet_cmd_solink = SOLINK($(TOOLSET)) $@
-cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS)
-
-quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@
-cmd_solink_module = $(LINK.$(TOOLSET)) -bundle $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS)
-
-
-# Define an escape_quotes function to escape single quotes.
-# This allows us to handle quotes properly as long as we always use
-# single quotes and escape_quotes.
-escape_quotes = $(subst ','\'',$(1))
-# This comment is here just to include a ' to unconfuse syntax highlighting.
-# Define an escape_vars function to escape '$' variable syntax.
-# This allows us to read/write command lines with shell variables (e.g.
-# $LD_LIBRARY_PATH), without triggering make substitution.
-escape_vars = $(subst $$,$$$$,$(1))
-# Helper that expands to a shell command to echo a string exactly as it is in
-# make. This uses printf instead of echo because printf's behaviour with respect
-# to escape sequences is more portable than echo's across different shells
-# (e.g., dash, bash).
-exact_echo = printf '%s\n' '$(call escape_quotes,$(1))'
-
-# Helper to compare the command we're about to run against the command
-# we logged the last time we ran the command. Produces an empty
-# string (false) when the commands match.
-# Tricky point: Make has no string-equality test function.
-# The kernel uses the following, but it seems like it would have false
-# positives, where one string reordered its arguments.
-# arg_check = $(strip $(filter-out $(cmd_$(1)), $(cmd_$@)) \
-# $(filter-out $(cmd_$@), $(cmd_$(1))))
-# We instead substitute each for the empty string into the other, and
-# say they're equal if both substitutions produce the empty string.
-# .d files contain ? instead of spaces, take that into account.
-command_changed = $(or $(subst $(cmd_$(1)),,$(cmd_$(call replace_spaces,$@))),\
- $(subst $(cmd_$(call replace_spaces,$@)),,$(cmd_$(1))))
-
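command_changed above tests two command strings for equality by substituting each into the other and requiring both results to be empty. The same idea, sketched in Python purely for illustration; the command strings are made up:

    # Illustration only: the command strings are made up.
    def commands_equal(a, b):
        # If substituting each string into the other leaves nothing behind,
        # the two strings must have been identical.
        return a.replace(b, '') == '' and b.replace(a, '') == ''

    print(commands_equal('cc -o foo.o foo.c', 'cc -o foo.o foo.c'))      # True
    print(commands_equal('cc -o foo.o foo.c', 'cc -O2 -o foo.o foo.c'))  # False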
-# Helper that is non-empty when a prerequisite changes.
-# Normally make does this implicitly, but we force rules to always run
-# so we can check their command lines.
-# $? -- new prerequisites
-# $| -- order-only dependencies
-prereq_changed = $(filter-out FORCE_DO_CMD,$(filter-out $|,$?))
-
-# Helper that executes all postbuilds until one fails.
-define do_postbuilds
- @E=0;\
- for p in $(POSTBUILDS); do\
- eval $$p;\
- E=$$?;\
- if [ $$E -ne 0 ]; then\
- break;\
- fi;\
- done;\
- if [ $$E -ne 0 ]; then\
- rm -rf "$@";\
- exit $$E;\
- fi
-endef
-
-# do_cmd: run a command via the above cmd_foo names, if necessary.
-# Should always run for a given target to handle command-line changes.
-# Second argument, if non-zero, makes it do asm/C/C++ dependency munging.
-# Third argument, if non-zero, makes it do POSTBUILDS processing.
-# Note: We intentionally do NOT call dirx for depfile, since it contains ? for
-# spaces already and dirx strips the ? characters.
-define do_cmd
-$(if $(or $(command_changed),$(prereq_changed)),
- @$(call exact_echo, $($(quiet)cmd_$(1)))
- @mkdir -p "$(call dirx,$@)" "$(dir $(depfile))"
- $(if $(findstring flock,$(word 2,$(cmd_$1))),
- @$(cmd_$(1))
- @echo " $(quiet_cmd_$(1)): Finished",
- @$(cmd_$(1))
- )
- @$(call exact_echo,$(call escape_vars,cmd_$(call replace_spaces,$@) := $(cmd_$(1)))) > $(depfile)
- @$(if $(2),$(fixup_dep))
- $(if $(and $(3), $(POSTBUILDS)),
- $(call do_postbuilds)
- )
-)
-endef
-
-# Declare the "all" target first so it is the default,
-# even though we don't have the deps yet.
-.PHONY: all
-all:
-
-# make looks for ways to re-generate included makefiles, but in our case, we
-# don't have a direct way. Explicitly telling make that it has nothing to do
-# for them makes it go faster.
-%.d: ;
-
-# Use FORCE_DO_CMD to force a target to run. Should be coupled with
-# do_cmd.
-.PHONY: FORCE_DO_CMD
-FORCE_DO_CMD:
-
-TOOLSET := target
-# Suffix rules, putting all outputs into $(obj).
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cpp FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cxx FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.m FORCE_DO_CMD
- @$(call do_cmd,objc,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.mm FORCE_DO_CMD
- @$(call do_cmd,objcxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.S FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.s FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-# Try building from generated source, too.
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cpp FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cxx FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.m FORCE_DO_CMD
- @$(call do_cmd,objc,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.mm FORCE_DO_CMD
- @$(call do_cmd,objcxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.S FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.s FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-$(obj).$(TOOLSET)/%.o: $(obj)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.cpp FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.cxx FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.m FORCE_DO_CMD
- @$(call do_cmd,objc,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.mm FORCE_DO_CMD
- @$(call do_cmd,objcxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.S FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.s FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-
-ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
- $(findstring $(join ^,$(prefix)),\
- $(join ^,test_string.target.mk)))),)
- include test_string.target.mk
-endif
-
-quiet_cmd_regen_makefile = ACTION Regenerating $@
-cmd_regen_makefile = cd $(srcdir); /Users/trott/io.js/deps/npm/node_modules/node-gyp/gyp/gyp_main.py -fmake --ignore-environment "--toplevel-dir=." -I/Users/trott/io.js/test/addons-napi/test_string/build/config.gypi -I/Users/trott/io.js/deps/npm/node_modules/node-gyp/addon.gypi -I/Users/trott/io.js/common.gypi "--depth=." "-Goutput_dir=." "--generator-output=build" "-Dlibrary=shared_library" "-Dvisibility=default" "-Dnode_root_dir=/Users/trott/io.js" "-Dnode_gyp_dir=/Users/trott/io.js/deps/npm/node_modules/node-gyp" "-Dnode_lib_file=/Users/trott/io.js/$(Configuration)/node.lib" "-Dmodule_root_dir=/Users/trott/io.js/test/addons-napi/test_string" "-Dnode_engine=v8" binding.gyp
-Makefile: $(srcdir)/../../../deps/npm/node_modules/node-gyp/addon.gypi $(srcdir)/build/config.gypi $(srcdir)/binding.gyp $(srcdir)/../../../common.gypi
- $(call do_cmd,regen_makefile)
-
-# "all" is a concatenation of the "all" targets from all the included
-# sub-makefiles. This is just here to clarify.
-all:
-
-# Add in dependency-tracking rules. $(all_deps) is the list of every single
-# target in our tree. Only consider the ones with .d (dependency) info:
-d_files := $(wildcard $(foreach f,$(all_deps),$(depsdir)/$(f).d))
-ifneq ($(d_files),)
- include $(d_files)
-endif
diff --git a/test/addons-napi/test_string/build/binding.Makefile b/test/addons-napi/test_string/build/binding.Makefile
deleted file mode 100644
index cbc34b564e10e6..00000000000000
--- a/test/addons-napi/test_string/build/binding.Makefile
+++ /dev/null
@@ -1,6 +0,0 @@
-# This file is generated by gyp; do not edit.
-
-export builddir_name ?= ./build/.
-.PHONY: all
-all:
- $(MAKE) test_string
diff --git a/test/addons-napi/test_string/build/config.gypi b/test/addons-napi/test_string/build/config.gypi
deleted file mode 100644
index 6a35cfccbb772b..00000000000000
--- a/test/addons-napi/test_string/build/config.gypi
+++ /dev/null
@@ -1,70 +0,0 @@
-# Do not edit. File was generated by node-gyp's "configure" step
-{
- "target_defaults": {
- "cflags": [],
- "default_configuration": "Release",
- "defines": [],
- "include_dirs": [],
- "libraries": []
- },
- "variables": {
- "asan": 0,
- "coverage": "false",
- "debug_devtools": "node",
- "debug_http2": "false",
- "debug_nghttp2": "false",
- "force_dynamic_crt": 0,
- "host_arch": "x64",
- "icu_data_file": "icudt59l.dat",
- "icu_data_in": "../../deps/icu-small/source/data/in/icudt59l.dat",
- "icu_endianness": "l",
- "icu_gyp_path": "tools/icu/icu-generic.gyp",
- "icu_locales": "en,root",
- "icu_path": "deps/icu-small",
- "icu_small": "true",
- "icu_ver_major": "59",
- "llvm_version": 0,
- "node_byteorder": "little",
- "node_enable_d8": "false",
- "node_enable_v8_vtunejit": "false",
- "node_install_npm": "true",
- "node_module_version": 58,
- "node_no_browser_globals": "false",
- "node_prefix": "/usr/local",
- "node_release_urlbase": "",
- "node_shared": "false",
- "node_shared_cares": "false",
- "node_shared_http_parser": "false",
- "node_shared_libuv": "false",
- "node_shared_openssl": "false",
- "node_shared_zlib": "false",
- "node_tag": "",
- "node_use_bundled_v8": "true",
- "node_use_dtrace": "true",
- "node_use_etw": "false",
- "node_use_lttng": "false",
- "node_use_openssl": "true",
- "node_use_perfctr": "false",
- "node_use_v8_platform": "true",
- "node_without_node_options": "false",
- "openssl_fips": "",
- "openssl_no_asm": 0,
- "shlib_suffix": "58.dylib",
- "target_arch": "x64",
- "uv_parent_path": "/deps/uv/",
- "uv_use_dtrace": "true",
- "v8_enable_gdbjit": 0,
- "v8_enable_i18n_support": 1,
- "v8_enable_inspector": 1,
- "v8_no_strict_aliasing": 1,
- "v8_optimized_debug": 0,
- "v8_promise_internal_field_count": 1,
- "v8_random_seed": 0,
- "v8_trace_maps": 0,
- "v8_use_snapshot": "true",
- "want_separate_host_toolset": 0,
- "xcode_version": "8.0",
- "nodedir": "/Users/trott/io.js",
- "standalone_static_library": 1
- }
-}
diff --git a/test/addons-napi/test_string/build/gyp-mac-tool b/test/addons-napi/test_string/build/gyp-mac-tool
deleted file mode 100755
index 8ef02b0493a003..00000000000000
--- a/test/addons-napi/test_string/build/gyp-mac-tool
+++ /dev/null
@@ -1,611 +0,0 @@
-#!/usr/bin/env python
-# Generated by gyp. Do not edit.
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Utility functions to perform Xcode-style build steps.
-
-These functions are executed via gyp-mac-tool when using the Makefile generator.
-"""
-
-import fcntl
-import fnmatch
-import glob
-import json
-import os
-import plistlib
-import re
-import shutil
-import string
-import subprocess
-import sys
-import tempfile
-
-
-def main(args):
- executor = MacTool()
- exit_code = executor.Dispatch(args)
- if exit_code is not None:
- sys.exit(exit_code)
-
-
-class MacTool(object):
- """This class performs all the Mac tooling steps. The methods can either be
- executed directly, or dispatched from an argument list."""
-
- def Dispatch(self, args):
- """Dispatches a string command to a method."""
- if len(args) < 1:
- raise Exception("Not enough arguments")
-
- method = "Exec%s" % self._CommandifyName(args[0])
- return getattr(self, method)(*args[1:])
-
- def _CommandifyName(self, name_string):
- """Transforms a tool name like copy-info-plist to CopyInfoPlist"""
- return name_string.title().replace('-', '')
-
- def ExecCopyBundleResource(self, source, dest, convert_to_binary):
- """Copies a resource file to the bundle/Resources directory, performing any
- necessary compilation on each resource."""
- extension = os.path.splitext(source)[1].lower()
- if os.path.isdir(source):
- # Copy tree.
- # TODO(thakis): This copies file attributes like mtime, while the
- # single-file branch below doesn't. This should probably be changed to
- # be consistent with the single-file branch.
- if os.path.exists(dest):
- shutil.rmtree(dest)
- shutil.copytree(source, dest)
- elif extension == '.xib':
- return self._CopyXIBFile(source, dest)
- elif extension == '.storyboard':
- return self._CopyXIBFile(source, dest)
- elif extension == '.strings':
- self._CopyStringsFile(source, dest, convert_to_binary)
- else:
- shutil.copy(source, dest)
-
- def _CopyXIBFile(self, source, dest):
- """Compiles a XIB file with ibtool into a binary plist in the bundle."""
-
- # ibtool sometimes crashes with relative paths. See crbug.com/314728.
- base = os.path.dirname(os.path.realpath(__file__))
- if os.path.relpath(source):
- source = os.path.join(base, source)
- if os.path.relpath(dest):
- dest = os.path.join(base, dest)
-
- args = ['xcrun', 'ibtool', '--errors', '--warnings', '--notices',
- '--output-format', 'human-readable-text', '--compile', dest, source]
- ibtool_section_re = re.compile(r'/\*.*\*/')
- ibtool_re = re.compile(r'.*note:.*is clipping its content')
- ibtoolout = subprocess.Popen(args, stdout=subprocess.PIPE)
- current_section_header = None
- for line in ibtoolout.stdout:
- if ibtool_section_re.match(line):
- current_section_header = line
- elif not ibtool_re.match(line):
- if current_section_header:
- sys.stdout.write(current_section_header)
- current_section_header = None
- sys.stdout.write(line)
- return ibtoolout.returncode
-
- def _ConvertToBinary(self, dest):
- subprocess.check_call([
- 'xcrun', 'plutil', '-convert', 'binary1', '-o', dest, dest])
-
- def _CopyStringsFile(self, source, dest, convert_to_binary):
- """Copies a .strings file using iconv to reconvert the input into UTF-16."""
- input_code = self._DetectInputEncoding(source) or "UTF-8"
-
- # Xcode's CpyCopyStringsFile / builtin-copyStrings seems to call
- # CFPropertyListCreateFromXMLData() behind the scenes; at least it prints
- # CFPropertyListCreateFromXMLData(): Old-style plist parser: missing
- # semicolon in dictionary.
- # on invalid files. Do the same kind of validation.
- import CoreFoundation
- s = open(source, 'rb').read()
- d = CoreFoundation.CFDataCreate(None, s, len(s))
- _, error = CoreFoundation.CFPropertyListCreateFromXMLData(None, d, 0, None)
- if error:
- return
-
- fp = open(dest, 'wb')
- fp.write(s.decode(input_code).encode('UTF-16'))
- fp.close()
-
- if convert_to_binary == 'True':
- self._ConvertToBinary(dest)
-
- def _DetectInputEncoding(self, file_name):
- """Reads the first few bytes from file_name and tries to guess the text
- encoding. Returns None as a guess if it can't detect it."""
- fp = open(file_name, 'rb')
- try:
- header = fp.read(3)
-    except Exception:
- fp.close()
- return None
- fp.close()
- if header.startswith("\xFE\xFF"):
- return "UTF-16"
- elif header.startswith("\xFF\xFE"):
- return "UTF-16"
- elif header.startswith("\xEF\xBB\xBF"):
- return "UTF-8"
- else:
- return None
-
- def ExecCopyInfoPlist(self, source, dest, convert_to_binary, *keys):
- """Copies the |source| Info.plist to the destination directory |dest|."""
- # Read the source Info.plist into memory.
- fd = open(source, 'r')
- lines = fd.read()
- fd.close()
-
- # Insert synthesized key/value pairs (e.g. BuildMachineOSBuild).
- plist = plistlib.readPlistFromString(lines)
- if keys:
- plist = dict(plist.items() + json.loads(keys[0]).items())
- lines = plistlib.writePlistToString(plist)
-
-    # Go through all the environment variables and substitute them wherever
-    # they are referenced as variables in the file.
- IDENT_RE = re.compile(r'[/\s]')
- for key in os.environ:
- if key.startswith('_'):
- continue
- evar = '${%s}' % key
- evalue = os.environ[key]
- lines = string.replace(lines, evar, evalue)
-
-      # Xcode supports various suffixes on environment variables, which are
-      # all undocumented. :rfc1034identifier is used in the standard project
-      # template these days, and :identifier was used earlier. They are used to
-      # convert non-URL characters into things that look like valid URLs --
-      # except that the replacement character for :identifier, '_', isn't valid
-      # in a URL either -- oops, hence :rfc1034identifier was born.
- evar = '${%s:identifier}' % key
- evalue = IDENT_RE.sub('_', os.environ[key])
- lines = string.replace(lines, evar, evalue)
-
- evar = '${%s:rfc1034identifier}' % key
- evalue = IDENT_RE.sub('-', os.environ[key])
- lines = string.replace(lines, evar, evalue)
-
- # Remove any keys with values that haven't been replaced.
- lines = lines.split('\n')
- for i in range(len(lines)):
- if lines[i].strip().startswith("${"):
- lines[i] = None
- lines[i - 1] = None
- lines = '\n'.join(filter(lambda x: x is not None, lines))
-
- # Write out the file with variables replaced.
- fd = open(dest, 'w')
- fd.write(lines)
- fd.close()
-
-    # Write out the PkgInfo file now that the Info.plist file has been
-    # "compiled".
- self._WritePkgInfo(dest)
-
- if convert_to_binary == 'True':
- self._ConvertToBinary(dest)
-
- def _WritePkgInfo(self, info_plist):
- """This writes the PkgInfo file from the data stored in Info.plist."""
- plist = plistlib.readPlist(info_plist)
- if not plist:
- return
-
- # Only create PkgInfo for executable types.
- package_type = plist['CFBundlePackageType']
- if package_type != 'APPL':
- return
-
- # The format of PkgInfo is eight characters, representing the bundle type
- # and bundle signature, each four characters. If that is missing, four
- # '?' characters are used instead.
- signature_code = plist.get('CFBundleSignature', '????')
- if len(signature_code) != 4: # Wrong length resets everything, too.
- signature_code = '?' * 4
-
- dest = os.path.join(os.path.dirname(info_plist), 'PkgInfo')
- fp = open(dest, 'w')
- fp.write('%s%s' % (package_type, signature_code))
- fp.close()
-
- def ExecFlock(self, lockfile, *cmd_list):
- """Emulates the most basic behavior of Linux's flock(1)."""
- # Rely on exception handling to report errors.
- fd = os.open(lockfile, os.O_RDONLY|os.O_NOCTTY|os.O_CREAT, 0o666)
- fcntl.flock(fd, fcntl.LOCK_EX)
- return subprocess.call(cmd_list)
-
- def ExecFilterLibtool(self, *cmd_list):
- """Calls libtool and filters out '/path/to/libtool: file: foo.o has no
- symbols'."""
- libtool_re = re.compile(r'^.*libtool: file: .* has no symbols$')
- libtool_re5 = re.compile(
- r'^.*libtool: warning for library: ' +
- r'.* the table of contents is empty ' +
- r'\(no object file members in the library define global symbols\)$')
- env = os.environ.copy()
- # Ref:
- # http://www.opensource.apple.com/source/cctools/cctools-809/misc/libtool.c
-    # The problem with this flag is that it resets the mtime on the file to
- # epoch=0, e.g. 1970-1-1 or 1969-12-31 depending on timezone.
- env['ZERO_AR_DATE'] = '1'
- libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env)
- _, err = libtoolout.communicate()
- for line in err.splitlines():
- if not libtool_re.match(line) and not libtool_re5.match(line):
- print >>sys.stderr, line
- # Unconditionally touch the output .a file on the command line if present
- # and the command succeeded. A bit hacky.
- if not libtoolout.returncode:
- for i in range(len(cmd_list) - 1):
- if cmd_list[i] == "-o" and cmd_list[i+1].endswith('.a'):
- os.utime(cmd_list[i+1], None)
- break
- return libtoolout.returncode
-
- def ExecPackageFramework(self, framework, version):
- """Takes a path to Something.framework and the Current version of that and
- sets up all the symlinks."""
- # Find the name of the binary based on the part before the ".framework".
- binary = os.path.basename(framework).split('.')[0]
-
- CURRENT = 'Current'
- RESOURCES = 'Resources'
- VERSIONS = 'Versions'
-
- if not os.path.exists(os.path.join(framework, VERSIONS, version, binary)):
- # Binary-less frameworks don't seem to contain symlinks (see e.g.
- # chromium's out/Debug/org.chromium.Chromium.manifest/ bundle).
- return
-
- # Move into the framework directory to set the symlinks correctly.
- pwd = os.getcwd()
- os.chdir(framework)
-
- # Set up the Current version.
- self._Relink(version, os.path.join(VERSIONS, CURRENT))
-
- # Set up the root symlinks.
- self._Relink(os.path.join(VERSIONS, CURRENT, binary), binary)
- self._Relink(os.path.join(VERSIONS, CURRENT, RESOURCES), RESOURCES)
-
- # Back to where we were before!
- os.chdir(pwd)
-
- def _Relink(self, dest, link):
- """Creates a symlink to |dest| named |link|. If |link| already exists,
- it is overwritten."""
- if os.path.lexists(link):
- os.remove(link)
- os.symlink(dest, link)
-
- def ExecCompileXcassets(self, keys, *inputs):
- """Compiles multiple .xcassets files into a single .car file.
-
-    This invokes 'actool' to compile all the input .xcassets files. The
-    |keys| argument is a JSON-encoded dictionary of extra arguments to
-    pass to 'actool' when the asset catalogs contain an application icon
-    or a launch image.
-
-    Note that 'actool' does not create the Assets.car file if the asset
-    catalogs do not contain an imageset.
- """
- command_line = [
- 'xcrun', 'actool', '--output-format', 'human-readable-text',
- '--compress-pngs', '--notices', '--warnings', '--errors',
- ]
- is_iphone_target = 'IPHONEOS_DEPLOYMENT_TARGET' in os.environ
- if is_iphone_target:
- platform = os.environ['CONFIGURATION'].split('-')[-1]
- if platform not in ('iphoneos', 'iphonesimulator'):
- platform = 'iphonesimulator'
- command_line.extend([
- '--platform', platform, '--target-device', 'iphone',
- '--target-device', 'ipad', '--minimum-deployment-target',
- os.environ['IPHONEOS_DEPLOYMENT_TARGET'], '--compile',
- os.path.abspath(os.environ['CONTENTS_FOLDER_PATH']),
- ])
- else:
- command_line.extend([
- '--platform', 'macosx', '--target-device', 'mac',
- '--minimum-deployment-target', os.environ['MACOSX_DEPLOYMENT_TARGET'],
- '--compile',
- os.path.abspath(os.environ['UNLOCALIZED_RESOURCES_FOLDER_PATH']),
- ])
- if keys:
- keys = json.loads(keys)
- for key, value in keys.iteritems():
- arg_name = '--' + key
- if isinstance(value, bool):
- if value:
- command_line.append(arg_name)
- elif isinstance(value, list):
- for v in value:
- command_line.append(arg_name)
- command_line.append(str(v))
- else:
- command_line.append(arg_name)
- command_line.append(str(value))
-    # Note: actool crashes if input paths are relative, so use os.path.abspath
-    # to get absolute paths for the inputs.
- command_line.extend(map(os.path.abspath, inputs))
- subprocess.check_call(command_line)
-
- def ExecMergeInfoPlist(self, output, *inputs):
- """Merge multiple .plist files into a single .plist file."""
- merged_plist = {}
- for path in inputs:
- plist = self._LoadPlistMaybeBinary(path)
- self._MergePlist(merged_plist, plist)
- plistlib.writePlist(merged_plist, output)
-
- def ExecCodeSignBundle(self, key, resource_rules, entitlements, provisioning):
- """Code sign a bundle.
-
- This function tries to code sign an iOS bundle, following the same
- algorithm as Xcode:
- 1. copy ResourceRules.plist from the user or the SDK into the bundle,
-    2. pick the provisioning profile that best matches the bundle identifier,
- and copy it into the bundle as embedded.mobileprovision,
- 3. copy Entitlements.plist from user or SDK next to the bundle,
- 4. code sign the bundle.
- """
- resource_rules_path = self._InstallResourceRules(resource_rules)
- substitutions, overrides = self._InstallProvisioningProfile(
- provisioning, self._GetCFBundleIdentifier())
- entitlements_path = self._InstallEntitlements(
- entitlements, substitutions, overrides)
- subprocess.check_call([
- 'codesign', '--force', '--sign', key, '--resource-rules',
- resource_rules_path, '--entitlements', entitlements_path,
- os.path.join(
- os.environ['TARGET_BUILD_DIR'],
- os.environ['FULL_PRODUCT_NAME'])])
-
- def _InstallResourceRules(self, resource_rules):
- """Installs ResourceRules.plist from user or SDK into the bundle.
-
- Args:
- resource_rules: string, optional, path to the ResourceRules.plist file
-        to use, defaults to "${SDKROOT}/ResourceRules.plist"
-
- Returns:
- Path to the copy of ResourceRules.plist into the bundle.
- """
- source_path = resource_rules
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['CONTENTS_FOLDER_PATH'],
- 'ResourceRules.plist')
- if not source_path:
- source_path = os.path.join(
- os.environ['SDKROOT'], 'ResourceRules.plist')
- shutil.copy2(source_path, target_path)
- return target_path
-
- def _InstallProvisioningProfile(self, profile, bundle_identifier):
- """Installs embedded.mobileprovision into the bundle.
-
- Args:
- profile: string, optional, short name of the .mobileprovision file
- to use, if empty or the file is missing, the best file installed
- will be used
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
-
- Returns:
-      A tuple containing two dictionaries: variable substitutions and values
-      to override when generating the entitlements file.
- """
- source_path, provisioning_data, team_id = self._FindProvisioningProfile(
- profile, bundle_identifier)
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['CONTENTS_FOLDER_PATH'],
- 'embedded.mobileprovision')
- shutil.copy2(source_path, target_path)
- substitutions = self._GetSubstitutions(bundle_identifier, team_id + '.')
- return substitutions, provisioning_data['Entitlements']
-
- def _FindProvisioningProfile(self, profile, bundle_identifier):
- """Finds the .mobileprovision file to use for signing the bundle.
-
-    Checks all the installed provisioning profiles (or, if the user specified
-    the PROVISIONING_PROFILE variable, only that one) and selects the most
-    specific one that corresponds to the bundle identifier.
-
- Args:
- profile: string, optional, short name of the .mobileprovision file
- to use, if empty or the file is missing, the best file installed
- will be used
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
-
- Returns:
- A tuple of the path to the selected provisioning profile, the data of
- the embedded plist in the provisioning profile and the team identifier
- to use for code signing.
-
- Raises:
- SystemExit: if no .mobileprovision can be used to sign the bundle.
- """
- profiles_dir = os.path.join(
- os.environ['HOME'], 'Library', 'MobileDevice', 'Provisioning Profiles')
- if not os.path.isdir(profiles_dir):
- print >>sys.stderr, (
- 'cannot find mobile provisioning for %s' % bundle_identifier)
- sys.exit(1)
- provisioning_profiles = None
- if profile:
- profile_path = os.path.join(profiles_dir, profile + '.mobileprovision')
- if os.path.exists(profile_path):
- provisioning_profiles = [profile_path]
- if not provisioning_profiles:
- provisioning_profiles = glob.glob(
- os.path.join(profiles_dir, '*.mobileprovision'))
- valid_provisioning_profiles = {}
- for profile_path in provisioning_profiles:
- profile_data = self._LoadProvisioningProfile(profile_path)
- app_id_pattern = profile_data.get(
- 'Entitlements', {}).get('application-identifier', '')
- for team_identifier in profile_data.get('TeamIdentifier', []):
- app_id = '%s.%s' % (team_identifier, bundle_identifier)
- if fnmatch.fnmatch(app_id, app_id_pattern):
- valid_provisioning_profiles[app_id_pattern] = (
- profile_path, profile_data, team_identifier)
- if not valid_provisioning_profiles:
- print >>sys.stderr, (
- 'cannot find mobile provisioning for %s' % bundle_identifier)
- sys.exit(1)
- # If the user has multiple provisioning profiles installed that can be
- # used for ${bundle_identifier}, pick the most specific one (ie. the
- # provisioning profile whose pattern is the longest).
- selected_key = max(valid_provisioning_profiles, key=lambda v: len(v))
- return valid_provisioning_profiles[selected_key]
-
- def _LoadProvisioningProfile(self, profile_path):
- """Extracts the plist embedded in a provisioning profile.
-
- Args:
- profile_path: string, path to the .mobileprovision file
-
- Returns:
- Content of the plist embedded in the provisioning profile as a dictionary.
- """
- with tempfile.NamedTemporaryFile() as temp:
- subprocess.check_call([
- 'security', 'cms', '-D', '-i', profile_path, '-o', temp.name])
- return self._LoadPlistMaybeBinary(temp.name)
-
- def _MergePlist(self, merged_plist, plist):
- """Merge |plist| into |merged_plist|."""
- for key, value in plist.iteritems():
- if isinstance(value, dict):
- merged_value = merged_plist.get(key, {})
- if isinstance(merged_value, dict):
- self._MergePlist(merged_value, value)
- merged_plist[key] = merged_value
- else:
- merged_plist[key] = value
- else:
- merged_plist[key] = value
-
- def _LoadPlistMaybeBinary(self, plist_path):
- """Loads into a memory a plist possibly encoded in binary format.
-
- This is a wrapper around plistlib.readPlist that tries to convert the
- plist to the XML format if it can't be parsed (assuming that it is in
- the binary format).
-
- Args:
- plist_path: string, path to a plist file, in XML or binary format
-
- Returns:
- Content of the plist as a dictionary.
- """
- try:
- # First, try to read the file using plistlib that only supports XML,
- # and if an exception is raised, convert a temporary copy to XML and
- # load that copy.
- return plistlib.readPlist(plist_path)
- except:
- pass
- with tempfile.NamedTemporaryFile() as temp:
- shutil.copy2(plist_path, temp.name)
- subprocess.check_call(['plutil', '-convert', 'xml1', temp.name])
- return plistlib.readPlist(temp.name)
-
- def _GetSubstitutions(self, bundle_identifier, app_identifier_prefix):
- """Constructs a dictionary of variable substitutions for Entitlements.plist.
-
- Args:
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
- app_identifier_prefix: string, value for AppIdentifierPrefix
-
- Returns:
- Dictionary of substitutions to apply when generating Entitlements.plist.
- """
- return {
- 'CFBundleIdentifier': bundle_identifier,
- 'AppIdentifierPrefix': app_identifier_prefix,
- }
-
- def _GetCFBundleIdentifier(self):
- """Extracts CFBundleIdentifier value from Info.plist in the bundle.
-
- Returns:
- Value of CFBundleIdentifier in the Info.plist located in the bundle.
- """
- info_plist_path = os.path.join(
- os.environ['TARGET_BUILD_DIR'],
- os.environ['INFOPLIST_PATH'])
- info_plist_data = self._LoadPlistMaybeBinary(info_plist_path)
- return info_plist_data['CFBundleIdentifier']
-
- def _InstallEntitlements(self, entitlements, substitutions, overrides):
- """Generates and install the ${BundleName}.xcent entitlements file.
-
- Expands variables "$(variable)" pattern in the source entitlements file,
- add extra entitlements defined in the .mobileprovision file and the copy
- the generated plist to "${BundlePath}.xcent".
-
- Args:
- entitlements: string, optional, path to the Entitlements.plist template
- to use, defaults to "${SDKROOT}/Entitlements.plist"
- substitutions: dictionary, variable substitutions
- overrides: dictionary, values to add to the entitlements
-
- Returns:
- Path to the generated entitlements file.
- """
- source_path = entitlements
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['PRODUCT_NAME'] + '.xcent')
- if not source_path:
- source_path = os.path.join(
- os.environ['SDKROOT'],
- 'Entitlements.plist')
- shutil.copy2(source_path, target_path)
- data = self._LoadPlistMaybeBinary(target_path)
- data = self._ExpandVariables(data, substitutions)
- if overrides:
- for key in overrides:
- if key not in data:
- data[key] = overrides[key]
- plistlib.writePlist(data, target_path)
- return target_path
-
- def _ExpandVariables(self, data, substitutions):
- """Expands variables "$(variable)" in data.
-
- Args:
- data: object, can be either string, list or dictionary
- substitutions: dictionary, variable substitutions to perform
-
- Returns:
- Copy of data where each references to "$(variable)" has been replaced
- by the corresponding value found in substitutions, or left intact if
- the key was not found.
- """
- if isinstance(data, str):
- for key, value in substitutions.iteritems():
- data = data.replace('$(%s)' % key, value)
- return data
- if isinstance(data, list):
- return [self._ExpandVariables(v, substitutions) for v in data]
- if isinstance(data, dict):
- return {k: self._ExpandVariables(data[k], substitutions) for k in data}
- return data
-
-if __name__ == '__main__':
- sys.exit(main(sys.argv[1:]))
diff --git a/test/addons-napi/test_string/build/test_string.target.mk b/test/addons-napi/test_string/build/test_string.target.mk
deleted file mode 100644
index 2338cc90a4d1ff..00000000000000
--- a/test/addons-napi/test_string/build/test_string.target.mk
+++ /dev/null
@@ -1,178 +0,0 @@
-# This file is generated by gyp; do not edit.
-
-TOOLSET := target
-TARGET := test_string
-DEFS_Debug := \
- '-DNODE_GYP_MODULE_NAME=test_string' \
- '-DUSING_UV_SHARED=1' \
- '-DUSING_V8_SHARED=1' \
- '-DV8_DEPRECATION_WARNINGS=1' \
- '-D_DARWIN_USE_64_BIT_INODE=1' \
- '-D_LARGEFILE_SOURCE' \
- '-D_FILE_OFFSET_BITS=64' \
- '-DBUILDING_NODE_EXTENSION' \
- '-DDEBUG' \
- '-D_DEBUG' \
- '-DV8_ENABLE_CHECKS'
-
-# Flags passed to all source files.
-CFLAGS_Debug := \
- -O0 \
- -gdwarf-2 \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -Wall \
- -Wendif-labels \
- -W \
- -Wno-unused-parameter
-
-# Flags passed to only C files.
-CFLAGS_C_Debug := \
- -fno-strict-aliasing
-
-# Flags passed to only C++ files.
-CFLAGS_CC_Debug := \
- -std=gnu++0x \
- -stdlib=libc++ \
- -fno-rtti \
- -fno-exceptions \
- -fno-threadsafe-statics \
- -fno-strict-aliasing
-
-# Flags passed to only ObjC files.
-CFLAGS_OBJC_Debug :=
-
-# Flags passed to only ObjC++ files.
-CFLAGS_OBJCC_Debug :=
-
-INCS_Debug := \
- -I/Users/trott/io.js/include/node \
- -I/Users/trott/io.js/src \
- -I/Users/trott/io.js/deps/uv/include \
- -I/Users/trott/io.js/deps/v8/include
-
-DEFS_Release := \
- '-DNODE_GYP_MODULE_NAME=test_string' \
- '-DUSING_UV_SHARED=1' \
- '-DUSING_V8_SHARED=1' \
- '-DV8_DEPRECATION_WARNINGS=1' \
- '-D_DARWIN_USE_64_BIT_INODE=1' \
- '-D_LARGEFILE_SOURCE' \
- '-D_FILE_OFFSET_BITS=64' \
- '-DBUILDING_NODE_EXTENSION'
-
-# Flags passed to all source files.
-CFLAGS_Release := \
- -Os \
- -gdwarf-2 \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -Wall \
- -Wendif-labels \
- -W \
- -Wno-unused-parameter
-
-# Flags passed to only C files.
-CFLAGS_C_Release := \
- -fno-strict-aliasing
-
-# Flags passed to only C++ files.
-CFLAGS_CC_Release := \
- -std=gnu++0x \
- -stdlib=libc++ \
- -fno-rtti \
- -fno-exceptions \
- -fno-threadsafe-statics \
- -fno-strict-aliasing
-
-# Flags passed to only ObjC files.
-CFLAGS_OBJC_Release :=
-
-# Flags passed to only ObjC++ files.
-CFLAGS_OBJCC_Release :=
-
-INCS_Release := \
- -I/Users/trott/io.js/include/node \
- -I/Users/trott/io.js/src \
- -I/Users/trott/io.js/deps/uv/include \
- -I/Users/trott/io.js/deps/v8/include
-
-OBJS := \
- $(obj).target/$(TARGET)/test_string.o
-
-# Add to the list of files we specially track dependencies for.
-all_deps += $(OBJS)
-
-# CFLAGS et al overrides must be target-local.
-# See "Target-specific Variable Values" in the GNU Make manual.
-$(OBJS): TOOLSET := $(TOOLSET)
-$(OBJS): GYP_CFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE))
-$(OBJS): GYP_CXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE))
-$(OBJS): GYP_OBJCFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) $(CFLAGS_OBJC_$(BUILDTYPE))
-$(OBJS): GYP_OBJCXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) $(CFLAGS_OBJCC_$(BUILDTYPE))
-
-# Suffix rules, putting all outputs into $(obj).
-
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-# Try building from generated source, too.
-
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-# End of this set of suffix rules
-### Rules for final target.
-LDFLAGS_Debug := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -L$(builddir) \
- -stdlib=libc++
-
-LIBTOOLFLAGS_Debug := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first
-
-LDFLAGS_Release := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -L$(builddir) \
- -stdlib=libc++
-
-LIBTOOLFLAGS_Release := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first
-
-LIBS :=
-
-$(builddir)/test_string.node: GYP_LDFLAGS := $(LDFLAGS_$(BUILDTYPE))
-$(builddir)/test_string.node: LIBS := $(LIBS)
-$(builddir)/test_string.node: GYP_LIBTOOLFLAGS := $(LIBTOOLFLAGS_$(BUILDTYPE))
-$(builddir)/test_string.node: TOOLSET := $(TOOLSET)
-$(builddir)/test_string.node: $(OBJS) FORCE_DO_CMD
- $(call do_cmd,solink_module)
-
-all_deps += $(builddir)/test_string.node
-# Add target alias
-.PHONY: test_string
-test_string: $(builddir)/test_string.node
-
-# Short alias for building this executable.
-.PHONY: test_string.node
-test_string.node: $(builddir)/test_string.node
-
-# Add executable to "all" target.
-.PHONY: all
-all: $(builddir)/test_string.node
-
diff --git a/test/addons-napi/test_symbol/build/Makefile b/test/addons-napi/test_symbol/build/Makefile
deleted file mode 100644
index 0ec2231666a7b5..00000000000000
--- a/test/addons-napi/test_symbol/build/Makefile
+++ /dev/null
@@ -1,342 +0,0 @@
-# We borrow heavily from the kernel build setup, though we are simpler since
-# we don't have Kconfig tweaking settings on us.
-
-# The implicit make rules have it looking for RCS files, among other things.
-# We instead explicitly write all the rules we care about.
-# It's even quicker (saves ~200ms) to pass -r on the command line.
-MAKEFLAGS=-r
-
-# The source directory tree.
-srcdir := ..
-abs_srcdir := $(abspath $(srcdir))
-
-# The name of the builddir.
-builddir_name ?= .
-
-# The V=1 flag on command line makes us verbosely print command lines.
-ifdef V
- quiet=
-else
- quiet=quiet_
-endif
-
-# Specify BUILDTYPE=Release on the command line for a release build.
-BUILDTYPE ?= Release
-
-# Directory all our build output goes into.
-# Note that this must be two directories beneath src/ for unit tests to pass,
-# as they reach into the src/ directory for data with relative paths.
-builddir ?= $(builddir_name)/$(BUILDTYPE)
-abs_builddir := $(abspath $(builddir))
-depsdir := $(builddir)/.deps
-
-# Object output directory.
-obj := $(builddir)/obj
-abs_obj := $(abspath $(obj))
-
-# We build up a list of every single one of the targets so we can slurp in the
-# generated dependency rule Makefiles in one pass.
-all_deps :=
-
-
-
-CC.target ?= $(CC)
-CFLAGS.target ?= $(CPPFLAGS) $(CFLAGS)
-CXX.target ?= $(CXX)
-CXXFLAGS.target ?= $(CPPFLAGS) $(CXXFLAGS)
-LINK.target ?= $(LINK)
-LDFLAGS.target ?= $(LDFLAGS)
-AR.target ?= $(AR)
-
-# C++ apps need to be linked with g++.
-LINK ?= $(CXX.target)
-
-# TODO(evan): move all cross-compilation logic to gyp-time so we don't need
-# to replicate this environment fallback in make as well.
-CC.host ?= gcc
-CFLAGS.host ?= $(CPPFLAGS_host) $(CFLAGS_host)
-CXX.host ?= g++
-CXXFLAGS.host ?= $(CPPFLAGS_host) $(CXXFLAGS_host)
-LINK.host ?= $(CXX.host)
-LDFLAGS.host ?=
-AR.host ?= ar
-
-# Define a dir function that can handle spaces.
-# http://www.gnu.org/software/make/manual/make.html#Syntax-of-Functions
-# "leading spaces cannot appear in the text of the first argument as written.
-# These characters can be put into the argument value by variable substitution."
-empty :=
-space := $(empty) $(empty)
-
-# http://stackoverflow.com/questions/1189781/using-make-dir-or-notdir-on-a-path-with-spaces
-replace_spaces = $(subst $(space),?,$1)
-unreplace_spaces = $(subst ?,$(space),$1)
-dirx = $(call unreplace_spaces,$(dir $(call replace_spaces,$1)))
-
-# Flags to make gcc output dependency info. Note that you need to be
-# careful here to use the flags that ccache and distcc can understand.
-# We write to a dep file on the side first and then rename at the end
-# so we can't end up with a broken dep file.
-depfile = $(depsdir)/$(call replace_spaces,$@).d
-DEPFLAGS = -MMD -MF $(depfile).raw
-
-# We have to fixup the deps output in a few ways.
-# (1) the file output should mention the proper .o file.
-# ccache or distcc lose the path to the target, so we convert a rule of
-# the form:
-# foobar.o: DEP1 DEP2
-# into
-# path/to/foobar.o: DEP1 DEP2
-# (2) we want missing files not to cause us to fail to build.
-# We want to rewrite
-# foobar.o: DEP1 DEP2 \
-# DEP3
-# to
-# DEP1:
-# DEP2:
-# DEP3:
-# so if the files are missing, they're just considered phony rules.
-# We have to do some pretty insane escaping to get those backslashes
-# and dollar signs past make, the shell, and sed at the same time.
-# Doesn't work with spaces, but that's fine: .d files have spaces in
-# their names replaced with other characters.
-define fixup_dep
-# The depfile may not exist if the input file didn't have any #includes.
-touch $(depfile).raw
-# Fixup path as in (1).
-sed -e "s|^$(notdir $@)|$@|" $(depfile).raw >> $(depfile)
-# Add extra rules as in (2).
-# We remove slashes and replace spaces with new lines;
-# remove blank lines;
-# delete the first line and append a colon to the remaining lines.
-sed -e 's|\\||' -e 'y| |\n|' $(depfile).raw |\
- grep -v '^$$' |\
- sed -e 1d -e 's|$$|:|' \
- >> $(depfile)
-rm $(depfile).raw
-endef
-
-# Command definitions:
-# - cmd_foo is the actual command to run;
-# - quiet_cmd_foo is the brief-output summary of the command.
-
-quiet_cmd_cc = CC($(TOOLSET)) $@
-cmd_cc = $(CC.$(TOOLSET)) $(GYP_CFLAGS) $(DEPFLAGS) $(CFLAGS.$(TOOLSET)) -c -o $@ $<
-
-quiet_cmd_cxx = CXX($(TOOLSET)) $@
-cmd_cxx = $(CXX.$(TOOLSET)) $(GYP_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-
-quiet_cmd_objc = CXX($(TOOLSET)) $@
-cmd_objc = $(CC.$(TOOLSET)) $(GYP_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-quiet_cmd_objcxx = CXX($(TOOLSET)) $@
-cmd_objcxx = $(CXX.$(TOOLSET)) $(GYP_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-# Commands for precompiled header files.
-quiet_cmd_pch_c = CXX($(TOOLSET)) $@
-cmd_pch_c = $(CC.$(TOOLSET)) $(GYP_PCH_CFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-quiet_cmd_pch_cc = CXX($(TOOLSET)) $@
-cmd_pch_cc = $(CC.$(TOOLSET)) $(GYP_PCH_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-quiet_cmd_pch_m = CXX($(TOOLSET)) $@
-cmd_pch_m = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $<
-quiet_cmd_pch_mm = CXX($(TOOLSET)) $@
-cmd_pch_mm = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-# gyp-mac-tool is written next to the root Makefile by gyp.
-# Use $(4) for the command, since $(2) and $(3) are used as flag by do_cmd
-# already.
-quiet_cmd_mac_tool = MACTOOL $(4) $<
-cmd_mac_tool = ./gyp-mac-tool $(4) $< "$@"
-
-quiet_cmd_mac_package_framework = PACKAGE FRAMEWORK $@
-cmd_mac_package_framework = ./gyp-mac-tool package-framework "$@" $(4)
-
-quiet_cmd_infoplist = INFOPLIST $@
-cmd_infoplist = $(CC.$(TOOLSET)) -E -P -Wno-trigraphs -x c $(INFOPLIST_DEFINES) "$<" -o "$@"
-
-quiet_cmd_touch = TOUCH $@
-cmd_touch = touch $@
-
-quiet_cmd_copy = COPY $@
-# send stderr to /dev/null to ignore messages when linking directories.
-cmd_copy = rm -rf "$@" && cp -af "$<" "$@"
-
-quiet_cmd_alink = LIBTOOL-STATIC $@
-cmd_alink = rm -f $@ && ./gyp-mac-tool filter-libtool libtool $(GYP_LIBTOOLFLAGS) -static -o $@ $(filter %.o,$^)
-
-quiet_cmd_link = LINK($(TOOLSET)) $@
-cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS)
-
-quiet_cmd_solink = SOLINK($(TOOLSET)) $@
-cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS)
-
-quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@
-cmd_solink_module = $(LINK.$(TOOLSET)) -bundle $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS)
-
-
-# Define an escape_quotes function to escape single quotes.
-# This allows us to handle quotes properly as long as we always use
-# use single quotes and escape_quotes.
-escape_quotes = $(subst ','\'',$(1))
-# This comment is here just to include a ' to unconfuse syntax highlighting.
-# Define an escape_vars function to escape '$' variable syntax.
-# This allows us to read/write command lines with shell variables (e.g.
-# $LD_LIBRARY_PATH), without triggering make substitution.
-escape_vars = $(subst $$,$$$$,$(1))
-# Helper that expands to a shell command to echo a string exactly as it is in
-# make. This uses printf instead of echo because printf's behaviour with respect
-# to escape sequences is more portable than echo's across different shells
-# (e.g., dash, bash).
-exact_echo = printf '%s\n' '$(call escape_quotes,$(1))'
-
-# Helper to compare the command we're about to run against the command
-# we logged the last time we ran the command. Produces an empty
-# string (false) when the commands match.
-# Tricky point: Make has no string-equality test function.
-# The kernel uses the following, but it seems like it would have false
-# positives, where one string reordered its arguments.
-# arg_check = $(strip $(filter-out $(cmd_$(1)), $(cmd_$@)) \
-# $(filter-out $(cmd_$@), $(cmd_$(1))))
-# We instead substitute each for the empty string into the other, and
-# say they're equal if both substitutions produce the empty string.
-# .d files contain ? instead of spaces, take that into account.
-command_changed = $(or $(subst $(cmd_$(1)),,$(cmd_$(call replace_spaces,$@))),\
- $(subst $(cmd_$(call replace_spaces,$@)),,$(cmd_$(1))))
-
-# Helper that is non-empty when a prerequisite changes.
-# Normally make does this implicitly, but we force rules to always run
-# so we can check their command lines.
-# $? -- new prerequisites
-# $| -- order-only dependencies
-prereq_changed = $(filter-out FORCE_DO_CMD,$(filter-out $|,$?))
-
-# Helper that executes all postbuilds until one fails.
-define do_postbuilds
- @E=0;\
- for p in $(POSTBUILDS); do\
- eval $$p;\
- E=$$?;\
- if [ $$E -ne 0 ]; then\
- break;\
- fi;\
- done;\
- if [ $$E -ne 0 ]; then\
- rm -rf "$@";\
- exit $$E;\
- fi
-endef
-
-# do_cmd: run a command via the above cmd_foo names, if necessary.
-# Should always run for a given target to handle command-line changes.
-# Second argument, if non-zero, makes it do asm/C/C++ dependency munging.
-# Third argument, if non-zero, makes it do POSTBUILDS processing.
-# Note: We intentionally do NOT call dirx for depfile, since it contains ? for
-# spaces already and dirx strips the ? characters.
-define do_cmd
-$(if $(or $(command_changed),$(prereq_changed)),
- @$(call exact_echo, $($(quiet)cmd_$(1)))
- @mkdir -p "$(call dirx,$@)" "$(dir $(depfile))"
- $(if $(findstring flock,$(word 2,$(cmd_$1))),
- @$(cmd_$(1))
- @echo " $(quiet_cmd_$(1)): Finished",
- @$(cmd_$(1))
- )
- @$(call exact_echo,$(call escape_vars,cmd_$(call replace_spaces,$@) := $(cmd_$(1)))) > $(depfile)
- @$(if $(2),$(fixup_dep))
- $(if $(and $(3), $(POSTBUILDS)),
- $(call do_postbuilds)
- )
-)
-endef
-
-# Declare the "all" target first so it is the default,
-# even though we don't have the deps yet.
-.PHONY: all
-all:
-
-# make looks for ways to re-generate included makefiles, but in our case, we
-# don't have a direct way. Explicitly telling make that it has nothing to do
-# for them makes it go faster.
-%.d: ;
-
-# Use FORCE_DO_CMD to force a target to run. Should be coupled with
-# do_cmd.
-.PHONY: FORCE_DO_CMD
-FORCE_DO_CMD:
-
-TOOLSET := target
-# Suffix rules, putting all outputs into $(obj).
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cpp FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cxx FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.m FORCE_DO_CMD
- @$(call do_cmd,objc,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.mm FORCE_DO_CMD
- @$(call do_cmd,objcxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.S FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.s FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-# Try building from generated source, too.
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cpp FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cxx FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.m FORCE_DO_CMD
- @$(call do_cmd,objc,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.mm FORCE_DO_CMD
- @$(call do_cmd,objcxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.S FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.s FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-$(obj).$(TOOLSET)/%.o: $(obj)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.cpp FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.cxx FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.m FORCE_DO_CMD
- @$(call do_cmd,objc,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.mm FORCE_DO_CMD
- @$(call do_cmd,objcxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.S FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.s FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-
-ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
- $(findstring $(join ^,$(prefix)),\
- $(join ^,test_symbol.target.mk)))),)
- include test_symbol.target.mk
-endif
-
-quiet_cmd_regen_makefile = ACTION Regenerating $@
-cmd_regen_makefile = cd $(srcdir); /Users/trott/io.js/deps/npm/node_modules/node-gyp/gyp/gyp_main.py -fmake --ignore-environment "--toplevel-dir=." -I/Users/trott/io.js/test/addons-napi/test_symbol/build/config.gypi -I/Users/trott/io.js/deps/npm/node_modules/node-gyp/addon.gypi -I/Users/trott/io.js/common.gypi "--depth=." "-Goutput_dir=." "--generator-output=build" "-Dlibrary=shared_library" "-Dvisibility=default" "-Dnode_root_dir=/Users/trott/io.js" "-Dnode_gyp_dir=/Users/trott/io.js/deps/npm/node_modules/node-gyp" "-Dnode_lib_file=/Users/trott/io.js/$(Configuration)/node.lib" "-Dmodule_root_dir=/Users/trott/io.js/test/addons-napi/test_symbol" "-Dnode_engine=v8" binding.gyp
-Makefile: $(srcdir)/../../../deps/npm/node_modules/node-gyp/addon.gypi $(srcdir)/build/config.gypi $(srcdir)/binding.gyp $(srcdir)/../../../common.gypi
- $(call do_cmd,regen_makefile)
-
-# "all" is a concatenation of the "all" targets from all the included
-# sub-makefiles. This is just here to clarify.
-all:
-
-# Add in dependency-tracking rules. $(all_deps) is the list of every single
-# target in our tree. Only consider the ones with .d (dependency) info:
-d_files := $(wildcard $(foreach f,$(all_deps),$(depsdir)/$(f).d))
-ifneq ($(d_files),)
- include $(d_files)
-endif
diff --git a/test/addons-napi/test_symbol/build/binding.Makefile b/test/addons-napi/test_symbol/build/binding.Makefile
deleted file mode 100644
index a19a6d8e4e0ee8..00000000000000
--- a/test/addons-napi/test_symbol/build/binding.Makefile
+++ /dev/null
@@ -1,6 +0,0 @@
-# This file is generated by gyp; do not edit.
-
-export builddir_name ?= ./build/.
-.PHONY: all
-all:
- $(MAKE) test_symbol
diff --git a/test/addons-napi/test_symbol/build/config.gypi b/test/addons-napi/test_symbol/build/config.gypi
deleted file mode 100644
index 6a35cfccbb772b..00000000000000
--- a/test/addons-napi/test_symbol/build/config.gypi
+++ /dev/null
@@ -1,70 +0,0 @@
-# Do not edit. File was generated by node-gyp's "configure" step
-{
- "target_defaults": {
- "cflags": [],
- "default_configuration": "Release",
- "defines": [],
- "include_dirs": [],
- "libraries": []
- },
- "variables": {
- "asan": 0,
- "coverage": "false",
- "debug_devtools": "node",
- "debug_http2": "false",
- "debug_nghttp2": "false",
- "force_dynamic_crt": 0,
- "host_arch": "x64",
- "icu_data_file": "icudt59l.dat",
- "icu_data_in": "../../deps/icu-small/source/data/in/icudt59l.dat",
- "icu_endianness": "l",
- "icu_gyp_path": "tools/icu/icu-generic.gyp",
- "icu_locales": "en,root",
- "icu_path": "deps/icu-small",
- "icu_small": "true",
- "icu_ver_major": "59",
- "llvm_version": 0,
- "node_byteorder": "little",
- "node_enable_d8": "false",
- "node_enable_v8_vtunejit": "false",
- "node_install_npm": "true",
- "node_module_version": 58,
- "node_no_browser_globals": "false",
- "node_prefix": "/usr/local",
- "node_release_urlbase": "",
- "node_shared": "false",
- "node_shared_cares": "false",
- "node_shared_http_parser": "false",
- "node_shared_libuv": "false",
- "node_shared_openssl": "false",
- "node_shared_zlib": "false",
- "node_tag": "",
- "node_use_bundled_v8": "true",
- "node_use_dtrace": "true",
- "node_use_etw": "false",
- "node_use_lttng": "false",
- "node_use_openssl": "true",
- "node_use_perfctr": "false",
- "node_use_v8_platform": "true",
- "node_without_node_options": "false",
- "openssl_fips": "",
- "openssl_no_asm": 0,
- "shlib_suffix": "58.dylib",
- "target_arch": "x64",
- "uv_parent_path": "/deps/uv/",
- "uv_use_dtrace": "true",
- "v8_enable_gdbjit": 0,
- "v8_enable_i18n_support": 1,
- "v8_enable_inspector": 1,
- "v8_no_strict_aliasing": 1,
- "v8_optimized_debug": 0,
- "v8_promise_internal_field_count": 1,
- "v8_random_seed": 0,
- "v8_trace_maps": 0,
- "v8_use_snapshot": "true",
- "want_separate_host_toolset": 0,
- "xcode_version": "8.0",
- "nodedir": "/Users/trott/io.js",
- "standalone_static_library": 1
- }
-}
diff --git a/test/addons-napi/test_symbol/build/gyp-mac-tool b/test/addons-napi/test_symbol/build/gyp-mac-tool
deleted file mode 100755
index 8ef02b0493a003..00000000000000
--- a/test/addons-napi/test_symbol/build/gyp-mac-tool
+++ /dev/null
@@ -1,611 +0,0 @@
-#!/usr/bin/env python
-# Generated by gyp. Do not edit.
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Utility functions to perform Xcode-style build steps.
-
-These functions are executed via gyp-mac-tool when using the Makefile generator.
-"""
-
-import fcntl
-import fnmatch
-import glob
-import json
-import os
-import plistlib
-import re
-import shutil
-import string
-import subprocess
-import sys
-import tempfile
-
-
-def main(args):
- executor = MacTool()
- exit_code = executor.Dispatch(args)
- if exit_code is not None:
- sys.exit(exit_code)
-
-
-class MacTool(object):
- """This class performs all the Mac tooling steps. The methods can either be
- executed directly, or dispatched from an argument list."""
-
- def Dispatch(self, args):
- """Dispatches a string command to a method."""
- if len(args) < 1:
- raise Exception("Not enough arguments")
-
- method = "Exec%s" % self._CommandifyName(args[0])
- return getattr(self, method)(*args[1:])
-
- def _CommandifyName(self, name_string):
- """Transforms a tool name like copy-info-plist to CopyInfoPlist"""
- return name_string.title().replace('-', '')
-
- def ExecCopyBundleResource(self, source, dest, convert_to_binary):
- """Copies a resource file to the bundle/Resources directory, performing any
- necessary compilation on each resource."""
- extension = os.path.splitext(source)[1].lower()
- if os.path.isdir(source):
- # Copy tree.
- # TODO(thakis): This copies file attributes like mtime, while the
- # single-file branch below doesn't. This should probably be changed to
- # be consistent with the single-file branch.
- if os.path.exists(dest):
- shutil.rmtree(dest)
- shutil.copytree(source, dest)
- elif extension == '.xib':
- return self._CopyXIBFile(source, dest)
- elif extension == '.storyboard':
- return self._CopyXIBFile(source, dest)
- elif extension == '.strings':
- self._CopyStringsFile(source, dest, convert_to_binary)
- else:
- shutil.copy(source, dest)
-
- def _CopyXIBFile(self, source, dest):
- """Compiles a XIB file with ibtool into a binary plist in the bundle."""
-
- # ibtool sometimes crashes with relative paths. See crbug.com/314728.
- base = os.path.dirname(os.path.realpath(__file__))
- if os.path.relpath(source):
- source = os.path.join(base, source)
- if os.path.relpath(dest):
- dest = os.path.join(base, dest)
-
- args = ['xcrun', 'ibtool', '--errors', '--warnings', '--notices',
- '--output-format', 'human-readable-text', '--compile', dest, source]
- ibtool_section_re = re.compile(r'/\*.*\*/')
- ibtool_re = re.compile(r'.*note:.*is clipping its content')
- ibtoolout = subprocess.Popen(args, stdout=subprocess.PIPE)
- current_section_header = None
- for line in ibtoolout.stdout:
- if ibtool_section_re.match(line):
- current_section_header = line
- elif not ibtool_re.match(line):
- if current_section_header:
- sys.stdout.write(current_section_header)
- current_section_header = None
- sys.stdout.write(line)
- return ibtoolout.returncode
-
- def _ConvertToBinary(self, dest):
- subprocess.check_call([
- 'xcrun', 'plutil', '-convert', 'binary1', '-o', dest, dest])
-
- def _CopyStringsFile(self, source, dest, convert_to_binary):
- """Copies a .strings file using iconv to reconvert the input into UTF-16."""
- input_code = self._DetectInputEncoding(source) or "UTF-8"
-
- # Xcode's CpyCopyStringsFile / builtin-copyStrings seems to call
- # CFPropertyListCreateFromXMLData() behind the scenes; at least it prints
- # CFPropertyListCreateFromXMLData(): Old-style plist parser: missing
- # semicolon in dictionary.
- # on invalid files. Do the same kind of validation.
- import CoreFoundation
- s = open(source, 'rb').read()
- d = CoreFoundation.CFDataCreate(None, s, len(s))
- _, error = CoreFoundation.CFPropertyListCreateFromXMLData(None, d, 0, None)
- if error:
- return
-
- fp = open(dest, 'wb')
- fp.write(s.decode(input_code).encode('UTF-16'))
- fp.close()
-
- if convert_to_binary == 'True':
- self._ConvertToBinary(dest)
-
- def _DetectInputEncoding(self, file_name):
- """Reads the first few bytes from file_name and tries to guess the text
- encoding. Returns None as a guess if it can't detect it."""
- fp = open(file_name, 'rb')
- try:
- header = fp.read(3)
- except e:
- fp.close()
- return None
- fp.close()
- if header.startswith("\xFE\xFF"):
- return "UTF-16"
- elif header.startswith("\xFF\xFE"):
- return "UTF-16"
- elif header.startswith("\xEF\xBB\xBF"):
- return "UTF-8"
- else:
- return None
-
- def ExecCopyInfoPlist(self, source, dest, convert_to_binary, *keys):
- """Copies the |source| Info.plist to the destination directory |dest|."""
- # Read the source Info.plist into memory.
- fd = open(source, 'r')
- lines = fd.read()
- fd.close()
-
- # Insert synthesized key/value pairs (e.g. BuildMachineOSBuild).
- plist = plistlib.readPlistFromString(lines)
- if keys:
- plist = dict(plist.items() + json.loads(keys[0]).items())
- lines = plistlib.writePlistToString(plist)
-
- # Go through all the environment variables and replace them as variables in
- # the file.
- IDENT_RE = re.compile(r'[/\s]')
- for key in os.environ:
- if key.startswith('_'):
- continue
- evar = '${%s}' % key
- evalue = os.environ[key]
- lines = string.replace(lines, evar, evalue)
-
- # Xcode supports various suffices on environment variables, which are
- # all undocumented. :rfc1034identifier is used in the standard project
- # template these days, and :identifier was used earlier. They are used to
- # convert non-url characters into things that look like valid urls --
- # except that the replacement character for :identifier, '_' isn't valid
- # in a URL either -- oops, hence :rfc1034identifier was born.
- evar = '${%s:identifier}' % key
- evalue = IDENT_RE.sub('_', os.environ[key])
- lines = string.replace(lines, evar, evalue)
-
- evar = '${%s:rfc1034identifier}' % key
- evalue = IDENT_RE.sub('-', os.environ[key])
- lines = string.replace(lines, evar, evalue)
-
- # Remove any keys with values that haven't been replaced.
- lines = lines.split('\n')
- for i in range(len(lines)):
- if lines[i].strip().startswith("${"):
- lines[i] = None
- lines[i - 1] = None
- lines = '\n'.join(filter(lambda x: x is not None, lines))
-
- # Write out the file with variables replaced.
- fd = open(dest, 'w')
- fd.write(lines)
- fd.close()
-
- # Now write out PkgInfo file now that the Info.plist file has been
- # "compiled".
- self._WritePkgInfo(dest)
-
- if convert_to_binary == 'True':
- self._ConvertToBinary(dest)
-
- def _WritePkgInfo(self, info_plist):
- """This writes the PkgInfo file from the data stored in Info.plist."""
- plist = plistlib.readPlist(info_plist)
- if not plist:
- return
-
- # Only create PkgInfo for executable types.
- package_type = plist['CFBundlePackageType']
- if package_type != 'APPL':
- return
-
- # The format of PkgInfo is eight characters, representing the bundle type
- # and bundle signature, each four characters. If that is missing, four
- # '?' characters are used instead.
- signature_code = plist.get('CFBundleSignature', '????')
- if len(signature_code) != 4: # Wrong length resets everything, too.
- signature_code = '?' * 4
-
- dest = os.path.join(os.path.dirname(info_plist), 'PkgInfo')
- fp = open(dest, 'w')
- fp.write('%s%s' % (package_type, signature_code))
- fp.close()
-
- def ExecFlock(self, lockfile, *cmd_list):
- """Emulates the most basic behavior of Linux's flock(1)."""
- # Rely on exception handling to report errors.
- fd = os.open(lockfile, os.O_RDONLY|os.O_NOCTTY|os.O_CREAT, 0o666)
- fcntl.flock(fd, fcntl.LOCK_EX)
- return subprocess.call(cmd_list)
-
- def ExecFilterLibtool(self, *cmd_list):
- """Calls libtool and filters out '/path/to/libtool: file: foo.o has no
- symbols'."""
- libtool_re = re.compile(r'^.*libtool: file: .* has no symbols$')
- libtool_re5 = re.compile(
- r'^.*libtool: warning for library: ' +
- r'.* the table of contents is empty ' +
- r'\(no object file members in the library define global symbols\)$')
- env = os.environ.copy()
- # Ref:
- # http://www.opensource.apple.com/source/cctools/cctools-809/misc/libtool.c
- # The problem with this flag is that it resets the file mtime on the file to
- # epoch=0, e.g. 1970-1-1 or 1969-12-31 depending on timezone.
- env['ZERO_AR_DATE'] = '1'
- libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env)
- _, err = libtoolout.communicate()
- for line in err.splitlines():
- if not libtool_re.match(line) and not libtool_re5.match(line):
- print >>sys.stderr, line
- # Unconditionally touch the output .a file on the command line if present
- # and the command succeeded. A bit hacky.
- if not libtoolout.returncode:
- for i in range(len(cmd_list) - 1):
- if cmd_list[i] == "-o" and cmd_list[i+1].endswith('.a'):
- os.utime(cmd_list[i+1], None)
- break
- return libtoolout.returncode
-
- def ExecPackageFramework(self, framework, version):
- """Takes a path to Something.framework and the Current version of that and
- sets up all the symlinks."""
- # Find the name of the binary based on the part before the ".framework".
- binary = os.path.basename(framework).split('.')[0]
-
- CURRENT = 'Current'
- RESOURCES = 'Resources'
- VERSIONS = 'Versions'
-
- if not os.path.exists(os.path.join(framework, VERSIONS, version, binary)):
- # Binary-less frameworks don't seem to contain symlinks (see e.g.
- # chromium's out/Debug/org.chromium.Chromium.manifest/ bundle).
- return
-
- # Move into the framework directory to set the symlinks correctly.
- pwd = os.getcwd()
- os.chdir(framework)
-
- # Set up the Current version.
- self._Relink(version, os.path.join(VERSIONS, CURRENT))
-
- # Set up the root symlinks.
- self._Relink(os.path.join(VERSIONS, CURRENT, binary), binary)
- self._Relink(os.path.join(VERSIONS, CURRENT, RESOURCES), RESOURCES)
-
- # Back to where we were before!
- os.chdir(pwd)
-
- def _Relink(self, dest, link):
- """Creates a symlink to |dest| named |link|. If |link| already exists,
- it is overwritten."""
- if os.path.lexists(link):
- os.remove(link)
- os.symlink(dest, link)
-
- def ExecCompileXcassets(self, keys, *inputs):
- """Compiles multiple .xcassets files into a single .car file.
-
- This invokes 'actool' to compile all the inputs .xcassets files. The
- |keys| arguments is a json-encoded dictionary of extra arguments to
- pass to 'actool' when the asset catalogs contains an application icon
- or a launch image.
-
- Note that 'actool' does not create the Assets.car file if the asset
- catalogs does not contains imageset.
- """
- command_line = [
- 'xcrun', 'actool', '--output-format', 'human-readable-text',
- '--compress-pngs', '--notices', '--warnings', '--errors',
- ]
- is_iphone_target = 'IPHONEOS_DEPLOYMENT_TARGET' in os.environ
- if is_iphone_target:
- platform = os.environ['CONFIGURATION'].split('-')[-1]
- if platform not in ('iphoneos', 'iphonesimulator'):
- platform = 'iphonesimulator'
- command_line.extend([
- '--platform', platform, '--target-device', 'iphone',
- '--target-device', 'ipad', '--minimum-deployment-target',
- os.environ['IPHONEOS_DEPLOYMENT_TARGET'], '--compile',
- os.path.abspath(os.environ['CONTENTS_FOLDER_PATH']),
- ])
- else:
- command_line.extend([
- '--platform', 'macosx', '--target-device', 'mac',
- '--minimum-deployment-target', os.environ['MACOSX_DEPLOYMENT_TARGET'],
- '--compile',
- os.path.abspath(os.environ['UNLOCALIZED_RESOURCES_FOLDER_PATH']),
- ])
- if keys:
- keys = json.loads(keys)
- for key, value in keys.iteritems():
- arg_name = '--' + key
- if isinstance(value, bool):
- if value:
- command_line.append(arg_name)
- elif isinstance(value, list):
- for v in value:
- command_line.append(arg_name)
- command_line.append(str(v))
- else:
- command_line.append(arg_name)
- command_line.append(str(value))
- # Note: actool crashes if inputs path are relative, so use os.path.abspath
- # to get absolute path name for inputs.
- command_line.extend(map(os.path.abspath, inputs))
- subprocess.check_call(command_line)
-
- def ExecMergeInfoPlist(self, output, *inputs):
- """Merge multiple .plist files into a single .plist file."""
- merged_plist = {}
- for path in inputs:
- plist = self._LoadPlistMaybeBinary(path)
- self._MergePlist(merged_plist, plist)
- plistlib.writePlist(merged_plist, output)
-
- def ExecCodeSignBundle(self, key, resource_rules, entitlements, provisioning):
- """Code sign a bundle.
-
- This function tries to code sign an iOS bundle, following the same
- algorithm as Xcode:
- 1. copy ResourceRules.plist from the user or the SDK into the bundle,
- 2. pick the provisioning profile that best match the bundle identifier,
- and copy it into the bundle as embedded.mobileprovision,
- 3. copy Entitlements.plist from user or SDK next to the bundle,
- 4. code sign the bundle.
- """
- resource_rules_path = self._InstallResourceRules(resource_rules)
- substitutions, overrides = self._InstallProvisioningProfile(
- provisioning, self._GetCFBundleIdentifier())
- entitlements_path = self._InstallEntitlements(
- entitlements, substitutions, overrides)
- subprocess.check_call([
- 'codesign', '--force', '--sign', key, '--resource-rules',
- resource_rules_path, '--entitlements', entitlements_path,
- os.path.join(
- os.environ['TARGET_BUILD_DIR'],
- os.environ['FULL_PRODUCT_NAME'])])
-
- def _InstallResourceRules(self, resource_rules):
- """Installs ResourceRules.plist from user or SDK into the bundle.
-
- Args:
- resource_rules: string, optional, path to the ResourceRules.plist file
- to use, default to "${SDKROOT}/ResourceRules.plist"
-
- Returns:
- Path to the copy of ResourceRules.plist into the bundle.
- """
- source_path = resource_rules
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['CONTENTS_FOLDER_PATH'],
- 'ResourceRules.plist')
- if not source_path:
- source_path = os.path.join(
- os.environ['SDKROOT'], 'ResourceRules.plist')
- shutil.copy2(source_path, target_path)
- return target_path
-
- def _InstallProvisioningProfile(self, profile, bundle_identifier):
- """Installs embedded.mobileprovision into the bundle.
-
- Args:
- profile: string, optional, short name of the .mobileprovision file
- to use, if empty or the file is missing, the best file installed
- will be used
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
-
- Returns:
- A tuple containing two dictionary: variables substitutions and values
- to overrides when generating the entitlements file.
- """
- source_path, provisioning_data, team_id = self._FindProvisioningProfile(
- profile, bundle_identifier)
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['CONTENTS_FOLDER_PATH'],
- 'embedded.mobileprovision')
- shutil.copy2(source_path, target_path)
- substitutions = self._GetSubstitutions(bundle_identifier, team_id + '.')
- return substitutions, provisioning_data['Entitlements']
-
- def _FindProvisioningProfile(self, profile, bundle_identifier):
- """Finds the .mobileprovision file to use for signing the bundle.
-
- Checks all the installed provisioning profiles (or if the user specified
- the PROVISIONING_PROFILE variable, only consult it) and select the most
- specific that correspond to the bundle identifier.
-
- Args:
- profile: string, optional, short name of the .mobileprovision file
- to use, if empty or the file is missing, the best file installed
- will be used
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
-
- Returns:
- A tuple of the path to the selected provisioning profile, the data of
- the embedded plist in the provisioning profile and the team identifier
- to use for code signing.
-
- Raises:
- SystemExit: if no .mobileprovision can be used to sign the bundle.
- """
- profiles_dir = os.path.join(
- os.environ['HOME'], 'Library', 'MobileDevice', 'Provisioning Profiles')
- if not os.path.isdir(profiles_dir):
- print >>sys.stderr, (
- 'cannot find mobile provisioning for %s' % bundle_identifier)
- sys.exit(1)
- provisioning_profiles = None
- if profile:
- profile_path = os.path.join(profiles_dir, profile + '.mobileprovision')
- if os.path.exists(profile_path):
- provisioning_profiles = [profile_path]
- if not provisioning_profiles:
- provisioning_profiles = glob.glob(
- os.path.join(profiles_dir, '*.mobileprovision'))
- valid_provisioning_profiles = {}
- for profile_path in provisioning_profiles:
- profile_data = self._LoadProvisioningProfile(profile_path)
- app_id_pattern = profile_data.get(
- 'Entitlements', {}).get('application-identifier', '')
- for team_identifier in profile_data.get('TeamIdentifier', []):
- app_id = '%s.%s' % (team_identifier, bundle_identifier)
- if fnmatch.fnmatch(app_id, app_id_pattern):
- valid_provisioning_profiles[app_id_pattern] = (
- profile_path, profile_data, team_identifier)
- if not valid_provisioning_profiles:
- print >>sys.stderr, (
- 'cannot find mobile provisioning for %s' % bundle_identifier)
- sys.exit(1)
- # If the user has multiple provisioning profiles installed that can be
- # used for ${bundle_identifier}, pick the most specific one (ie. the
- # provisioning profile whose pattern is the longest).
- selected_key = max(valid_provisioning_profiles, key=lambda v: len(v))
- return valid_provisioning_profiles[selected_key]
-
- def _LoadProvisioningProfile(self, profile_path):
- """Extracts the plist embedded in a provisioning profile.
-
- Args:
- profile_path: string, path to the .mobileprovision file
-
- Returns:
- Content of the plist embedded in the provisioning profile as a dictionary.
- """
- with tempfile.NamedTemporaryFile() as temp:
- subprocess.check_call([
- 'security', 'cms', '-D', '-i', profile_path, '-o', temp.name])
- return self._LoadPlistMaybeBinary(temp.name)
-
- def _MergePlist(self, merged_plist, plist):
- """Merge |plist| into |merged_plist|."""
- for key, value in plist.iteritems():
- if isinstance(value, dict):
- merged_value = merged_plist.get(key, {})
- if isinstance(merged_value, dict):
- self._MergePlist(merged_value, value)
- merged_plist[key] = merged_value
- else:
- merged_plist[key] = value
- else:
- merged_plist[key] = value
-
- def _LoadPlistMaybeBinary(self, plist_path):
- """Loads into a memory a plist possibly encoded in binary format.
-
- This is a wrapper around plistlib.readPlist that tries to convert the
- plist to the XML format if it can't be parsed (assuming that it is in
- the binary format).
-
- Args:
- plist_path: string, path to a plist file, in XML or binary format
-
- Returns:
- Content of the plist as a dictionary.
- """
- try:
- # First, try to read the file using plistlib that only supports XML,
- # and if an exception is raised, convert a temporary copy to XML and
- # load that copy.
- return plistlib.readPlist(plist_path)
- except:
- pass
- with tempfile.NamedTemporaryFile() as temp:
- shutil.copy2(plist_path, temp.name)
- subprocess.check_call(['plutil', '-convert', 'xml1', temp.name])
- return plistlib.readPlist(temp.name)
-
- def _GetSubstitutions(self, bundle_identifier, app_identifier_prefix):
- """Constructs a dictionary of variable substitutions for Entitlements.plist.
-
- Args:
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
- app_identifier_prefix: string, value for AppIdentifierPrefix
-
- Returns:
- Dictionary of substitutions to apply when generating Entitlements.plist.
- """
- return {
- 'CFBundleIdentifier': bundle_identifier,
- 'AppIdentifierPrefix': app_identifier_prefix,
- }
-
- def _GetCFBundleIdentifier(self):
- """Extracts CFBundleIdentifier value from Info.plist in the bundle.
-
- Returns:
- Value of CFBundleIdentifier in the Info.plist located in the bundle.
- """
- info_plist_path = os.path.join(
- os.environ['TARGET_BUILD_DIR'],
- os.environ['INFOPLIST_PATH'])
- info_plist_data = self._LoadPlistMaybeBinary(info_plist_path)
- return info_plist_data['CFBundleIdentifier']
-
- def _InstallEntitlements(self, entitlements, substitutions, overrides):
- """Generates and install the ${BundleName}.xcent entitlements file.
-
- Expands variables "$(variable)" pattern in the source entitlements file,
- add extra entitlements defined in the .mobileprovision file and the copy
- the generated plist to "${BundlePath}.xcent".
-
- Args:
- entitlements: string, optional, path to the Entitlements.plist template
- to use, defaults to "${SDKROOT}/Entitlements.plist"
- substitutions: dictionary, variable substitutions
- overrides: dictionary, values to add to the entitlements
-
- Returns:
- Path to the generated entitlements file.
- """
- source_path = entitlements
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['PRODUCT_NAME'] + '.xcent')
- if not source_path:
- source_path = os.path.join(
- os.environ['SDKROOT'],
- 'Entitlements.plist')
- shutil.copy2(source_path, target_path)
- data = self._LoadPlistMaybeBinary(target_path)
- data = self._ExpandVariables(data, substitutions)
- if overrides:
- for key in overrides:
- if key not in data:
- data[key] = overrides[key]
- plistlib.writePlist(data, target_path)
- return target_path
-
- def _ExpandVariables(self, data, substitutions):
- """Expands variables "$(variable)" in data.
-
- Args:
- data: object, can be either string, list or dictionary
- substitutions: dictionary, variable substitutions to perform
-
- Returns:
- Copy of data where each references to "$(variable)" has been replaced
- by the corresponding value found in substitutions, or left intact if
- the key was not found.
- """
- if isinstance(data, str):
- for key, value in substitutions.iteritems():
- data = data.replace('$(%s)' % key, value)
- return data
- if isinstance(data, list):
- return [self._ExpandVariables(v, substitutions) for v in data]
- if isinstance(data, dict):
- return {k: self._ExpandVariables(data[k], substitutions) for k in data}
- return data
-
-if __name__ == '__main__':
- sys.exit(main(sys.argv[1:]))
diff --git a/test/addons-napi/test_symbol/build/test_symbol.target.mk b/test/addons-napi/test_symbol/build/test_symbol.target.mk
deleted file mode 100644
index bbf53c2dac39d8..00000000000000
--- a/test/addons-napi/test_symbol/build/test_symbol.target.mk
+++ /dev/null
@@ -1,178 +0,0 @@
-# This file is generated by gyp; do not edit.
-
-TOOLSET := target
-TARGET := test_symbol
-DEFS_Debug := \
- '-DNODE_GYP_MODULE_NAME=test_symbol' \
- '-DUSING_UV_SHARED=1' \
- '-DUSING_V8_SHARED=1' \
- '-DV8_DEPRECATION_WARNINGS=1' \
- '-D_DARWIN_USE_64_BIT_INODE=1' \
- '-D_LARGEFILE_SOURCE' \
- '-D_FILE_OFFSET_BITS=64' \
- '-DBUILDING_NODE_EXTENSION' \
- '-DDEBUG' \
- '-D_DEBUG' \
- '-DV8_ENABLE_CHECKS'
-
-# Flags passed to all source files.
-CFLAGS_Debug := \
- -O0 \
- -gdwarf-2 \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -Wall \
- -Wendif-labels \
- -W \
- -Wno-unused-parameter
-
-# Flags passed to only C files.
-CFLAGS_C_Debug := \
- -fno-strict-aliasing
-
-# Flags passed to only C++ files.
-CFLAGS_CC_Debug := \
- -std=gnu++0x \
- -stdlib=libc++ \
- -fno-rtti \
- -fno-exceptions \
- -fno-threadsafe-statics \
- -fno-strict-aliasing
-
-# Flags passed to only ObjC files.
-CFLAGS_OBJC_Debug :=
-
-# Flags passed to only ObjC++ files.
-CFLAGS_OBJCC_Debug :=
-
-INCS_Debug := \
- -I/Users/trott/io.js/include/node \
- -I/Users/trott/io.js/src \
- -I/Users/trott/io.js/deps/uv/include \
- -I/Users/trott/io.js/deps/v8/include
-
-DEFS_Release := \
- '-DNODE_GYP_MODULE_NAME=test_symbol' \
- '-DUSING_UV_SHARED=1' \
- '-DUSING_V8_SHARED=1' \
- '-DV8_DEPRECATION_WARNINGS=1' \
- '-D_DARWIN_USE_64_BIT_INODE=1' \
- '-D_LARGEFILE_SOURCE' \
- '-D_FILE_OFFSET_BITS=64' \
- '-DBUILDING_NODE_EXTENSION'
-
-# Flags passed to all source files.
-CFLAGS_Release := \
- -Os \
- -gdwarf-2 \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -Wall \
- -Wendif-labels \
- -W \
- -Wno-unused-parameter
-
-# Flags passed to only C files.
-CFLAGS_C_Release := \
- -fno-strict-aliasing
-
-# Flags passed to only C++ files.
-CFLAGS_CC_Release := \
- -std=gnu++0x \
- -stdlib=libc++ \
- -fno-rtti \
- -fno-exceptions \
- -fno-threadsafe-statics \
- -fno-strict-aliasing
-
-# Flags passed to only ObjC files.
-CFLAGS_OBJC_Release :=
-
-# Flags passed to only ObjC++ files.
-CFLAGS_OBJCC_Release :=
-
-INCS_Release := \
- -I/Users/trott/io.js/include/node \
- -I/Users/trott/io.js/src \
- -I/Users/trott/io.js/deps/uv/include \
- -I/Users/trott/io.js/deps/v8/include
-
-OBJS := \
- $(obj).target/$(TARGET)/test_symbol.o
-
-# Add to the list of files we specially track dependencies for.
-all_deps += $(OBJS)
-
-# CFLAGS et al overrides must be target-local.
-# See "Target-specific Variable Values" in the GNU Make manual.
-$(OBJS): TOOLSET := $(TOOLSET)
-$(OBJS): GYP_CFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE))
-$(OBJS): GYP_CXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE))
-$(OBJS): GYP_OBJCFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) $(CFLAGS_OBJC_$(BUILDTYPE))
-$(OBJS): GYP_OBJCXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) $(CFLAGS_OBJCC_$(BUILDTYPE))
-
-# Suffix rules, putting all outputs into $(obj).
-
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-# Try building from generated source, too.
-
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-# End of this set of suffix rules
-### Rules for final target.
-LDFLAGS_Debug := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -L$(builddir) \
- -stdlib=libc++
-
-LIBTOOLFLAGS_Debug := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first
-
-LDFLAGS_Release := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -L$(builddir) \
- -stdlib=libc++
-
-LIBTOOLFLAGS_Release := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first
-
-LIBS :=
-
-$(builddir)/test_symbol.node: GYP_LDFLAGS := $(LDFLAGS_$(BUILDTYPE))
-$(builddir)/test_symbol.node: LIBS := $(LIBS)
-$(builddir)/test_symbol.node: GYP_LIBTOOLFLAGS := $(LIBTOOLFLAGS_$(BUILDTYPE))
-$(builddir)/test_symbol.node: TOOLSET := $(TOOLSET)
-$(builddir)/test_symbol.node: $(OBJS) FORCE_DO_CMD
- $(call do_cmd,solink_module)
-
-all_deps += $(builddir)/test_symbol.node
-# Add target alias
-.PHONY: test_symbol
-test_symbol: $(builddir)/test_symbol.node
-
-# Short alias for building this executable.
-.PHONY: test_symbol.node
-test_symbol.node: $(builddir)/test_symbol.node
-
-# Add executable to "all" target.
-.PHONY: all
-all: $(builddir)/test_symbol.node
-
diff --git a/test/addons-napi/test_typedarray/build/Makefile b/test/addons-napi/test_typedarray/build/Makefile
deleted file mode 100644
index c64c416be7b325..00000000000000
--- a/test/addons-napi/test_typedarray/build/Makefile
+++ /dev/null
@@ -1,342 +0,0 @@
-# We borrow heavily from the kernel build setup, though we are simpler since
-# we don't have Kconfig tweaking settings on us.
-
-# The implicit make rules have it looking for RCS files, among other things.
-# We instead explicitly write all the rules we care about.
-# It's even quicker (saves ~200ms) to pass -r on the command line.
-MAKEFLAGS=-r
-
-# The source directory tree.
-srcdir := ..
-abs_srcdir := $(abspath $(srcdir))
-
-# The name of the builddir.
-builddir_name ?= .
-
-# The V=1 flag on command line makes us verbosely print command lines.
-ifdef V
- quiet=
-else
- quiet=quiet_
-endif
-
-# Specify BUILDTYPE=Release on the command line for a release build.
-BUILDTYPE ?= Release
-
-# Directory all our build output goes into.
-# Note that this must be two directories beneath src/ for unit tests to pass,
-# as they reach into the src/ directory for data with relative paths.
-builddir ?= $(builddir_name)/$(BUILDTYPE)
-abs_builddir := $(abspath $(builddir))
-depsdir := $(builddir)/.deps
-
-# Object output directory.
-obj := $(builddir)/obj
-abs_obj := $(abspath $(obj))
-
-# We build up a list of every single one of the targets so we can slurp in the
-# generated dependency rule Makefiles in one pass.
-all_deps :=
-
-
-
-CC.target ?= $(CC)
-CFLAGS.target ?= $(CPPFLAGS) $(CFLAGS)
-CXX.target ?= $(CXX)
-CXXFLAGS.target ?= $(CPPFLAGS) $(CXXFLAGS)
-LINK.target ?= $(LINK)
-LDFLAGS.target ?= $(LDFLAGS)
-AR.target ?= $(AR)
-
-# C++ apps need to be linked with g++.
-LINK ?= $(CXX.target)
-
-# TODO(evan): move all cross-compilation logic to gyp-time so we don't need
-# to replicate this environment fallback in make as well.
-CC.host ?= gcc
-CFLAGS.host ?= $(CPPFLAGS_host) $(CFLAGS_host)
-CXX.host ?= g++
-CXXFLAGS.host ?= $(CPPFLAGS_host) $(CXXFLAGS_host)
-LINK.host ?= $(CXX.host)
-LDFLAGS.host ?=
-AR.host ?= ar
-
-# Define a dir function that can handle spaces.
-# http://www.gnu.org/software/make/manual/make.html#Syntax-of-Functions
-# "leading spaces cannot appear in the text of the first argument as written.
-# These characters can be put into the argument value by variable substitution."
-empty :=
-space := $(empty) $(empty)
-
-# http://stackoverflow.com/questions/1189781/using-make-dir-or-notdir-on-a-path-with-spaces
-replace_spaces = $(subst $(space),?,$1)
-unreplace_spaces = $(subst ?,$(space),$1)
-dirx = $(call unreplace_spaces,$(dir $(call replace_spaces,$1)))
-
-# Flags to make gcc output dependency info. Note that you need to be
-# careful here to use the flags that ccache and distcc can understand.
-# We write to a dep file on the side first and then rename at the end
-# so we can't end up with a broken dep file.
-depfile = $(depsdir)/$(call replace_spaces,$@).d
-DEPFLAGS = -MMD -MF $(depfile).raw
-
-# We have to fixup the deps output in a few ways.
-# (1) the file output should mention the proper .o file.
-# ccache or distcc lose the path to the target, so we convert a rule of
-# the form:
-# foobar.o: DEP1 DEP2
-# into
-# path/to/foobar.o: DEP1 DEP2
-# (2) we want missing files not to cause us to fail to build.
-# We want to rewrite
-# foobar.o: DEP1 DEP2 \
-# DEP3
-# to
-# DEP1:
-# DEP2:
-# DEP3:
-# so if the files are missing, they're just considered phony rules.
-# We have to do some pretty insane escaping to get those backslashes
-# and dollar signs past make, the shell, and sed at the same time.
-# Doesn't work with spaces, but that's fine: .d files have spaces in
-# their names replaced with other characters.
-define fixup_dep
-# The depfile may not exist if the input file didn't have any #includes.
-touch $(depfile).raw
-# Fixup path as in (1).
-sed -e "s|^$(notdir $@)|$@|" $(depfile).raw >> $(depfile)
-# Add extra rules as in (2).
-# We remove slashes and replace spaces with new lines;
-# remove blank lines;
-# delete the first line and append a colon to the remaining lines.
-sed -e 's|\\||' -e 'y| |\n|' $(depfile).raw |\
- grep -v '^$$' |\
- sed -e 1d -e 's|$$|:|' \
- >> $(depfile)
-rm $(depfile).raw
-endef
-
-# Command definitions:
-# - cmd_foo is the actual command to run;
-# - quiet_cmd_foo is the brief-output summary of the command.
-
-quiet_cmd_cc = CC($(TOOLSET)) $@
-cmd_cc = $(CC.$(TOOLSET)) $(GYP_CFLAGS) $(DEPFLAGS) $(CFLAGS.$(TOOLSET)) -c -o $@ $<
-
-quiet_cmd_cxx = CXX($(TOOLSET)) $@
-cmd_cxx = $(CXX.$(TOOLSET)) $(GYP_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-
-quiet_cmd_objc = CXX($(TOOLSET)) $@
-cmd_objc = $(CC.$(TOOLSET)) $(GYP_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-quiet_cmd_objcxx = CXX($(TOOLSET)) $@
-cmd_objcxx = $(CXX.$(TOOLSET)) $(GYP_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-# Commands for precompiled header files.
-quiet_cmd_pch_c = CXX($(TOOLSET)) $@
-cmd_pch_c = $(CC.$(TOOLSET)) $(GYP_PCH_CFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-quiet_cmd_pch_cc = CXX($(TOOLSET)) $@
-cmd_pch_cc = $(CC.$(TOOLSET)) $(GYP_PCH_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-quiet_cmd_pch_m = CXX($(TOOLSET)) $@
-cmd_pch_m = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $<
-quiet_cmd_pch_mm = CXX($(TOOLSET)) $@
-cmd_pch_mm = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-# gyp-mac-tool is written next to the root Makefile by gyp.
-# Use $(4) for the command, since $(2) and $(3) are used as flag by do_cmd
-# already.
-quiet_cmd_mac_tool = MACTOOL $(4) $<
-cmd_mac_tool = ./gyp-mac-tool $(4) $< "$@"
-
-quiet_cmd_mac_package_framework = PACKAGE FRAMEWORK $@
-cmd_mac_package_framework = ./gyp-mac-tool package-framework "$@" $(4)
-
-quiet_cmd_infoplist = INFOPLIST $@
-cmd_infoplist = $(CC.$(TOOLSET)) -E -P -Wno-trigraphs -x c $(INFOPLIST_DEFINES) "$<" -o "$@"
-
-quiet_cmd_touch = TOUCH $@
-cmd_touch = touch $@
-
-quiet_cmd_copy = COPY $@
-# send stderr to /dev/null to ignore messages when linking directories.
-cmd_copy = rm -rf "$@" && cp -af "$<" "$@"
-
-quiet_cmd_alink = LIBTOOL-STATIC $@
-cmd_alink = rm -f $@ && ./gyp-mac-tool filter-libtool libtool $(GYP_LIBTOOLFLAGS) -static -o $@ $(filter %.o,$^)
-
-quiet_cmd_link = LINK($(TOOLSET)) $@
-cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS)
-
-quiet_cmd_solink = SOLINK($(TOOLSET)) $@
-cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS)
-
-quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@
-cmd_solink_module = $(LINK.$(TOOLSET)) -bundle $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS)
-
-
-# Define an escape_quotes function to escape single quotes.
-# This allows us to handle quotes properly as long as we always use
-# use single quotes and escape_quotes.
-escape_quotes = $(subst ','\'',$(1))
-# This comment is here just to include a ' to unconfuse syntax highlighting.
-# Define an escape_vars function to escape '$' variable syntax.
-# This allows us to read/write command lines with shell variables (e.g.
-# $LD_LIBRARY_PATH), without triggering make substitution.
-escape_vars = $(subst $$,$$$$,$(1))
-# Helper that expands to a shell command to echo a string exactly as it is in
-# make. This uses printf instead of echo because printf's behaviour with respect
-# to escape sequences is more portable than echo's across different shells
-# (e.g., dash, bash).
-exact_echo = printf '%s\n' '$(call escape_quotes,$(1))'
-
-# Helper to compare the command we're about to run against the command
-# we logged the last time we ran the command. Produces an empty
-# string (false) when the commands match.
-# Tricky point: Make has no string-equality test function.
-# The kernel uses the following, but it seems like it would have false
-# positives, where one string reordered its arguments.
-# arg_check = $(strip $(filter-out $(cmd_$(1)), $(cmd_$@)) \
-# $(filter-out $(cmd_$@), $(cmd_$(1))))
-# We instead substitute each for the empty string into the other, and
-# say they're equal if both substitutions produce the empty string.
-# .d files contain ? instead of spaces, take that into account.
-command_changed = $(or $(subst $(cmd_$(1)),,$(cmd_$(call replace_spaces,$@))),\
- $(subst $(cmd_$(call replace_spaces,$@)),,$(cmd_$(1))))
-
-# Helper that is non-empty when a prerequisite changes.
-# Normally make does this implicitly, but we force rules to always run
-# so we can check their command lines.
-# $? -- new prerequisites
-# $| -- order-only dependencies
-prereq_changed = $(filter-out FORCE_DO_CMD,$(filter-out $|,$?))
-
-# Helper that executes all postbuilds until one fails.
-define do_postbuilds
- @E=0;\
- for p in $(POSTBUILDS); do\
- eval $$p;\
- E=$$?;\
- if [ $$E -ne 0 ]; then\
- break;\
- fi;\
- done;\
- if [ $$E -ne 0 ]; then\
- rm -rf "$@";\
- exit $$E;\
- fi
-endef
-
-# do_cmd: run a command via the above cmd_foo names, if necessary.
-# Should always run for a given target to handle command-line changes.
-# Second argument, if non-zero, makes it do asm/C/C++ dependency munging.
-# Third argument, if non-zero, makes it do POSTBUILDS processing.
-# Note: We intentionally do NOT call dirx for depfile, since it contains ? for
-# spaces already and dirx strips the ? characters.
-define do_cmd
-$(if $(or $(command_changed),$(prereq_changed)),
- @$(call exact_echo, $($(quiet)cmd_$(1)))
- @mkdir -p "$(call dirx,$@)" "$(dir $(depfile))"
- $(if $(findstring flock,$(word 2,$(cmd_$1))),
- @$(cmd_$(1))
- @echo " $(quiet_cmd_$(1)): Finished",
- @$(cmd_$(1))
- )
- @$(call exact_echo,$(call escape_vars,cmd_$(call replace_spaces,$@) := $(cmd_$(1)))) > $(depfile)
- @$(if $(2),$(fixup_dep))
- $(if $(and $(3), $(POSTBUILDS)),
- $(call do_postbuilds)
- )
-)
-endef
-
-# Declare the "all" target first so it is the default,
-# even though we don't have the deps yet.
-.PHONY: all
-all:
-
-# make looks for ways to re-generate included makefiles, but in our case, we
-# don't have a direct way. Explicitly telling make that it has nothing to do
-# for them makes it go faster.
-%.d: ;
-
-# Use FORCE_DO_CMD to force a target to run. Should be coupled with
-# do_cmd.
-.PHONY: FORCE_DO_CMD
-FORCE_DO_CMD:
-
-TOOLSET := target
-# Suffix rules, putting all outputs into $(obj).
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cpp FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cxx FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.m FORCE_DO_CMD
- @$(call do_cmd,objc,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.mm FORCE_DO_CMD
- @$(call do_cmd,objcxx,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.S FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(srcdir)/%.s FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-# Try building from generated source, too.
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cpp FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cxx FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.m FORCE_DO_CMD
- @$(call do_cmd,objc,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.mm FORCE_DO_CMD
- @$(call do_cmd,objcxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.S FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.s FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-$(obj).$(TOOLSET)/%.o: $(obj)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.cpp FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.cxx FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.m FORCE_DO_CMD
- @$(call do_cmd,objc,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.mm FORCE_DO_CMD
- @$(call do_cmd,objcxx,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.S FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-$(obj).$(TOOLSET)/%.o: $(obj)/%.s FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-
-ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
- $(findstring $(join ^,$(prefix)),\
- $(join ^,test_typedarray.target.mk)))),)
- include test_typedarray.target.mk
-endif
-
-quiet_cmd_regen_makefile = ACTION Regenerating $@
-cmd_regen_makefile = cd $(srcdir); /Users/trott/io.js/deps/npm/node_modules/node-gyp/gyp/gyp_main.py -fmake --ignore-environment "--toplevel-dir=." -I/Users/trott/io.js/test/addons-napi/test_typedarray/build/config.gypi -I/Users/trott/io.js/deps/npm/node_modules/node-gyp/addon.gypi -I/Users/trott/io.js/common.gypi "--depth=." "-Goutput_dir=." "--generator-output=build" "-Dlibrary=shared_library" "-Dvisibility=default" "-Dnode_root_dir=/Users/trott/io.js" "-Dnode_gyp_dir=/Users/trott/io.js/deps/npm/node_modules/node-gyp" "-Dnode_lib_file=/Users/trott/io.js/$(Configuration)/node.lib" "-Dmodule_root_dir=/Users/trott/io.js/test/addons-napi/test_typedarray" "-Dnode_engine=v8" binding.gyp
-Makefile: $(srcdir)/../../../deps/npm/node_modules/node-gyp/addon.gypi $(srcdir)/build/config.gypi $(srcdir)/binding.gyp $(srcdir)/../../../common.gypi
- $(call do_cmd,regen_makefile)
-
-# "all" is a concatenation of the "all" targets from all the included
-# sub-makefiles. This is just here to clarify.
-all:
-
-# Add in dependency-tracking rules. $(all_deps) is the list of every single
-# target in our tree. Only consider the ones with .d (dependency) info:
-d_files := $(wildcard $(foreach f,$(all_deps),$(depsdir)/$(f).d))
-ifneq ($(d_files),)
- include $(d_files)
-endif
diff --git a/test/addons-napi/test_typedarray/build/binding.Makefile b/test/addons-napi/test_typedarray/build/binding.Makefile
deleted file mode 100644
index 940d823bd8cd5e..00000000000000
--- a/test/addons-napi/test_typedarray/build/binding.Makefile
+++ /dev/null
@@ -1,6 +0,0 @@
-# This file is generated by gyp; do not edit.
-
-export builddir_name ?= ./build/.
-.PHONY: all
-all:
- $(MAKE) test_typedarray
diff --git a/test/addons-napi/test_typedarray/build/config.gypi b/test/addons-napi/test_typedarray/build/config.gypi
deleted file mode 100644
index 6a35cfccbb772b..00000000000000
--- a/test/addons-napi/test_typedarray/build/config.gypi
+++ /dev/null
@@ -1,70 +0,0 @@
-# Do not edit. File was generated by node-gyp's "configure" step
-{
- "target_defaults": {
- "cflags": [],
- "default_configuration": "Release",
- "defines": [],
- "include_dirs": [],
- "libraries": []
- },
- "variables": {
- "asan": 0,
- "coverage": "false",
- "debug_devtools": "node",
- "debug_http2": "false",
- "debug_nghttp2": "false",
- "force_dynamic_crt": 0,
- "host_arch": "x64",
- "icu_data_file": "icudt59l.dat",
- "icu_data_in": "../../deps/icu-small/source/data/in/icudt59l.dat",
- "icu_endianness": "l",
- "icu_gyp_path": "tools/icu/icu-generic.gyp",
- "icu_locales": "en,root",
- "icu_path": "deps/icu-small",
- "icu_small": "true",
- "icu_ver_major": "59",
- "llvm_version": 0,
- "node_byteorder": "little",
- "node_enable_d8": "false",
- "node_enable_v8_vtunejit": "false",
- "node_install_npm": "true",
- "node_module_version": 58,
- "node_no_browser_globals": "false",
- "node_prefix": "/usr/local",
- "node_release_urlbase": "",
- "node_shared": "false",
- "node_shared_cares": "false",
- "node_shared_http_parser": "false",
- "node_shared_libuv": "false",
- "node_shared_openssl": "false",
- "node_shared_zlib": "false",
- "node_tag": "",
- "node_use_bundled_v8": "true",
- "node_use_dtrace": "true",
- "node_use_etw": "false",
- "node_use_lttng": "false",
- "node_use_openssl": "true",
- "node_use_perfctr": "false",
- "node_use_v8_platform": "true",
- "node_without_node_options": "false",
- "openssl_fips": "",
- "openssl_no_asm": 0,
- "shlib_suffix": "58.dylib",
- "target_arch": "x64",
- "uv_parent_path": "/deps/uv/",
- "uv_use_dtrace": "true",
- "v8_enable_gdbjit": 0,
- "v8_enable_i18n_support": 1,
- "v8_enable_inspector": 1,
- "v8_no_strict_aliasing": 1,
- "v8_optimized_debug": 0,
- "v8_promise_internal_field_count": 1,
- "v8_random_seed": 0,
- "v8_trace_maps": 0,
- "v8_use_snapshot": "true",
- "want_separate_host_toolset": 0,
- "xcode_version": "8.0",
- "nodedir": "/Users/trott/io.js",
- "standalone_static_library": 1
- }
-}
diff --git a/test/addons-napi/test_typedarray/build/gyp-mac-tool b/test/addons-napi/test_typedarray/build/gyp-mac-tool
deleted file mode 100755
index 8ef02b0493a003..00000000000000
--- a/test/addons-napi/test_typedarray/build/gyp-mac-tool
+++ /dev/null
@@ -1,611 +0,0 @@
-#!/usr/bin/env python
-# Generated by gyp. Do not edit.
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Utility functions to perform Xcode-style build steps.
-
-These functions are executed via gyp-mac-tool when using the Makefile generator.
-"""
-
-import fcntl
-import fnmatch
-import glob
-import json
-import os
-import plistlib
-import re
-import shutil
-import string
-import subprocess
-import sys
-import tempfile
-
-
-def main(args):
- executor = MacTool()
- exit_code = executor.Dispatch(args)
- if exit_code is not None:
- sys.exit(exit_code)
-
-
-class MacTool(object):
- """This class performs all the Mac tooling steps. The methods can either be
- executed directly, or dispatched from an argument list."""
-
- def Dispatch(self, args):
- """Dispatches a string command to a method."""
- if len(args) < 1:
- raise Exception("Not enough arguments")
-
- method = "Exec%s" % self._CommandifyName(args[0])
- return getattr(self, method)(*args[1:])
-
- def _CommandifyName(self, name_string):
- """Transforms a tool name like copy-info-plist to CopyInfoPlist"""
- return name_string.title().replace('-', '')
-
- def ExecCopyBundleResource(self, source, dest, convert_to_binary):
- """Copies a resource file to the bundle/Resources directory, performing any
- necessary compilation on each resource."""
- extension = os.path.splitext(source)[1].lower()
- if os.path.isdir(source):
- # Copy tree.
- # TODO(thakis): This copies file attributes like mtime, while the
- # single-file branch below doesn't. This should probably be changed to
- # be consistent with the single-file branch.
- if os.path.exists(dest):
- shutil.rmtree(dest)
- shutil.copytree(source, dest)
- elif extension == '.xib':
- return self._CopyXIBFile(source, dest)
- elif extension == '.storyboard':
- return self._CopyXIBFile(source, dest)
- elif extension == '.strings':
- self._CopyStringsFile(source, dest, convert_to_binary)
- else:
- shutil.copy(source, dest)
-
- def _CopyXIBFile(self, source, dest):
- """Compiles a XIB file with ibtool into a binary plist in the bundle."""
-
- # ibtool sometimes crashes with relative paths. See crbug.com/314728.
- base = os.path.dirname(os.path.realpath(__file__))
- if os.path.relpath(source):
- source = os.path.join(base, source)
- if os.path.relpath(dest):
- dest = os.path.join(base, dest)
-
- args = ['xcrun', 'ibtool', '--errors', '--warnings', '--notices',
- '--output-format', 'human-readable-text', '--compile', dest, source]
- ibtool_section_re = re.compile(r'/\*.*\*/')
- ibtool_re = re.compile(r'.*note:.*is clipping its content')
- ibtoolout = subprocess.Popen(args, stdout=subprocess.PIPE)
- current_section_header = None
- for line in ibtoolout.stdout:
- if ibtool_section_re.match(line):
- current_section_header = line
- elif not ibtool_re.match(line):
- if current_section_header:
- sys.stdout.write(current_section_header)
- current_section_header = None
- sys.stdout.write(line)
- return ibtoolout.returncode
-
- def _ConvertToBinary(self, dest):
- subprocess.check_call([
- 'xcrun', 'plutil', '-convert', 'binary1', '-o', dest, dest])
-
- def _CopyStringsFile(self, source, dest, convert_to_binary):
- """Copies a .strings file using iconv to reconvert the input into UTF-16."""
- input_code = self._DetectInputEncoding(source) or "UTF-8"
-
- # Xcode's CpyCopyStringsFile / builtin-copyStrings seems to call
- # CFPropertyListCreateFromXMLData() behind the scenes; at least it prints
- # CFPropertyListCreateFromXMLData(): Old-style plist parser: missing
- # semicolon in dictionary.
- # on invalid files. Do the same kind of validation.
- import CoreFoundation
- s = open(source, 'rb').read()
- d = CoreFoundation.CFDataCreate(None, s, len(s))
- _, error = CoreFoundation.CFPropertyListCreateFromXMLData(None, d, 0, None)
- if error:
- return
-
- fp = open(dest, 'wb')
- fp.write(s.decode(input_code).encode('UTF-16'))
- fp.close()
-
- if convert_to_binary == 'True':
- self._ConvertToBinary(dest)
-
- def _DetectInputEncoding(self, file_name):
- """Reads the first few bytes from file_name and tries to guess the text
- encoding. Returns None as a guess if it can't detect it."""
- fp = open(file_name, 'rb')
- try:
- header = fp.read(3)
-    except Exception:
- fp.close()
- return None
- fp.close()
- if header.startswith("\xFE\xFF"):
- return "UTF-16"
- elif header.startswith("\xFF\xFE"):
- return "UTF-16"
- elif header.startswith("\xEF\xBB\xBF"):
- return "UTF-8"
- else:
- return None
-
- def ExecCopyInfoPlist(self, source, dest, convert_to_binary, *keys):
- """Copies the |source| Info.plist to the destination directory |dest|."""
- # Read the source Info.plist into memory.
- fd = open(source, 'r')
- lines = fd.read()
- fd.close()
-
- # Insert synthesized key/value pairs (e.g. BuildMachineOSBuild).
- plist = plistlib.readPlistFromString(lines)
- if keys:
- plist = dict(plist.items() + json.loads(keys[0]).items())
- lines = plistlib.writePlistToString(plist)
-
- # Go through all the environment variables and replace them as variables in
- # the file.
- IDENT_RE = re.compile(r'[/\s]')
- for key in os.environ:
- if key.startswith('_'):
- continue
- evar = '${%s}' % key
- evalue = os.environ[key]
- lines = string.replace(lines, evar, evalue)
-
-      # Xcode supports various suffixes on environment variables, which are
- # all undocumented. :rfc1034identifier is used in the standard project
- # template these days, and :identifier was used earlier. They are used to
- # convert non-url characters into things that look like valid urls --
- # except that the replacement character for :identifier, '_' isn't valid
- # in a URL either -- oops, hence :rfc1034identifier was born.
- evar = '${%s:identifier}' % key
- evalue = IDENT_RE.sub('_', os.environ[key])
- lines = string.replace(lines, evar, evalue)
-
- evar = '${%s:rfc1034identifier}' % key
- evalue = IDENT_RE.sub('-', os.environ[key])
- lines = string.replace(lines, evar, evalue)
-
- # Remove any keys with values that haven't been replaced.
- lines = lines.split('\n')
- for i in range(len(lines)):
- if lines[i].strip().startswith("${"):
- lines[i] = None
- lines[i - 1] = None
- lines = '\n'.join(filter(lambda x: x is not None, lines))
-
- # Write out the file with variables replaced.
- fd = open(dest, 'w')
- fd.write(lines)
- fd.close()
-
-    # Write out the PkgInfo file now that the Info.plist file has been
-    # "compiled".
- self._WritePkgInfo(dest)
-
- if convert_to_binary == 'True':
- self._ConvertToBinary(dest)
-
- def _WritePkgInfo(self, info_plist):
- """This writes the PkgInfo file from the data stored in Info.plist."""
- plist = plistlib.readPlist(info_plist)
- if not plist:
- return
-
- # Only create PkgInfo for executable types.
- package_type = plist['CFBundlePackageType']
- if package_type != 'APPL':
- return
-
- # The format of PkgInfo is eight characters, representing the bundle type
- # and bundle signature, each four characters. If that is missing, four
- # '?' characters are used instead.
- signature_code = plist.get('CFBundleSignature', '????')
- if len(signature_code) != 4: # Wrong length resets everything, too.
- signature_code = '?' * 4
-
- dest = os.path.join(os.path.dirname(info_plist), 'PkgInfo')
- fp = open(dest, 'w')
- fp.write('%s%s' % (package_type, signature_code))
- fp.close()
-
- def ExecFlock(self, lockfile, *cmd_list):
- """Emulates the most basic behavior of Linux's flock(1)."""
- # Rely on exception handling to report errors.
- fd = os.open(lockfile, os.O_RDONLY|os.O_NOCTTY|os.O_CREAT, 0o666)
- fcntl.flock(fd, fcntl.LOCK_EX)
- return subprocess.call(cmd_list)
-
- def ExecFilterLibtool(self, *cmd_list):
- """Calls libtool and filters out '/path/to/libtool: file: foo.o has no
- symbols'."""
- libtool_re = re.compile(r'^.*libtool: file: .* has no symbols$')
- libtool_re5 = re.compile(
- r'^.*libtool: warning for library: ' +
- r'.* the table of contents is empty ' +
- r'\(no object file members in the library define global symbols\)$')
- env = os.environ.copy()
- # Ref:
- # http://www.opensource.apple.com/source/cctools/cctools-809/misc/libtool.c
- # The problem with this flag is that it resets the file mtime on the file to
- # epoch=0, e.g. 1970-1-1 or 1969-12-31 depending on timezone.
- env['ZERO_AR_DATE'] = '1'
- libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env)
- _, err = libtoolout.communicate()
- for line in err.splitlines():
- if not libtool_re.match(line) and not libtool_re5.match(line):
- print >>sys.stderr, line
- # Unconditionally touch the output .a file on the command line if present
- # and the command succeeded. A bit hacky.
- if not libtoolout.returncode:
- for i in range(len(cmd_list) - 1):
- if cmd_list[i] == "-o" and cmd_list[i+1].endswith('.a'):
- os.utime(cmd_list[i+1], None)
- break
- return libtoolout.returncode
-
- def ExecPackageFramework(self, framework, version):
- """Takes a path to Something.framework and the Current version of that and
- sets up all the symlinks."""
- # Find the name of the binary based on the part before the ".framework".
- binary = os.path.basename(framework).split('.')[0]
-
- CURRENT = 'Current'
- RESOURCES = 'Resources'
- VERSIONS = 'Versions'
-
- if not os.path.exists(os.path.join(framework, VERSIONS, version, binary)):
- # Binary-less frameworks don't seem to contain symlinks (see e.g.
- # chromium's out/Debug/org.chromium.Chromium.manifest/ bundle).
- return
-
- # Move into the framework directory to set the symlinks correctly.
- pwd = os.getcwd()
- os.chdir(framework)
-
- # Set up the Current version.
- self._Relink(version, os.path.join(VERSIONS, CURRENT))
-
- # Set up the root symlinks.
- self._Relink(os.path.join(VERSIONS, CURRENT, binary), binary)
- self._Relink(os.path.join(VERSIONS, CURRENT, RESOURCES), RESOURCES)
-
- # Back to where we were before!
- os.chdir(pwd)
-
- def _Relink(self, dest, link):
- """Creates a symlink to |dest| named |link|. If |link| already exists,
- it is overwritten."""
- if os.path.lexists(link):
- os.remove(link)
- os.symlink(dest, link)
-
- def ExecCompileXcassets(self, keys, *inputs):
- """Compiles multiple .xcassets files into a single .car file.
-
-    This invokes 'actool' to compile all the input .xcassets files. The
-    |keys| argument is a json-encoded dictionary of extra arguments to
-    pass to 'actool' when the asset catalogs contain an application icon
-    or a launch image.
-
-    Note that 'actool' does not create the Assets.car file if the asset
-    catalogs do not contain an imageset.
- """
- command_line = [
- 'xcrun', 'actool', '--output-format', 'human-readable-text',
- '--compress-pngs', '--notices', '--warnings', '--errors',
- ]
- is_iphone_target = 'IPHONEOS_DEPLOYMENT_TARGET' in os.environ
- if is_iphone_target:
- platform = os.environ['CONFIGURATION'].split('-')[-1]
- if platform not in ('iphoneos', 'iphonesimulator'):
- platform = 'iphonesimulator'
- command_line.extend([
- '--platform', platform, '--target-device', 'iphone',
- '--target-device', 'ipad', '--minimum-deployment-target',
- os.environ['IPHONEOS_DEPLOYMENT_TARGET'], '--compile',
- os.path.abspath(os.environ['CONTENTS_FOLDER_PATH']),
- ])
- else:
- command_line.extend([
- '--platform', 'macosx', '--target-device', 'mac',
- '--minimum-deployment-target', os.environ['MACOSX_DEPLOYMENT_TARGET'],
- '--compile',
- os.path.abspath(os.environ['UNLOCALIZED_RESOURCES_FOLDER_PATH']),
- ])
- if keys:
- keys = json.loads(keys)
- for key, value in keys.iteritems():
- arg_name = '--' + key
- if isinstance(value, bool):
- if value:
- command_line.append(arg_name)
- elif isinstance(value, list):
- for v in value:
- command_line.append(arg_name)
- command_line.append(str(v))
- else:
- command_line.append(arg_name)
- command_line.append(str(value))
-    # Note: actool crashes if input paths are relative, so use os.path.abspath
-    # to get absolute path names for the inputs.
- command_line.extend(map(os.path.abspath, inputs))
- subprocess.check_call(command_line)
-
- def ExecMergeInfoPlist(self, output, *inputs):
- """Merge multiple .plist files into a single .plist file."""
- merged_plist = {}
- for path in inputs:
- plist = self._LoadPlistMaybeBinary(path)
- self._MergePlist(merged_plist, plist)
- plistlib.writePlist(merged_plist, output)
-
- def ExecCodeSignBundle(self, key, resource_rules, entitlements, provisioning):
- """Code sign a bundle.
-
- This function tries to code sign an iOS bundle, following the same
- algorithm as Xcode:
- 1. copy ResourceRules.plist from the user or the SDK into the bundle,
-      2. pick the provisioning profile that best matches the bundle identifier,
- and copy it into the bundle as embedded.mobileprovision,
- 3. copy Entitlements.plist from user or SDK next to the bundle,
- 4. code sign the bundle.
- """
- resource_rules_path = self._InstallResourceRules(resource_rules)
- substitutions, overrides = self._InstallProvisioningProfile(
- provisioning, self._GetCFBundleIdentifier())
- entitlements_path = self._InstallEntitlements(
- entitlements, substitutions, overrides)
- subprocess.check_call([
- 'codesign', '--force', '--sign', key, '--resource-rules',
- resource_rules_path, '--entitlements', entitlements_path,
- os.path.join(
- os.environ['TARGET_BUILD_DIR'],
- os.environ['FULL_PRODUCT_NAME'])])
-
- def _InstallResourceRules(self, resource_rules):
- """Installs ResourceRules.plist from user or SDK into the bundle.
-
- Args:
- resource_rules: string, optional, path to the ResourceRules.plist file
- to use, default to "${SDKROOT}/ResourceRules.plist"
-
- Returns:
- Path to the copy of ResourceRules.plist into the bundle.
- """
- source_path = resource_rules
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['CONTENTS_FOLDER_PATH'],
- 'ResourceRules.plist')
- if not source_path:
- source_path = os.path.join(
- os.environ['SDKROOT'], 'ResourceRules.plist')
- shutil.copy2(source_path, target_path)
- return target_path
-
- def _InstallProvisioningProfile(self, profile, bundle_identifier):
- """Installs embedded.mobileprovision into the bundle.
-
- Args:
- profile: string, optional, short name of the .mobileprovision file
- to use, if empty or the file is missing, the best file installed
- will be used
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
-
- Returns:
-      A tuple containing two dictionaries: variable substitutions and values
-      to override when generating the entitlements file.
- """
- source_path, provisioning_data, team_id = self._FindProvisioningProfile(
- profile, bundle_identifier)
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['CONTENTS_FOLDER_PATH'],
- 'embedded.mobileprovision')
- shutil.copy2(source_path, target_path)
- substitutions = self._GetSubstitutions(bundle_identifier, team_id + '.')
- return substitutions, provisioning_data['Entitlements']
-
- def _FindProvisioningProfile(self, profile, bundle_identifier):
- """Finds the .mobileprovision file to use for signing the bundle.
-
- Checks all the installed provisioning profiles (or if the user specified
-    the PROVISIONING_PROFILE variable, only consults it) and selects the most
-    specific one that corresponds to the bundle identifier.
-
- Args:
- profile: string, optional, short name of the .mobileprovision file
- to use, if empty or the file is missing, the best file installed
- will be used
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
-
- Returns:
- A tuple of the path to the selected provisioning profile, the data of
- the embedded plist in the provisioning profile and the team identifier
- to use for code signing.
-
- Raises:
- SystemExit: if no .mobileprovision can be used to sign the bundle.
- """
- profiles_dir = os.path.join(
- os.environ['HOME'], 'Library', 'MobileDevice', 'Provisioning Profiles')
- if not os.path.isdir(profiles_dir):
- print >>sys.stderr, (
- 'cannot find mobile provisioning for %s' % bundle_identifier)
- sys.exit(1)
- provisioning_profiles = None
- if profile:
- profile_path = os.path.join(profiles_dir, profile + '.mobileprovision')
- if os.path.exists(profile_path):
- provisioning_profiles = [profile_path]
- if not provisioning_profiles:
- provisioning_profiles = glob.glob(
- os.path.join(profiles_dir, '*.mobileprovision'))
- valid_provisioning_profiles = {}
- for profile_path in provisioning_profiles:
- profile_data = self._LoadProvisioningProfile(profile_path)
- app_id_pattern = profile_data.get(
- 'Entitlements', {}).get('application-identifier', '')
- for team_identifier in profile_data.get('TeamIdentifier', []):
- app_id = '%s.%s' % (team_identifier, bundle_identifier)
- if fnmatch.fnmatch(app_id, app_id_pattern):
- valid_provisioning_profiles[app_id_pattern] = (
- profile_path, profile_data, team_identifier)
- if not valid_provisioning_profiles:
- print >>sys.stderr, (
- 'cannot find mobile provisioning for %s' % bundle_identifier)
- sys.exit(1)
- # If the user has multiple provisioning profiles installed that can be
- # used for ${bundle_identifier}, pick the most specific one (ie. the
- # provisioning profile whose pattern is the longest).
- selected_key = max(valid_provisioning_profiles, key=lambda v: len(v))
- return valid_provisioning_profiles[selected_key]
-
- def _LoadProvisioningProfile(self, profile_path):
- """Extracts the plist embedded in a provisioning profile.
-
- Args:
- profile_path: string, path to the .mobileprovision file
-
- Returns:
- Content of the plist embedded in the provisioning profile as a dictionary.
- """
- with tempfile.NamedTemporaryFile() as temp:
- subprocess.check_call([
- 'security', 'cms', '-D', '-i', profile_path, '-o', temp.name])
- return self._LoadPlistMaybeBinary(temp.name)
-
- def _MergePlist(self, merged_plist, plist):
- """Merge |plist| into |merged_plist|."""
- for key, value in plist.iteritems():
- if isinstance(value, dict):
- merged_value = merged_plist.get(key, {})
- if isinstance(merged_value, dict):
- self._MergePlist(merged_value, value)
- merged_plist[key] = merged_value
- else:
- merged_plist[key] = value
- else:
- merged_plist[key] = value
-
- def _LoadPlistMaybeBinary(self, plist_path):
-    """Loads into memory a plist possibly encoded in binary format.
-
- This is a wrapper around plistlib.readPlist that tries to convert the
- plist to the XML format if it can't be parsed (assuming that it is in
- the binary format).
-
- Args:
- plist_path: string, path to a plist file, in XML or binary format
-
- Returns:
- Content of the plist as a dictionary.
- """
- try:
- # First, try to read the file using plistlib that only supports XML,
- # and if an exception is raised, convert a temporary copy to XML and
- # load that copy.
- return plistlib.readPlist(plist_path)
- except:
- pass
- with tempfile.NamedTemporaryFile() as temp:
- shutil.copy2(plist_path, temp.name)
- subprocess.check_call(['plutil', '-convert', 'xml1', temp.name])
- return plistlib.readPlist(temp.name)
-
- def _GetSubstitutions(self, bundle_identifier, app_identifier_prefix):
- """Constructs a dictionary of variable substitutions for Entitlements.plist.
-
- Args:
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
- app_identifier_prefix: string, value for AppIdentifierPrefix
-
- Returns:
- Dictionary of substitutions to apply when generating Entitlements.plist.
- """
- return {
- 'CFBundleIdentifier': bundle_identifier,
- 'AppIdentifierPrefix': app_identifier_prefix,
- }
-
- def _GetCFBundleIdentifier(self):
- """Extracts CFBundleIdentifier value from Info.plist in the bundle.
-
- Returns:
- Value of CFBundleIdentifier in the Info.plist located in the bundle.
- """
- info_plist_path = os.path.join(
- os.environ['TARGET_BUILD_DIR'],
- os.environ['INFOPLIST_PATH'])
- info_plist_data = self._LoadPlistMaybeBinary(info_plist_path)
- return info_plist_data['CFBundleIdentifier']
-
- def _InstallEntitlements(self, entitlements, substitutions, overrides):
-    """Generates and installs the ${BundleName}.xcent entitlements file.
-
-    Expands the "$(variable)" patterns in the source entitlements file,
-    adds extra entitlements defined in the .mobileprovision file and copies
-    the generated plist to "${BundlePath}.xcent".
-
- Args:
- entitlements: string, optional, path to the Entitlements.plist template
- to use, defaults to "${SDKROOT}/Entitlements.plist"
- substitutions: dictionary, variable substitutions
- overrides: dictionary, values to add to the entitlements
-
- Returns:
- Path to the generated entitlements file.
- """
- source_path = entitlements
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['PRODUCT_NAME'] + '.xcent')
- if not source_path:
- source_path = os.path.join(
- os.environ['SDKROOT'],
- 'Entitlements.plist')
- shutil.copy2(source_path, target_path)
- data = self._LoadPlistMaybeBinary(target_path)
- data = self._ExpandVariables(data, substitutions)
- if overrides:
- for key in overrides:
- if key not in data:
- data[key] = overrides[key]
- plistlib.writePlist(data, target_path)
- return target_path
-
- def _ExpandVariables(self, data, substitutions):
- """Expands variables "$(variable)" in data.
-
- Args:
- data: object, can be either string, list or dictionary
- substitutions: dictionary, variable substitutions to perform
-
- Returns:
-      Copy of data where each reference to "$(variable)" has been replaced
- by the corresponding value found in substitutions, or left intact if
- the key was not found.
- """
- if isinstance(data, str):
- for key, value in substitutions.iteritems():
- data = data.replace('$(%s)' % key, value)
- return data
- if isinstance(data, list):
- return [self._ExpandVariables(v, substitutions) for v in data]
- if isinstance(data, dict):
- return {k: self._ExpandVariables(data[k], substitutions) for k in data}
- return data
-
-if __name__ == '__main__':
- sys.exit(main(sys.argv[1:]))
diff --git a/test/addons-napi/test_typedarray/build/test_typedarray.target.mk b/test/addons-napi/test_typedarray/build/test_typedarray.target.mk
deleted file mode 100644
index 3f0c4a421abc27..00000000000000
--- a/test/addons-napi/test_typedarray/build/test_typedarray.target.mk
+++ /dev/null
@@ -1,178 +0,0 @@
-# This file is generated by gyp; do not edit.
-
-TOOLSET := target
-TARGET := test_typedarray
-DEFS_Debug := \
- '-DNODE_GYP_MODULE_NAME=test_typedarray' \
- '-DUSING_UV_SHARED=1' \
- '-DUSING_V8_SHARED=1' \
- '-DV8_DEPRECATION_WARNINGS=1' \
- '-D_DARWIN_USE_64_BIT_INODE=1' \
- '-D_LARGEFILE_SOURCE' \
- '-D_FILE_OFFSET_BITS=64' \
- '-DBUILDING_NODE_EXTENSION' \
- '-DDEBUG' \
- '-D_DEBUG' \
- '-DV8_ENABLE_CHECKS'
-
-# Flags passed to all source files.
-CFLAGS_Debug := \
- -O0 \
- -gdwarf-2 \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -Wall \
- -Wendif-labels \
- -W \
- -Wno-unused-parameter
-
-# Flags passed to only C files.
-CFLAGS_C_Debug := \
- -fno-strict-aliasing
-
-# Flags passed to only C++ files.
-CFLAGS_CC_Debug := \
- -std=gnu++0x \
- -stdlib=libc++ \
- -fno-rtti \
- -fno-exceptions \
- -fno-threadsafe-statics \
- -fno-strict-aliasing
-
-# Flags passed to only ObjC files.
-CFLAGS_OBJC_Debug :=
-
-# Flags passed to only ObjC++ files.
-CFLAGS_OBJCC_Debug :=
-
-INCS_Debug := \
- -I/Users/trott/io.js/include/node \
- -I/Users/trott/io.js/src \
- -I/Users/trott/io.js/deps/uv/include \
- -I/Users/trott/io.js/deps/v8/include
-
-DEFS_Release := \
- '-DNODE_GYP_MODULE_NAME=test_typedarray' \
- '-DUSING_UV_SHARED=1' \
- '-DUSING_V8_SHARED=1' \
- '-DV8_DEPRECATION_WARNINGS=1' \
- '-D_DARWIN_USE_64_BIT_INODE=1' \
- '-D_LARGEFILE_SOURCE' \
- '-D_FILE_OFFSET_BITS=64' \
- '-DBUILDING_NODE_EXTENSION'
-
-# Flags passed to all source files.
-CFLAGS_Release := \
- -Os \
- -gdwarf-2 \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -Wall \
- -Wendif-labels \
- -W \
- -Wno-unused-parameter
-
-# Flags passed to only C files.
-CFLAGS_C_Release := \
- -fno-strict-aliasing
-
-# Flags passed to only C++ files.
-CFLAGS_CC_Release := \
- -std=gnu++0x \
- -stdlib=libc++ \
- -fno-rtti \
- -fno-exceptions \
- -fno-threadsafe-statics \
- -fno-strict-aliasing
-
-# Flags passed to only ObjC files.
-CFLAGS_OBJC_Release :=
-
-# Flags passed to only ObjC++ files.
-CFLAGS_OBJCC_Release :=
-
-INCS_Release := \
- -I/Users/trott/io.js/include/node \
- -I/Users/trott/io.js/src \
- -I/Users/trott/io.js/deps/uv/include \
- -I/Users/trott/io.js/deps/v8/include
-
-OBJS := \
- $(obj).target/$(TARGET)/test_typedarray.o
-
-# Add to the list of files we specially track dependencies for.
-all_deps += $(OBJS)
-
-# CFLAGS et al overrides must be target-local.
-# See "Target-specific Variable Values" in the GNU Make manual.
-$(OBJS): TOOLSET := $(TOOLSET)
-$(OBJS): GYP_CFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE))
-$(OBJS): GYP_CXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE))
-$(OBJS): GYP_OBJCFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) $(CFLAGS_OBJC_$(BUILDTYPE))
-$(OBJS): GYP_OBJCXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) $(CFLAGS_OBJCC_$(BUILDTYPE))
-
-# Suffix rules, putting all outputs into $(obj).
-
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-# Try building from generated source, too.
-
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-
-# End of this set of suffix rules
-### Rules for final target.
-LDFLAGS_Debug := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -L$(builddir) \
- -stdlib=libc++
-
-LIBTOOLFLAGS_Debug := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first
-
-LDFLAGS_Release := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first \
- -mmacosx-version-min=10.7 \
- -arch x86_64 \
- -L$(builddir) \
- -stdlib=libc++
-
-LIBTOOLFLAGS_Release := \
- -undefined dynamic_lookup \
- -Wl,-no_pie \
- -Wl,-search_paths_first
-
-LIBS :=
-
-$(builddir)/test_typedarray.node: GYP_LDFLAGS := $(LDFLAGS_$(BUILDTYPE))
-$(builddir)/test_typedarray.node: LIBS := $(LIBS)
-$(builddir)/test_typedarray.node: GYP_LIBTOOLFLAGS := $(LIBTOOLFLAGS_$(BUILDTYPE))
-$(builddir)/test_typedarray.node: TOOLSET := $(TOOLSET)
-$(builddir)/test_typedarray.node: $(OBJS) FORCE_DO_CMD
- $(call do_cmd,solink_module)
-
-all_deps += $(builddir)/test_typedarray.node
-# Add target alias
-.PHONY: test_typedarray
-test_typedarray: $(builddir)/test_typedarray.node
-
-# Short alias for building this executable.
-.PHONY: test_typedarray.node
-test_typedarray.node: $(builddir)/test_typedarray.node
-
-# Add executable to "all" target.
-.PHONY: all
-all: $(builddir)/test_typedarray.node
-
From fd7d1990db1cd6b2f5d400b8f9295c4368cfdad8 Mon Sep 17 00:00:00 2001
From: Kyle Farnung
Date: Tue, 27 Feb 2018 10:58:28 -0800
Subject: [PATCH 007/227] test: remove orphaned entries from status
PR-URL: https://github.com/nodejs/node/pull/19042
Reviewed-By: Myles Borins
---
test/inspector/inspector.status | 1 -
test/known_issues/known_issues.status | 2 --
test/sequential/sequential.status | 1 -
3 files changed, 4 deletions(-)
diff --git a/test/inspector/inspector.status b/test/inspector/inspector.status
index 070d817b2c3ab2..ed6a782b9031a7 100644
--- a/test/inspector/inspector.status
+++ b/test/inspector/inspector.status
@@ -5,6 +5,5 @@ prefix inspector
# sample-test : PASS,FLAKY
[true] # This section applies to all platforms
-test-inspector-port-zero-cluster : PASS,FLAKY
[$system==win32]
diff --git a/test/known_issues/known_issues.status b/test/known_issues/known_issues.status
index 46c8ed32741c7d..e21913e232c03f 100644
--- a/test/known_issues/known_issues.status
+++ b/test/known_issues/known_issues.status
@@ -7,8 +7,6 @@ prefix known_issues
[true] # This section applies to all platforms
[$system==win32]
-test-stdout-buffer-flush-on-exit: SKIP
-test-cluster-disconnect-handles: SKIP
[$system==linux]
diff --git a/test/sequential/sequential.status b/test/sequential/sequential.status
index d8ef1ad44a4624..8467864395ff66 100644
--- a/test/sequential/sequential.status
+++ b/test/sequential/sequential.status
@@ -7,7 +7,6 @@ prefix sequential
[true] # This section applies to all platforms
[$system==win32]
-test-inspector-stop-profile-after-done: PASS, FLAKY
[$system==linux]
From dbe70b744c40611f6900755d57f6aaf90f8d805f Mon Sep 17 00:00:00 2001
From: Luigi Pinca
Date: Wed, 17 Jan 2018 15:44:49 +0100
Subject: [PATCH 008/227] http: free the parser before emitting 'upgrade'
Ensure that the parser is freed before emitting the 'connect' or
'upgrade' event.
PR-URL: https://github.com/nodejs/node/pull/18209
Reviewed-By: James M Snell
Reviewed-By: Anna Henningsen
Reviewed-By: Jon Moss
Reviewed-By: Fedor Indutny
Reviewed-By: Ruben Bridgewater
---
lib/_http_client.js | 2 +-
.../test-http-parser-freed-before-upgrade.js | 33 +++++++++++++++++++
2 files changed, 34 insertions(+), 1 deletion(-)
create mode 100644 test/parallel/test-http-parser-freed-before-upgrade.js
diff --git a/lib/_http_client.js b/lib/_http_client.js
index 4d71ec594743f4..9d2057814133b7 100644
--- a/lib/_http_client.js
+++ b/lib/_http_client.js
@@ -376,6 +376,7 @@ function socketOnData(d) {
socket.removeListener('data', socketOnData);
socket.removeListener('end', socketOnEnd);
parser.finish();
+ freeParser(parser, req, socket);
var bodyHead = d.slice(bytesParsed, d.length);
@@ -398,7 +399,6 @@ function socketOnData(d) {
// Got Upgrade header or CONNECT method, but have no handler.
socket.destroy();
}
- freeParser(parser, req, socket);
} else if (parser.incoming && parser.incoming.complete &&
// When the status code is 100 (Continue), the server will
// send a final response after this client sends a request
diff --git a/test/parallel/test-http-parser-freed-before-upgrade.js b/test/parallel/test-http-parser-freed-before-upgrade.js
new file mode 100644
index 00000000000000..4ba1de9501681c
--- /dev/null
+++ b/test/parallel/test-http-parser-freed-before-upgrade.js
@@ -0,0 +1,33 @@
+'use strict';
+
+const common = require('../common');
+const assert = require('assert');
+const http = require('http');
+
+const server = http.createServer();
+
+server.on('upgrade', common.mustCall((request, socket) => {
+ assert.strictEqual(socket.parser, null);
+ socket.write([
+ 'HTTP/1.1 101 Switching Protocols',
+ 'Connection: Upgrade',
+ 'Upgrade: WebSocket',
+ '\r\n'
+ ].join('\r\n'));
+}));
+
+server.listen(common.mustCall(() => {
+ const request = http.get({
+ port: server.address().port,
+ headers: {
+ Connection: 'Upgrade',
+ Upgrade: 'WebSocket'
+ }
+ });
+
+ request.on('upgrade', common.mustCall((response, socket) => {
+ assert.strictEqual(socket.parser, null);
+ socket.destroy();
+ server.close();
+ }));
+}));
From 64c83d7da9af5684633df48f56625f4dafca05dd Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=E9=99=88=E5=88=9A?=
Date: Sat, 20 Jan 2018 08:19:05 +0800
Subject: [PATCH 009/227] stream: simplify `src._readableState` to `state`
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
PR-URL: https://github.com/nodejs/node/pull/18264
Reviewed-By: Weijia Wang
Reviewed-By: Luigi Pinca
Reviewed-By: Michaël Zasso
Reviewed-By: Matteo Collina
---
lib/_stream_readable.js | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/lib/_stream_readable.js b/lib/_stream_readable.js
index 541ef1c305d8bf..dfba99cbc74d7d 100644
--- a/lib/_stream_readable.js
+++ b/lib/_stream_readable.js
@@ -561,8 +561,8 @@ Readable.prototype.pipe = function(dest, pipeOpts) {
if (((state.pipesCount === 1 && state.pipes === dest) ||
(state.pipesCount > 1 && state.pipes.indexOf(dest) !== -1)) &&
!cleanedUp) {
- debug('false write response, pause', src._readableState.awaitDrain);
- src._readableState.awaitDrain++;
+ debug('false write response, pause', state.awaitDrain);
+ state.awaitDrain++;
increasedAwaitDrain = true;
}
src.pause();
From 67fd5205398bf7423de60fd0211325d32e71bac5 Mon Sep 17 00:00:00 2001
From: Ali Ijaz Sheikh
Date: Wed, 24 Jan 2018 10:50:50 -0800
Subject: [PATCH 010/227] doc: reorder section on updating PR branch
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
It makes more sense to provide instructions on how to update the PR
branch before instructions on pushing the commit.
PR-URL: https://github.com/nodejs/node/pull/18355
Reviewed-By: Anna Henningsen
Reviewed-By: Michaël Zasso
Reviewed-By: Richard Lau
Reviewed-By: Colin Ihrig
Reviewed-By: Luigi Pinca
Reviewed-By: James M Snell
Reviewed-By: Jon Moss
---
COLLABORATOR_GUIDE.md | 16 ++++++++--------
1 file changed, 8 insertions(+), 8 deletions(-)
diff --git a/COLLABORATOR_GUIDE.md b/COLLABORATOR_GUIDE.md
index 7d8d70cdac38e0..11cf78f69b806f 100644
--- a/COLLABORATOR_GUIDE.md
+++ b/COLLABORATOR_GUIDE.md
@@ -593,20 +593,20 @@ Validate that the commit message is properly formatted using
$ git rev-list upstream/master...HEAD | xargs core-validate-commit
```
+Optional: When landing your own commits, force push the amended commit to the
+branch you used to open the pull request. If your branch is called `bugfix`,
+then the command would be `git push --force-with-lease origin master:bugfix`.
+When the pull request is closed, this will cause the pull request to
+show the purple merged status rather than the red closed status that is
+usually used for pull requests that weren't merged.
+
Time to push it:
```text
$ git push upstream master
```
-* Optional: Force push the amended commit to the branch you used to
-open the pull request. If your branch is called `bugfix`, then the
-command would be `git push --force-with-lease origin master:bugfix`.
-When the pull request is closed, this will cause the pull request to
-show the purple merged status rather than the red closed status that is
-usually used for pull requests that weren't merged. Only do this when
-landing your own contributions.
-* Close the pull request with a "Landed in ``" comment. If
+Close the pull request with a "Landed in ``" comment. If
your pull request shows the purple merged status then you should still
add the "Landed in .." comment if you added
multiple commits.
From 27d3c1a0f4dd29286daf51d6dda6ed9eda762902 Mon Sep 17 00:00:00 2001
From: Rich Trott
Date: Wed, 31 Jan 2018 09:35:31 -0800
Subject: [PATCH 011/227] doc: add Gibson Fahnestock to TSC
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Welcome Gibson to the TSC!
PR-URL: https://github.com/nodejs/node/pull/18481
Reviewed-By: Colin Ihrig
Reviewed-By: Michaël Zasso
Reviewed-By: Ali Ijaz Sheikh
Reviewed-By: Daniel Bevenius
Reviewed-By: Jon Moss
Reviewed-By: Sakthipriyan Vairamani
Reviewed-By: Tiancheng "Timothy" Gu
Reviewed-By: Evan Lucas
Reviewed-By: Сковорода Никита Андреевич
Reviewed-By: Richard Lau
Reviewed-By: Michael Dawson
---
README.md | 2 ++
1 file changed, 2 insertions(+)
diff --git a/README.md b/README.md
index 55ad1756f15c1e..4530eac002d185 100644
--- a/README.md
+++ b/README.md
@@ -248,6 +248,8 @@ For more information about the governance of the Node.js project, see
**Franziska Hinkelmann** <franziska.hinkelmann@gmail.com> (she/her)
* [Fishrock123](https://github.com/Fishrock123) -
**Jeremiah Senkpiel** <fishrock123@rocketmail.com>
+* [gibfahn](https://github.com/gibfahn) -
+**Gibson Fahnestock** <gibfahn@gmail.com> (he/him)
* [indutny](https://github.com/indutny) -
**Fedor Indutny** <fedor.indutny@gmail.com>
* [jasnell](https://github.com/jasnell) -
From 43839f160149c023c8d270414956141dd945fafe Mon Sep 17 00:00:00 2001
From: Rich Trott
Date: Wed, 31 Jan 2018 09:36:51 -0800
Subject: [PATCH 012/227] doc: move Brian White to TSC Emeriti list
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
PR-URL: https://github.com/nodejs/node/pull/18482
Reviewed-By: Colin Ihrig
Reviewed-By: Michaël Zasso
Reviewed-By: Daniel Bevenius
Reviewed-By: Ali Ijaz Sheikh
Reviewed-By: Jon Moss
Reviewed-By: Evan Lucas
Reviewed-By: James M Snell
Reviewed-By: Ruben Bridgewater
Reviewed-By: Michael Dawson
---
README.md | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/README.md b/README.md
index 4530eac002d185..53471775067632 100644
--- a/README.md
+++ b/README.md
@@ -260,8 +260,6 @@ For more information about the governance of the Node.js project, see
**Matteo Collina** <matteo.collina@gmail.com> (he/him)
* [mhdawson](https://github.com/mhdawson) -
**Michael Dawson** <michael_dawson@ca.ibm.com> (he/him)
-* [mscdex](https://github.com/mscdex) -
-**Brian White** <mscdex@mscdex.net>
* [MylesBorins](https://github.com/MylesBorins) -
**Myles Borins** <myles.borins@gmail.com> (he/him)
* [ofrobots](https://github.com/ofrobots) -
@@ -287,6 +285,8 @@ For more information about the governance of the Node.js project, see
**Isaac Z. Schlueter** <i@izs.me>
* [joshgav](https://github.com/joshgav) -
**Josh Gavant** <josh.gavant@outlook.com>
+* [mscdex](https://github.com/mscdex) -
+**Brian White** <mscdex@mscdex.net>
* [nebrius](https://github.com/nebrius) -
**Bryan Hughes** <bryan@nebri.us>
* [orangemocha](https://github.com/orangemocha) -
From 32089bcbc1c537632b8f89ba4165bdeca404e5c2 Mon Sep 17 00:00:00 2001
From: Rich Trott
Date: Wed, 31 Jan 2018 09:53:10 -0800
Subject: [PATCH 013/227] doc: streamline README intro
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Shorten text that is duplicated from the website and supply a link.
PR-URL: https://github.com/nodejs/node/pull/18483
Reviewed-By: James M Snell
Reviewed-By: Colin Ihrig
Reviewed-By: Сковорода Никита Андреевич
Reviewed-By: Luigi Pinca
Reviewed-By: Joyee Cheung
Reviewed-By: Daniel Bevenius
Reviewed-By: Ruben Bridgewater
---
README.md | 8 +++-----
1 file changed, 3 insertions(+), 5 deletions(-)
diff --git a/README.md b/README.md
index 53471775067632..ad3d838dd58076 100644
--- a/README.md
+++ b/README.md
@@ -7,10 +7,9 @@
-Node.js is a JavaScript runtime built on Chrome's V8 JavaScript engine. Node.js
-uses an event-driven, non-blocking I/O model that makes it lightweight and
-efficient. The Node.js package ecosystem, [npm][], is the largest ecosystem of
-open source libraries in the world.
+Node.js is a JavaScript runtime built on Chrome's V8 JavaScript engine. For
+more information on using Node.js, see the
+[Node.js Website][].
The Node.js project is supported by the
[Node.js Foundation](https://nodejs.org/en/foundation/). Contributions,
@@ -596,7 +595,6 @@ Previous releases may also have been signed with one of the following GPG keys:
* [Contributing to the project][]
* [Working Groups][]
-[npm]: https://www.npmjs.com
[Code of Conduct]: https://github.com/nodejs/admin/blob/master/CODE_OF_CONDUCT.md
[Contributing to the project]: CONTRIBUTING.md
[Node.js Help]: https://github.com/nodejs/help
From b2a2a55271b609fd09262509ec7f407006bff472 Mon Sep 17 00:00:00 2001
From: jvelezpo
Date: Thu, 25 Jan 2018 14:47:52 -0500
Subject: [PATCH 014/227] test: verify the shell option works properly on
execFile
Executing in a shell via execFile() is useful because it accepts the
arguments as an array instead of a single command string as exec()
requires. Depending on the circumstances, that can be convenient when
the arguments are already prepared.
PR-URL: https://github.com/nodejs/node/pull/18384
Reviewed-By: James M Snell
Reviewed-By: Luigi Pinca
Reviewed-By: Ruben Bridgewater
---
test/parallel/test-child-process-execfile.js | 8 ++++++++
test/sequential/test-child-process-execsync.js | 6 ++++++
2 files changed, 14 insertions(+)
diff --git a/test/parallel/test-child-process-execfile.js b/test/parallel/test-child-process-execfile.js
index 62cc7f534dc86b..a64128d6a3ab6b 100644
--- a/test/parallel/test-child-process-execfile.js
+++ b/test/parallel/test-child-process-execfile.js
@@ -6,6 +6,7 @@ const uv = process.binding('uv');
const fixtures = require('../common/fixtures');
const fixture = fixtures.path('exit.js');
+const execOpts = { encoding: 'utf8', shell: true };
{
execFile(
@@ -38,3 +39,10 @@ const fixture = fixtures.path('exit.js');
child.kill();
child.emit('close', code, null);
}
+
+{
+ // Verify the shell option works properly
+ execFile(process.execPath, [fixture, 0], execOpts, common.mustCall((err) => {
+ assert.ifError(err);
+ }));
+}
diff --git a/test/sequential/test-child-process-execsync.js b/test/sequential/test-child-process-execsync.js
index e7c978406c883e..970ed867abed93 100644
--- a/test/sequential/test-child-process-execsync.js
+++ b/test/sequential/test-child-process-execsync.js
@@ -8,6 +8,7 @@ const TIMER = 200;
const SLEEP = 2000;
const start = Date.now();
+const execOpts = { encoding: 'utf8', shell: true };
let err;
let caught = false;
@@ -103,3 +104,8 @@ assert.strictEqual(ret, `${msg}\n`);
return true;
});
}
+
+// Verify the shell option works properly
+assert.doesNotThrow(() => {
+ execFileSync(process.execPath, [], execOpts);
+});
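For context, a minimal sketch of the behaviour the new tests exercise (an
illustration only, not part of the patch; the inline script passed via -e
is just an example):

    'use strict';
    const { execFile } = require('child_process');

    // With `shell: true`, execFile() still takes its arguments as an array,
    // so there is no need to assemble and escape a single command string the
    // way exec() requires.
    const opts = { encoding: 'utf8', shell: true };
    execFile(process.execPath, ['-e', 'console.log(40 + 2)'], opts,
             (err, stdout) => {
               if (err) throw err;
               console.log(stdout.trim()); // '2' becomes '42' here
             });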
From cc7469eec8bd8e191a1c260f811addfb03e63196 Mon Sep 17 00:00:00 2001
From: Rod Vagg
Date: Tue, 9 Jan 2018 14:48:31 +1100
Subject: [PATCH 015/227] build: allow x86_64 as a dest_cpu alias for x64
x86_64 is a standard arch descriptor on Linux; allow it as an alias for
our preferred descriptor, x64.
PR-URL: https://github.com/nodejs/node/pull/18052
Reviewed-By: James M Snell
Reviewed-By: Gireesh Punathil
Reviewed-By: Gibson Fahnestock
Reviewed-By: Joyee Cheung
Reviewed-By: Colin Ihrig
Reviewed-By: Michael Dawson
Reviewed-By: Ruben Bridgewater
---
configure | 5 ++++-
1 file changed, 4 insertions(+), 1 deletion(-)
diff --git a/configure b/configure
index e67a15c4753dd3..ac1bdfa527a429 100755
--- a/configure
+++ b/configure
@@ -61,7 +61,7 @@ parser = optparse.OptionParser()
valid_os = ('win', 'mac', 'solaris', 'freebsd', 'openbsd', 'linux',
'android', 'aix')
valid_arch = ('arm', 'arm64', 'ia32', 'mips', 'mipsel', 'mips64el', 'ppc',
- 'ppc64', 'x32','x64', 'x86', 's390', 's390x')
+ 'ppc64', 'x32','x64', 'x86', 'x86_64', 's390', 's390x')
valid_arm_float_abi = ('soft', 'softfp', 'hard')
valid_arm_fpu = ('vfp', 'vfpv3', 'vfpv3-d16', 'neon')
valid_mips_arch = ('loongson', 'r1', 'r2', 'r6', 'rx')
@@ -825,6 +825,9 @@ def configure_node(o):
# the Makefile resets this to x86 afterward
if target_arch == 'x86':
target_arch = 'ia32'
+ # x86_64 is common across linuxes, allow it as an alias for x64
+ if target_arch == 'x86_64':
+ target_arch = 'x64'
o['variables']['host_arch'] = host_arch
o['variables']['target_arch'] = target_arch
o['variables']['node_byteorder'] = sys.byteorder
From 269c2f3ad9f6a1b48b7e7db7341abc7982bd6f6c Mon Sep 17 00:00:00 2001
From: Luigi Pinca
Date: Mon, 29 Jan 2018 10:23:02 +0100
Subject: [PATCH 016/227] net: remove redundant code from _writeGeneric()
The encoding is already handled by `Writable.prototype.write()`.
PR-URL: https://github.com/nodejs/node/pull/18429
Reviewed-By: Matteo Collina
Reviewed-By: James M Snell
Reviewed-By: Anna Henningsen
Reviewed-By: Colin Ihrig
Reviewed-By: Ruben Bridgewater
---
lib/net.js | 8 +-------
1 file changed, 1 insertion(+), 7 deletions(-)
diff --git a/lib/net.js b/lib/net.js
index ac036534d6b402..e91542fb2467a9 100644
--- a/lib/net.js
+++ b/lib/net.js
@@ -710,13 +710,7 @@ Socket.prototype._writeGeneric = function(writev, data, encoding, cb) {
// Retain chunks
if (err === 0) req._chunks = chunks;
} else {
- var enc;
- if (data instanceof Buffer) {
- enc = 'buffer';
- } else {
- enc = encoding;
- }
- err = createWriteReq(req, this._handle, data, enc);
+ err = createWriteReq(req, this._handle, data, encoding);
}
if (err)
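A small sketch of why the removed branch was redundant (my own illustration,
not part of the patch): the Writable machinery already reports 'buffer' as
the encoding whenever the chunk is a Buffer, so _writeGeneric() does not need
to detect that case itself. The decodeStrings: false option below is assumed
to mirror what net.Socket does.

    'use strict';
    const { Writable } = require('stream');

    const sink = new Writable({
      decodeStrings: false, // pass strings through to write() as-is
      write(chunk, encoding, callback) {
        // Buffers arrive with encoding === 'buffer'; strings keep the
        // encoding given to write().
        console.log(Buffer.isBuffer(chunk) ? 'Buffer' : 'string', encoding);
        callback();
      }
    });

    sink.write(Buffer.from('abc')); // Buffer buffer
    sink.write('abc', 'latin1');    // string latin1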
From f3c6febedfa1249828ee29c193d241b2c93a9b64 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Tobias=20Nie=C3=9Fen?=
Date: Sun, 31 Dec 2017 17:39:30 +0100
Subject: [PATCH 017/227] test: update references to archived repository
Backport-PR-URL: https://github.com/nodejs/node/pull/19120
PR-URL: https://github.com/nodejs/node/pull/17924
Reviewed-By: Anna Henningsen
Reviewed-By: Colin Ihrig
Reviewed-By: Jon Moss
Reviewed-By: Luigi Pinca
Reviewed-By: James M Snell
Reviewed-By: Ruben Bridgewater
---
test/parallel/test-assert.js | 4 ++--
test/parallel/test-buffer-alloc.js | 14 +++++++++-----
test/parallel/test-cluster-worker-init.js | 2 +-
test/parallel/test-crypto-binary-default.js | 3 ++-
test/parallel/test-crypto-cipher-decipher.js | 11 +++++++----
test/parallel/test-crypto-hash.js | 3 ++-
test/parallel/test-crypto-random.js | 5 +++--
test/parallel/test-crypto.js | 4 ++--
test/parallel/test-dgram-ref.js | 2 +-
test/parallel/test-dns-regress-7070.js | 3 ++-
test/parallel/test-timers-unref.js | 3 ++-
test/parallel/test-tls-set-encoding.js | 2 +-
test/sequential/test-child-process-execsync.js | 6 ++++--
test/sequential/test-module-loading.js | 6 ++++--
14 files changed, 42 insertions(+), 26 deletions(-)
diff --git a/test/parallel/test-assert.js b/test/parallel/test-assert.js
index aa51eb7e523e2e..55376328b026f2 100644
--- a/test/parallel/test-assert.js
+++ b/test/parallel/test-assert.js
@@ -522,7 +522,7 @@ testAssertionMessage({a: undefined, b: null}, '{ a: undefined, b: null }');
testAssertionMessage({a: NaN, b: Infinity, c: -Infinity},
'{ a: NaN, b: Infinity, c: -Infinity }');
-// #2893
+// https://github.com/nodejs/node-v0.x-archive/issues/2893
try {
// eslint-disable-next-line no-restricted-syntax
assert.throws(function() {
@@ -534,7 +534,7 @@ try {
}
assert.ok(threw);
-// #5292
+// https://github.com/nodejs/node-v0.x-archive/issues/5292
try {
assert.strictEqual(1, 2);
} catch (e) {
diff --git a/test/parallel/test-buffer-alloc.js b/test/parallel/test-buffer-alloc.js
index d0f02b114c1f97..e9053828c885af 100644
--- a/test/parallel/test-buffer-alloc.js
+++ b/test/parallel/test-buffer-alloc.js
@@ -629,7 +629,8 @@ assert.strictEqual('', x.inspect());
}
{
- // #1210 Test UTF-8 string includes null character
+ // https://github.com/nodejs/node-v0.x-archive/pull/1210
+ // Test UTF-8 string includes null character
let buf = Buffer.from('\0');
assert.strictEqual(buf.length, 1);
buf = Buffer.from('\0\0');
@@ -653,7 +654,8 @@ assert.strictEqual('', x.inspect());
}
{
- // #243 Test write() with maxLength
+ // https://github.com/nodejs/node-v0.x-archive/issues/243
+ // Test write() with maxLength
const buf = Buffer.allocUnsafe(4);
buf.fill(0xFF);
assert.strictEqual(buf.write('abcd', 1, 2, 'utf8'), 2);
@@ -937,11 +939,13 @@ assert.throws(() => Buffer.allocUnsafe(8).writeFloatLE(0.0, -1), RangeError);
assert.strictEqual(buf.readIntBE(0, 5), -0x0012000000);
}
-// Regression test for #5482: should throw but not assert in C++ land.
+// Regression test for https://github.com/nodejs/node-v0.x-archive/issues/5482:
+// should throw but not assert in C++ land.
assert.throws(() => Buffer.from('', 'buffer'), TypeError);
-// Regression test for #6111. Constructing a buffer from another buffer
-// should a) work, and b) not corrupt the source buffer.
+// Regression test for https://github.com/nodejs/node-v0.x-archive/issues/6111.
+// Constructing a buffer from another buffer should a) work, and b) not corrupt
+// the source buffer.
{
const a = [...Array(128).keys()]; // [0, 1, 2, 3, ... 126, 127]
const b = Buffer.from(a);
diff --git a/test/parallel/test-cluster-worker-init.js b/test/parallel/test-cluster-worker-init.js
index 4bd43c0ae58807..5a36956832ac69 100644
--- a/test/parallel/test-cluster-worker-init.js
+++ b/test/parallel/test-cluster-worker-init.js
@@ -21,7 +21,7 @@ if (cluster.isMaster) {
worker.send(msg);
});
} else {
- // GH #7998
+ // https://github.com/nodejs/node-v0.x-archive/issues/7998
cluster.worker.on('message', (message) => {
process.send(message === msg);
});
diff --git a/test/parallel/test-crypto-binary-default.js b/test/parallel/test-crypto-binary-default.js
index ce29c4d35d80f8..bfe4be9d822b4e 100644
--- a/test/parallel/test-crypto-binary-default.js
+++ b/test/parallel/test-crypto-binary-default.js
@@ -393,7 +393,8 @@ fileStream.on('close', common.mustCall(function() {
);
}));
-// Issue #2227: unknown digest method should throw an error.
+// Unknown digest method should throw an error:
+// https://github.com/nodejs/node-v0.x-archive/issues/2227
assert.throws(function() {
crypto.createHash('xyzzy');
}, /^Error: Digest method not supported$/);
diff --git a/test/parallel/test-crypto-cipher-decipher.js b/test/parallel/test-crypto-cipher-decipher.js
index 14a1601981f353..94dd29614bb656 100644
--- a/test/parallel/test-crypto-cipher-decipher.js
+++ b/test/parallel/test-crypto-cipher-decipher.js
@@ -70,7 +70,8 @@ testCipher1(Buffer.from('MySecretKey123'));
testCipher2('0123456789abcdef');
testCipher2(Buffer.from('0123456789abcdef'));
-// Base64 padding regression test, see #4837.
+// Base64 padding regression test, see
+// https://github.com/nodejs/node-v0.x-archive/issues/4837.
{
const c = crypto.createCipher('aes-256-cbc', 'secret');
const s = c.update('test', 'utf8', 'base64') + c.final('base64');
@@ -78,7 +79,7 @@ testCipher2(Buffer.from('0123456789abcdef'));
}
// Calling Cipher.final() or Decipher.final() twice should error but
-// not assert. See #4886.
+// not assert. See https://github.com/nodejs/node-v0.x-archive/issues/4886.
{
const c = crypto.createCipher('aes-256-cbc', 'secret');
try { c.final('xxx'); } catch (e) { /* Ignore. */ }
@@ -90,14 +91,16 @@ testCipher2(Buffer.from('0123456789abcdef'));
try { d.final('xxx'); } catch (e) { /* Ignore. */ }
}
-// Regression test for #5482: string to Cipher#update() should not assert.
+// Regression test for https://github.com/nodejs/node-v0.x-archive/issues/5482:
+// string to Cipher#update() should not assert.
{
const c = crypto.createCipher('aes192', '0123456789abcdef');
c.update('update');
c.final();
}
-// #5655 regression tests, 'utf-8' and 'utf8' are identical.
+// https://github.com/nodejs/node-v0.x-archive/issues/5655 regression tests,
+// 'utf-8' and 'utf8' are identical.
{
let c = crypto.createCipher('aes192', '0123456789abcdef');
c.update('update', ''); // Defaults to "utf8".
diff --git a/test/parallel/test-crypto-hash.js b/test/parallel/test-crypto-hash.js
index 25e568910ca068..300d6ba779eddf 100644
--- a/test/parallel/test-crypto-hash.js
+++ b/test/parallel/test-crypto-hash.js
@@ -104,7 +104,8 @@ fileStream.on('close', common.mustCall(function() {
'Test SHA1 of sample.png');
}));
-// Issue #2227: unknown digest method should throw an error.
+// Issue https://github.com/nodejs/node-v0.x-archive/issues/2227: unknown digest
+// method should throw an error.
assert.throws(function() {
crypto.createHash('xyzzy');
}, /Digest method not supported/);
diff --git a/test/parallel/test-crypto-random.js b/test/parallel/test-crypto-random.js
index 386602e4a13f2d..65918bd1ad3cd2 100644
--- a/test/parallel/test-crypto-random.js
+++ b/test/parallel/test-crypto-random.js
@@ -230,8 +230,9 @@ const expectedErrorRegexp = /^TypeError: size must be a number >= 0$/;
}
}
-// #5126, "FATAL ERROR: v8::Object::SetIndexedPropertiesToExternalArrayData()
-// length exceeds max acceptable value"
+// https://github.com/nodejs/node-v0.x-archive/issues/5126,
+// "FATAL ERROR: v8::Object::SetIndexedPropertiesToExternalArrayData() length
+// exceeds max acceptable value"
assert.throws(function() {
crypto.randomBytes((-1 >>> 0) + 1);
}, /^TypeError: size must be a number >= 0$/);
diff --git a/test/parallel/test-crypto.js b/test/parallel/test-crypto.js
index 94443c03d1708e..46409244b5efb0 100644
--- a/test/parallel/test-crypto.js
+++ b/test/parallel/test-crypto.js
@@ -110,8 +110,8 @@ testImmutability(tls.getCiphers);
testImmutability(crypto.getHashes);
testImmutability(crypto.getCurves);
-// Regression tests for #5725: hex input that's not a power of two should
-// throw, not assert in C++ land.
+// Regression tests for https://github.com/nodejs/node-v0.x-archive/pull/5725:
+// hex input that's not a power of two should throw, not assert in C++ land.
assert.throws(function() {
crypto.createCipher('aes192', 'test').update('0', 'hex');
}, common.hasFipsCrypto ? /not supported in FIPS mode/ : /Bad input string/);
diff --git a/test/parallel/test-dgram-ref.js b/test/parallel/test-dgram-ref.js
index 7b79340f924b06..6e2af6f2503e1f 100644
--- a/test/parallel/test-dgram-ref.js
+++ b/test/parallel/test-dgram-ref.js
@@ -2,7 +2,7 @@
const common = require('../common');
const dgram = require('dgram');
-// should not hang, see #1282
+// should not hang, see https://github.com/nodejs/node-v0.x-archive/issues/1282
dgram.createSocket('udp4');
dgram.createSocket('udp6');
diff --git a/test/parallel/test-dns-regress-7070.js b/test/parallel/test-dns-regress-7070.js
index eb939b47559a9d..4b9d2bfbef27a9 100644
--- a/test/parallel/test-dns-regress-7070.js
+++ b/test/parallel/test-dns-regress-7070.js
@@ -3,7 +3,8 @@ require('../common');
const assert = require('assert');
const dns = require('dns');
-// Should not raise assertion error. Issue #7070
+// Should not raise assertion error.
+// Issue https://github.com/nodejs/node-v0.x-archive/issues/7070
assert.throws(() => dns.resolveNs([]), // bad name
/^Error: "name" argument must be a string$/);
assert.throws(() => dns.resolveNs(''), // bad callback
diff --git a/test/parallel/test-timers-unref.js b/test/parallel/test-timers-unref.js
index e4bca54b7d7b03..a62718043d8544 100644
--- a/test/parallel/test-timers-unref.js
+++ b/test/parallel/test-timers-unref.js
@@ -50,7 +50,8 @@ const check_unref = setInterval(() => {
setInterval(() => timeout.unref(), SHORT_TIME);
}
-// Should not assert on args.Holder()->InternalFieldCount() > 0. See #4261.
+// Should not assert on args.Holder()->InternalFieldCount() > 0.
+// See https://github.com/nodejs/node-v0.x-archive/issues/4261.
{
const t = setInterval(() => {}, 1);
process.nextTick(t.unref.bind({}));
diff --git a/test/parallel/test-tls-set-encoding.js b/test/parallel/test-tls-set-encoding.js
index dae4d2c31c2c64..637618b5cb29ac 100644
--- a/test/parallel/test-tls-set-encoding.js
+++ b/test/parallel/test-tls-set-encoding.js
@@ -42,7 +42,7 @@ server.listen(0, function() {
client.on('close', function() {
// readyState is deprecated but we want to make
// sure this isn't triggering an assert in lib/net.js
- // See issue #1069.
+ // See https://github.com/nodejs/node-v0.x-archive/issues/1069.
assert.strictEqual('closed', client.readyState);
// Confirming the buffer string is encoded in ASCII
diff --git a/test/sequential/test-child-process-execsync.js b/test/sequential/test-child-process-execsync.js
index 970ed867abed93..8c751e9c8a2e57 100644
--- a/test/sequential/test-child-process-execsync.js
+++ b/test/sequential/test-child-process-execsync.js
@@ -73,7 +73,8 @@ ret = execFileSync(process.execPath, args, { encoding: 'utf8' });
assert.strictEqual(ret, `${msg}\n`);
-// Verify that the cwd option works - GH #7824
+// Verify that the cwd option works.
+// See https://github.com/nodejs/node-v0.x-archive/issues/7824.
{
const cwd = common.rootDir;
const cmd = common.isWindows ? 'echo %cd%' : 'pwd';
@@ -82,7 +83,8 @@ assert.strictEqual(ret, `${msg}\n`);
assert.strictEqual(response.toString().trim(), cwd);
}
-// Verify that stderr is not accessed when stdio = 'ignore' - GH #7966
+// Verify that stderr is not accessed when stdio = 'ignore'.
+// See https://github.com/nodejs/node-v0.x-archive/issues/7966.
{
assert.throws(function() {
execSync('exit -1', {stdio: 'ignore'});
diff --git a/test/sequential/test-module-loading.js b/test/sequential/test-module-loading.js
index e16aef6c02a01e..6b21e3ed21abc5 100644
--- a/test/sequential/test-module-loading.js
+++ b/test/sequential/test-module-loading.js
@@ -181,7 +181,8 @@ const child = require('../fixtures/module-require/child/');
assert.strictEqual(child.loaded, parent.loaded);
-// #1357 Loading JSON files with require()
+// Loading JSON files with require()
+// See https://github.com/nodejs/node-v0.x-archive/issues/1357.
const json = require('../fixtures/packages/main/package.json');
assert.deepStrictEqual(json, {
name: 'package-name',
@@ -289,7 +290,8 @@ process.on('exit', function() {
});
-// #1440 Loading files with a byte order marker.
+// Loading files with a byte order marker.
+// See https://github.com/nodejs/node-v0.x-archive/issues/1440.
assert.strictEqual(42, require('../fixtures/utf8-bom.js'));
assert.strictEqual(42, require('../fixtures/utf8-bom.json'));
From 72a5710b713320ca1b69f2395bfea2b1e775afa0 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Tobias=20Nie=C3=9Fen?=
Date: Sun, 31 Dec 2017 18:06:40 +0100
Subject: [PATCH 018/227] readline: update references to archived repository
Backport-PR-URL: https://github.com/nodejs/node/pull/19120
PR-URL: https://github.com/nodejs/node/pull/17924
Reviewed-By: Anna Henningsen
Reviewed-By: Colin Ihrig
Reviewed-By: Jon Moss
Reviewed-By: Luigi Pinca
Reviewed-By: James M Snell
Reviewed-By: Ruben Bridgewater
---
lib/readline.js | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/lib/readline.js b/lib/readline.js
index ca99fe15c0a593..a3a3452abacc8c 100644
--- a/lib/readline.js
+++ b/lib/readline.js
@@ -700,7 +700,8 @@ Interface.prototype._moveCursor = function(dx) {
Interface.prototype._ttyWrite = function(s, key) {
key = key || {};
- // Ignore escape key - Fixes #2876
+ // Ignore escape key, fixes
+ // https://github.com/nodejs/node-v0.x-archive/issues/2876.
if (key.name === 'escape') return;
if (key.ctrl && key.shift) {
From 49d8c2e8aef6e2e5d5f80cc7f0adf313cb1df5d5 Mon Sep 17 00:00:00 2001
From: Yihong Wang
Date: Mon, 4 Dec 2017 16:07:53 -0800
Subject: [PATCH 019/227] build: refine static and shared lib build
Refine the static and shared lib build process in order
to integrate static and shared lib verification into CI.
When building the static or shared lib, the node executable
is now still built as well, and it links against that lib.
Signed-off-by: Yihong Wang
Refs: https://github.com/nodejs/node/issues/14158
Backport-PR-URL: https://github.com/nodejs/node/pull/19050
PR-URL: https://github.com/nodejs/node/pull/17604
Reviewed-By: Bartosz Sosnowski
Reviewed-By: Ben Noordhuis
Reviewed-By: Daniel Bevenius
---
configure | 9 +-
node.gyp | 442 +++++++++++++++++++++++++++++++++++++++--------
node.gypi | 278 +++++++++++------------------
src/node_main.cc | 1 +
4 files changed, 479 insertions(+), 251 deletions(-)
diff --git a/configure b/configure
index ac1bdfa527a429..dd72cc332436a2 100755
--- a/configure
+++ b/configure
@@ -845,7 +845,6 @@ def configure_node(o):
configure_mips(o)
if flavor == 'aix':
- o['variables']['node_core_target_name'] = 'node_base'
o['variables']['node_target_type'] = 'static_library'
if target_arch in ('x86', 'x64', 'ia32', 'x32'):
@@ -945,6 +944,13 @@ def configure_node(o):
else:
o['variables']['coverage'] = 'false'
+ if options.shared:
+ o['variables']['node_target_type'] = 'shared_library'
+ elif options.enable_static:
+ o['variables']['node_target_type'] = 'static_library'
+ else:
+ o['variables']['node_target_type'] = 'executable'
+
def configure_library(lib, output):
shared_lib = 'shared_' + lib
output['variables']['node_' + shared_lib] = b(getattr(options, shared_lib))
@@ -1440,6 +1446,7 @@ config = {
'BUILDTYPE': 'Debug' if options.debug else 'Release',
'USE_XCODE': str(int(options.use_xcode or 0)),
'PYTHON': sys.executable,
+ 'NODE_TARGET_TYPE': variables['node_target_type'],
}
if options.prefix:
diff --git a/node.gyp b/node.gyp
index 8a7af9bfd1e50a..7dff6bf3f22c1e 100644
--- a/node.gyp
+++ b/node.gyp
@@ -21,6 +21,8 @@
'node_v8_options%': '',
'node_enable_v8_vtunejit%': 'false',
'node_core_target_name%': 'node',
+ 'node_lib_target_name%': 'node_lib',
+ 'node_intermediate_lib_type%': 'static_library',
'library_files': [
'lib/internal/bootstrap_node.js',
'lib/_debug_agent.js',
@@ -111,6 +113,17 @@
'conditions': [
[ 'node_shared=="true"', {
'node_target_type%': 'shared_library',
+ 'conditions': [
+ ['OS=="aix"', {
+ # For AIX, always generate static library first,
+ # It needs an extra step to generate exp and
+ # then use both static lib and exp to create
+ # shared lib.
+ 'node_intermediate_lib_type': 'static_library',
+ }, {
+ 'node_intermediate_lib_type': 'shared_library',
+ }],
+ ],
}, {
'node_target_type%': 'executable',
}],
@@ -127,7 +140,81 @@
'targets': [
{
'target_name': '<(node_core_target_name)',
- 'type': '<(node_target_type)',
+ 'type': 'executable',
+ 'sources': [
+ 'src/node_main.cc'
+ ],
+ 'include_dirs': [
+ 'src',
+ 'deps/v8/include',
+ ],
+ 'conditions': [
+ [ 'node_intermediate_lib_type=="static_library" and '
+ 'node_shared=="true" and OS=="aix"', {
+ # For AIX, shared lib is linked by static lib and .exp. In the
+ # case here, the executable needs to link to shared lib.
+ # Therefore, use 'node_aix_shared' target to generate the
+ # shared lib and then executable.
+ 'dependencies': [ 'node_aix_shared' ],
+ }, {
+ 'dependencies': [ '<(node_lib_target_name)' ],
+ }],
+ [ 'node_intermediate_lib_type=="static_library" and '
+ 'node_shared=="false"', {
+ 'includes': [
+ 'node.gypi'
+ ],
+ 'xcode_settings': {
+ 'OTHER_LDFLAGS': [
+ '-Wl,-force_load,<(PRODUCT_DIR)/<(STATIC_LIB_PREFIX)'
+ '<(node_core_target_name)<(STATIC_LIB_SUFFIX)',
+ ],
+ },
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'AdditionalOptions': [
+ '/WHOLEARCHIVE:<(PRODUCT_DIR)\\lib\\'
+ '<(node_core_target_name)<(STATIC_LIB_SUFFIX)',
+ ],
+ },
+ },
+ 'conditions': [
+ ['OS in "linux freebsd openbsd solaris android"', {
+ 'ldflags': [
+ '-Wl,--whole-archive,<(OBJ_DIR)/<(STATIC_LIB_PREFIX)'
+ '<(node_core_target_name)<(STATIC_LIB_SUFFIX)',
+ '-Wl,--no-whole-archive',
+ ],
+ }],
+ [ 'OS=="win"', {
+ 'sources': [ 'src/res/node.rc' ],
+ 'conditions': [
+ [ 'node_use_etw=="true"', {
+ 'sources': [
+ 'tools/msvs/genfiles/node_etw_provider.rc'
+ ],
+ }],
+ [ 'node_use_perfctr=="true"', {
+ 'sources': [
+ 'tools/msvs/genfiles/node_perfctr_provider.rc',
+ ],
+ }]
+ ],
+ }],
+ ],
+ }],
+ [ 'node_intermediate_lib_type=="shared_library" and OS=="win"', {
+ # On Windows, having the same name for both executable and shared
+ # lib causes filename collision. Need a different PRODUCT_NAME for
+ # the executable and rename it back to node.exe later
+ 'product_name': '<(node_core_target_name)-win',
+ }],
+ ],
+ },
+ {
+ 'target_name': '<(node_lib_target_name)',
+ 'type': '<(node_intermediate_lib_type)',
+ 'product_name': '<(node_core_target_name)',
'dependencies': [
'node_js2c#host',
@@ -139,7 +226,6 @@
'include_dirs': [
'src',
- 'tools/msvs/genfiles',
'deps/uv/src/ares',
'<(SHARED_INTERMEDIATE_DIR)',
],
@@ -161,7 +247,6 @@
'src/node_contextify.cc',
'src/node_file.cc',
'src/node_http_parser.cc',
- 'src/node_main.cc',
'src/node_os.cc',
'src/node_revert.cc',
'src/node_url.cc',
@@ -249,7 +334,168 @@
'conditions': [
[ 'node_shared=="true" and node_module_version!="" and OS!="win"', {
'product_extension': '<(shlib_suffix)',
- }]
+ }],
+ ['node_shared=="true" and OS=="aix"', {
+ 'product_name': 'node_base',
+ }],
+ [ 'v8_inspector=="true"', {
+ 'defines': [
+ 'HAVE_INSPECTOR=1',
+ ],
+ 'sources': [
+ 'src/inspector_agent.cc',
+ 'src/inspector_socket.cc',
+ 'src/inspector_agent.h',
+ 'src/inspector_socket.h',
+ ],
+ 'dependencies': [
+ 'deps/v8_inspector/third_party/v8_inspector/platform/'
+ 'v8_inspector/v8_inspector.gyp:v8_inspector_stl',
+ 'deps/v8_inspector/third_party/v8_inspector/platform/'
+ 'v8_inspector/v8_inspector.gyp:protocol_sources_stl',
+ 'v8_inspector_compress_protocol_json#host',
+ ],
+ 'include_dirs': [
+ 'deps/v8_inspector/third_party/v8_inspector',
+ '<(SHARED_INTERMEDIATE_DIR)/blink', # for inspector
+ ],
+ }, {
+ 'defines': [ 'HAVE_INSPECTOR=0' ]
+ }],
+ [ 'OS=="win"', {
+ 'sources': [
+ 'src/backtrace_win32.cc',
+ ],
+ 'conditions': [
+ [ 'node_intermediate_lib_type!="static_library"', {
+ 'sources': [
+ 'src/res/node.rc',
+ ],
+ }],
+ ],
+ 'defines!': [
+ 'NODE_PLATFORM="win"',
+ ],
+ 'defines': [
+ 'FD_SETSIZE=1024',
+ # we need to use node's preferred "win32" rather than gyp's preferred "win"
+ 'NODE_PLATFORM="win32"',
+ '_UNICODE=1',
+ ],
+ 'libraries': [ '-lpsapi.lib' ]
+ }, { # POSIX
+ 'defines': [ '__POSIX__' ],
+ 'sources': [ 'src/backtrace_posix.cc' ],
+ }],
+ [ 'node_use_etw=="true"', {
+ 'defines': [ 'HAVE_ETW=1' ],
+ 'dependencies': [ 'node_etw' ],
+ 'include_dirs': [
+ 'src',
+ 'tools/msvs/genfiles',
+ '<(SHARED_INTERMEDIATE_DIR)' # for node_natives.h
+ ],
+ 'sources': [
+ 'src/node_win32_etw_provider.h',
+ 'src/node_win32_etw_provider-inl.h',
+ 'src/node_win32_etw_provider.cc',
+ 'src/node_dtrace.cc',
+ 'tools/msvs/genfiles/node_etw_provider.h',
+ ],
+ 'conditions': [
+ ['node_intermediate_lib_type != "static_library"', {
+ 'sources': [
+ 'tools/msvs/genfiles/node_etw_provider.rc',
+ ],
+ }],
+ ],
+ }],
+ [ 'node_use_perfctr=="true"', {
+ 'defines': [ 'HAVE_PERFCTR=1' ],
+ 'dependencies': [ 'node_perfctr' ],
+ 'include_dirs': [
+ 'src',
+ 'tools/msvs/genfiles',
+ '<(SHARED_INTERMEDIATE_DIR)' # for node_natives.h
+ ],
+ 'sources': [
+ 'src/node_win32_perfctr_provider.h',
+ 'src/node_win32_perfctr_provider.cc',
+ 'src/node_counters.cc',
+ 'src/node_counters.h',
+ ],
+ 'conditions': [
+ ['node_intermediate_lib_type != "static_library"', {
+ 'sources': [
+ 'tools/msvs/genfiles/node_perfctr_provider.rc',
+ ],
+ }],
+ ],
+ }],
+ [ 'node_use_lttng=="true"', {
+ 'defines': [ 'HAVE_LTTNG=1' ],
+ 'include_dirs': [ '<(SHARED_INTERMEDIATE_DIR)' ],
+ 'libraries': [ '-llttng-ust' ],
+ 'include_dirs': [
+ 'src',
+ 'tools/msvs/genfiles',
+ '<(SHARED_INTERMEDIATE_DIR)' # for node_natives.h
+ ],
+ 'sources': [
+ 'src/node_lttng.cc'
+ ],
+ }],
+ [ 'node_use_dtrace=="true"', {
+ 'defines': [ 'HAVE_DTRACE=1' ],
+ 'dependencies': [
+ 'node_dtrace_header',
+ 'specialize_node_d',
+ ],
+ 'include_dirs': [ '<(SHARED_INTERMEDIATE_DIR)' ],
+ #
+ # DTrace is supported on linux, solaris, mac, and bsd. There are
+ # three object files associated with DTrace support, but they're
+ # not all used all the time:
+ #
+ # node_dtrace.o all configurations
+ # node_dtrace_ustack.o not supported on mac and linux
+ # node_dtrace_provider.o All except OS X. "dtrace -G" is not
+ # used on OS X.
+ #
+ # Note that node_dtrace_provider.cc and node_dtrace_ustack.cc do not
+ # actually exist. They're listed here to trick GYP into linking the
+ # corresponding object files into the final "node" executable. These
+ # object files are generated by "dtrace -G" using custom actions
+ # below, and the GYP-generated Makefiles will properly build them when
+ # needed.
+ #
+ 'sources': [ 'src/node_dtrace.cc' ],
+ 'conditions': [
+ [ 'OS=="linux"', {
+ 'sources': [
+ '<(SHARED_INTERMEDIATE_DIR)/node_dtrace_provider.o'
+ ],
+ }],
+ [ 'OS!="mac" and OS!="linux"', {
+ 'sources': [
+ 'src/node_dtrace_ustack.cc',
+ 'src/node_dtrace_provider.cc',
+ ]
+ }
+ ] ]
+ } ],
+ [ 'node_use_openssl=="true"', {
+ 'sources': [
+ 'src/node_crypto.cc',
+ 'src/node_crypto_bio.cc',
+ 'src/node_crypto_clienthello.cc',
+ 'src/node_crypto.h',
+ 'src/node_crypto_bio.h',
+ 'src/node_crypto_clienthello.h',
+ 'src/tls_wrap.cc',
+ 'src/tls_wrap.h'
+ ],
+ }],
],
'direct_dependent_settings': {
'defines': [
@@ -398,7 +644,7 @@
[ 'node_use_dtrace=="false" and node_use_etw=="false"', {
'inputs': [ 'src/notrace_macros.py' ]
}],
- ['node_use_lttng=="false"', {
+ [ 'node_use_lttng=="false"', {
'inputs': [ 'src/nolttng_macros.py' ]
}],
[ 'node_use_perfctr=="false"', {
@@ -451,10 +697,10 @@
{
'action_name': 'node_dtrace_provider_o',
'inputs': [
- '<(OBJ_DIR)/node/src/node_dtrace.o',
+ '<(OBJ_DIR)/<(node_lib_target_name)/src/node_dtrace.o',
],
'outputs': [
- '<(OBJ_DIR)/node/src/node_dtrace_provider.o'
+ '<(OBJ_DIR)/<(node_lib_target_name)/src/node_dtrace_provider.o'
],
'action': [ 'dtrace', '-G', '-xnolibs', '-s', 'src/node_provider.d',
'<@(_inputs)', '-o', '<@(_outputs)' ]
@@ -504,7 +750,7 @@
'<(SHARED_INTERMEDIATE_DIR)/v8constants.h'
],
'outputs': [
- '<(OBJ_DIR)/node/src/node_dtrace_ustack.o'
+ '<(OBJ_DIR)/<(node_lib_target_name)/src/node_dtrace_ustack.o'
],
'conditions': [
[ 'target_arch=="ia32" or target_arch=="arm"', {
@@ -551,12 +797,41 @@
} ],
]
},
+ {
+ # When using shared lib to build executable in Windows, in order to avoid
+ # filename collision, the executable name is node-win.exe. Need to rename
+ # it back to node.exe
+ 'target_name': 'rename_node_bin_win',
+ 'type': 'none',
+ 'dependencies': [
+ '<(node_core_target_name)',
+ ],
+ 'conditions': [
+ [ 'OS=="win" and node_intermediate_lib_type=="shared_library"', {
+ 'actions': [
+ {
+ 'action_name': 'rename_node_bin_win',
+ 'inputs': [
+ '<(PRODUCT_DIR)/<(node_core_target_name)-win.exe'
+ ],
+ 'outputs': [
+ '<(PRODUCT_DIR)/<(node_core_target_name).exe',
+ ],
+ 'action': [
+ 'mv', '<@(_inputs)', '<@(_outputs)',
+ ],
+ },
+ ],
+ } ],
+ ]
+ },
{
'target_name': 'cctest',
'type': 'executable',
'dependencies': [
'<(node_core_target_name)',
+ 'rename_node_bin_win',
'deps/gtest/gtest.gyp:gtest',
'node_js2c#host',
'node_dtrace_header',
@@ -565,18 +840,18 @@
],
'variables': {
- 'OBJ_PATH': '<(OBJ_DIR)/node/src',
- 'OBJ_GEN_PATH': '<(OBJ_DIR)/node/gen',
+ 'OBJ_PATH': '<(OBJ_DIR)/<(node_lib_target_name)/src',
+ 'OBJ_GEN_PATH': '<(OBJ_DIR)/<(node_lib_target_name)/gen',
'OBJ_SUFFIX': 'o',
'conditions': [
['OS=="win"', {
- 'OBJ_PATH': '<(OBJ_DIR)/node',
- 'OBJ_GEN_PATH': '<(OBJ_DIR)/node',
+ 'OBJ_PATH': '<(OBJ_DIR)/<(node_lib_target_name)',
+ 'OBJ_GEN_PATH': '<(OBJ_DIR)/<(node_lib_target_name)',
'OBJ_SUFFIX': 'obj',
}],
['OS=="aix"', {
- 'OBJ_PATH': '<(OBJ_DIR)/node_base/src',
- 'OBJ_GEN_PATH': '<(OBJ_DIR)/node_base/gen',
+ 'OBJ_PATH': '<(OBJ_DIR)/<(node_lib_target_name)/src',
+ 'OBJ_GEN_PATH': '<(OBJ_DIR)/<(node_lib_target_name)/gen',
}],
],
},
@@ -628,41 +903,91 @@
'test/cctest/test_url.cc'
],
- 'sources!': [
- 'src/node_main.cc'
- ],
-
'conditions': [
+ [ 'node_use_openssl=="true"', {
+ 'conditions': [
+ ['node_target_type!="static_library"', {
+ 'libraries': [
+ '<(OBJ_PATH)/node_crypto.<(OBJ_SUFFIX)',
+ '<(OBJ_PATH)/node_crypto_bio.<(OBJ_SUFFIX)',
+ '<(OBJ_PATH)/node_crypto_clienthello.<(OBJ_SUFFIX)',
+ '<(OBJ_PATH)/tls_wrap.<(OBJ_SUFFIX)',
+ ],
+ }],
+ ],
+ 'defines': [
+ 'HAVE_OPENSSL=1',
+ ],
+ }],
+ [ 'node_use_perfctr=="true"', {
+ 'defines': [ 'HAVE_PERFCTR=1' ],
+ 'libraries': [
+ '<(OBJ_PATH)/node_counters.<(OBJ_SUFFIX)',
+ '<(OBJ_PATH)/node_win32_perfctr_provider.<(OBJ_SUFFIX)',
+ ],
+ }],
['v8_inspector=="true"', {
'sources': [
'test/cctest/test_inspector_socket.cc',
],
+ 'dependencies': [
+ 'deps/v8_inspector/third_party/v8_inspector/platform/'
+ 'v8_inspector/v8_inspector.gyp:v8_inspector_stl',
+ 'deps/v8_inspector/third_party/v8_inspector/platform/'
+ 'v8_inspector/v8_inspector.gyp:protocol_sources_stl',
+ 'v8_inspector_compress_protocol_json#host',
+ ],
+ 'include_dirs': [
+ 'deps/v8_inspector/third_party/v8_inspector',
+ '<(SHARED_INTERMEDIATE_DIR)/blink', # for inspector
+ ],
+ 'libraries': [
+ '<(OBJ_PATH)/inspector_agent.<(OBJ_SUFFIX)',
+ '<(OBJ_PATH)/inspector_socket.<(OBJ_SUFFIX)',
+ ],
'conditions': [
[ 'node_shared_openssl=="false" and node_shared=="false"', {
'dependencies': [
'deps/openssl/openssl.gyp:openssl'
]
}],
- [ 'node_shared_http_parser=="false"', {
- 'dependencies': [
- 'deps/http_parser/http_parser.gyp:http_parser'
+ ]
+ }],
+ [ 'node_use_dtrace=="true"', {
+ 'libraries': [
+ '<(OBJ_PATH)/node_dtrace.<(OBJ_SUFFIX)',
+ ],
+ 'conditions': [
+ ['OS!="mac" and OS!="linux"', {
+ 'libraries': [
+ '<(OBJ_PATH)/node_dtrace_provider.<(OBJ_SUFFIX)',
+ '<(OBJ_PATH)/node_dtrace_ustack.<(OBJ_SUFFIX)',
]
}],
- [ 'node_shared_libuv=="false"', {
- 'dependencies': [
- 'deps/uv/uv.gyp:libuv'
+ ['OS=="linux"', {
+ 'libraries': [
+ '<(SHARED_INTERMEDIATE_DIR)/node_dtrace_provider.<(OBJ_SUFFIX)',
]
+ }],
+ ],
+ }, {
+ 'conditions': [
+ [ 'node_use_etw=="true" and OS=="win"', {
+ 'libraries': [
+ '<(OBJ_PATH)/node_dtrace.<(OBJ_SUFFIX)',
+ '<(OBJ_PATH)/node_win32_etw_provider.<(OBJ_SUFFIX)',
+ ],
}]
]
}],
- [ 'node_use_dtrace=="true" and OS!="mac" and OS!="linux"', {
- 'copies': [{
- 'destination': '<(OBJ_DIR)/cctest/src',
- 'files': [
- '<(OBJ_PATH)/node_dtrace_ustack.<(OBJ_SUFFIX)',
- '<(OBJ_PATH)/node_dtrace_provider.<(OBJ_SUFFIX)',
- '<(OBJ_PATH)/node_dtrace.<(OBJ_SUFFIX)',
- ]},
+ [ 'OS=="win"', {
+ 'libraries': [
+ '<(OBJ_PATH)/backtrace_win32.<(OBJ_SUFFIX)',
+ ],
+ }, { # POSIX
+ 'defines': [ '__POSIX__' ],
+ 'libraries': [
+ '<(OBJ_PATH)/backtrace_posix.<(OBJ_SUFFIX)',
],
}],
['OS=="solaris"', {
@@ -673,21 +998,19 @@
], # end targets
'conditions': [
- ['OS=="aix"', {
+ [ 'OS=="aix" and node_shared=="true"', {
'targets': [
{
- 'target_name': 'node',
+ 'target_name': 'node_aix_shared',
+ 'type': 'shared_library',
+ 'product_name': '<(node_core_target_name)',
+ 'ldflags': [ '--shared' ],
+ 'product_extension': '<(shlib_suffix)',
'conditions': [
- ['node_shared=="true"', {
- 'type': 'shared_library',
- 'ldflags': ['--shared'],
- 'product_extension': '<(shlib_suffix)',
- }, {
- 'type': 'executable',
- }],
['target_arch=="ppc64"', {
'ldflags': [
- '-Wl,-blibpath:/usr/lib:/lib:/opt/freeware/lib/pthread/ppc64'
+ '-Wl,-blibpath:/usr/lib:/lib:'
+ '/opt/freeware/lib/pthread/ppc64'
],
}],
['target_arch=="ppc"', {
@@ -696,45 +1019,20 @@
],
}]
],
- 'dependencies': ['<(node_core_target_name)', 'node_exp'],
-
+ 'includes': [
+ 'node.gypi'
+ ],
+ 'dependencies': [ '<(node_lib_target_name)' ],
'include_dirs': [
'src',
'deps/v8/include',
],
-
'sources': [
- 'src/node_main.cc',
'<@(library_files)',
- # node.gyp is added to the project by default.
'common.gypi',
],
-
- 'ldflags': ['-Wl,-bE:<(PRODUCT_DIR)/node.exp'],
},
- {
- 'target_name': 'node_exp',
- 'type': 'none',
- 'dependencies': [
- '<(node_core_target_name)',
- ],
- 'actions': [
- {
- 'action_name': 'expfile',
- 'inputs': [
- '<(OBJ_DIR)'
- ],
- 'outputs': [
- '<(PRODUCT_DIR)/node.exp'
- ],
- 'action': [
- 'sh', 'tools/create_expfile.sh',
- '<@(_inputs)', '<@(_outputs)'
- ],
- }
- ]
- }
- ], # end targets
+ ]
}], # end aix section
], # end conditions block
}
diff --git a/node.gypi b/node.gypi
index 3b3548fdab5308..baa7e139b525b8 100644
--- a/node.gypi
+++ b/node.gypi
@@ -1,4 +1,29 @@
{
+ # 'force_load' means to include the static libs into the shared lib or
+ # executable. Therefore, it is enabled when building:
+ # 1. The executable and it uses static lib (cctest and node)
+ # 2. The shared lib
+ # Linker optimizes out functions that are not used. When force_load=true,
+ # --whole-archive,force_load and /WHOLEARCHIVE are used to include
+ # all obj files in static libs into the executable or shared lib.
+ 'variables': {
+ 'variables': {
+ 'variables': {
+ 'force_load%': 'true',
+ 'current_type%': '<(_type)',
+ },
+ 'force_load%': '<(force_load)',
+ 'conditions': [
+ ['current_type=="static_library"', {
+ 'force_load': 'false',
+ }],
+ [ 'current_type=="executable" and node_target_type=="shared_library"', {
+ 'force_load': 'false',
+ }]
+ ],
+ },
+ 'force_load%': '<(force_load)',
+ },
'conditions': [
[ 'node_shared=="false"', {
'msvs_settings': {
@@ -36,12 +61,6 @@
[ 'node_v8_options!=""', {
'defines': [ 'NODE_V8_OPTIONS="<(node_v8_options)"'],
}],
- # No node_main.cc for anything except executable
- [ 'node_target_type!="executable"', {
- 'sources!': [
- 'src/node_main.cc',
- ],
- }],
[ 'node_release_urlbase!=""', {
'defines': [
'NODE_RELEASE_URLBASE="<(node_release_urlbase)"',
@@ -66,156 +85,6 @@
'deps/v8/src/third_party/vtune/v8vtune.gyp:v8_vtune'
],
}],
- ['v8_inspector=="true"', {
- 'defines': [
- 'HAVE_INSPECTOR=1',
- ],
- 'sources': [
- 'src/inspector_agent.cc',
- 'src/inspector_socket.cc',
- 'src/inspector_agent.h',
- 'src/inspector_socket.h',
- ],
- 'dependencies': [
- 'deps/v8_inspector/third_party/v8_inspector/platform/'
- 'v8_inspector/v8_inspector.gyp:v8_inspector_stl',
- 'v8_inspector_compress_protocol_json#host',
- ],
- 'include_dirs': [
- 'deps/v8_inspector/third_party/v8_inspector',
- '<(SHARED_INTERMEDIATE_DIR)/blink', # for inspector
- ],
- }, {
- 'defines': [ 'HAVE_INSPECTOR=0' ]
- }],
- [ 'node_use_openssl=="true"', {
- 'defines': [ 'HAVE_OPENSSL=1' ],
- 'sources': [
- 'src/node_crypto.cc',
- 'src/node_crypto_bio.cc',
- 'src/node_crypto_clienthello.cc',
- 'src/node_crypto.h',
- 'src/node_crypto_bio.h',
- 'src/node_crypto_clienthello.h',
- 'src/tls_wrap.cc',
- 'src/tls_wrap.h'
- ],
- 'conditions': [
- ['openssl_fips != ""', {
- 'defines': [ 'NODE_FIPS_MODE' ],
- }],
- [ 'node_shared_openssl=="false"', {
- 'dependencies': [
- './deps/openssl/openssl.gyp:openssl',
-
- # For tests
- './deps/openssl/openssl.gyp:openssl-cli',
- ],
- # Do not let unused OpenSSL symbols to slip away
- 'conditions': [
- # -force_load or --whole-archive are not applicable for
- # the static library
- [ 'node_target_type!="static_library"', {
- 'xcode_settings': {
- 'OTHER_LDFLAGS': [
- '-Wl,-force_load,<(PRODUCT_DIR)/<(OPENSSL_PRODUCT)',
- ],
- },
- 'conditions': [
- ['OS in "linux freebsd" and node_shared=="false"', {
- 'ldflags': [
- '-Wl,--whole-archive,'
- '<(OBJ_DIR)/deps/openssl/'
- '<(OPENSSL_PRODUCT)',
- '-Wl,--no-whole-archive',
- ],
- }],
- # openssl.def is based on zlib.def, zlib symbols
- # are always exported.
- ['use_openssl_def==1', {
- 'sources': ['<(SHARED_INTERMEDIATE_DIR)/openssl.def'],
- }],
- ['OS=="win" and use_openssl_def==0', {
- 'sources': ['deps/zlib/win32/zlib.def'],
- }],
- ],
- }],
- ],
- }]]
- }, {
- 'defines': [ 'HAVE_OPENSSL=0' ]
- }],
- [ 'node_use_dtrace=="true"', {
- 'defines': [ 'HAVE_DTRACE=1' ],
- 'dependencies': [
- 'node_dtrace_header',
- 'specialize_node_d',
- ],
- 'include_dirs': [ '<(SHARED_INTERMEDIATE_DIR)' ],
-
- #
- # DTrace is supported on linux, solaris, mac, and bsd. There are
- # three object files associated with DTrace support, but they're
- # not all used all the time:
- #
- # node_dtrace.o all configurations
- # node_dtrace_ustack.o not supported on mac and linux
- # node_dtrace_provider.o All except OS X. "dtrace -G" is not
- # used on OS X.
- #
- # Note that node_dtrace_provider.cc and node_dtrace_ustack.cc do not
- # actually exist. They're listed here to trick GYP into linking the
- # corresponding object files into the final "node" executable. These
- # object files are generated by "dtrace -G" using custom actions
- # below, and the GYP-generated Makefiles will properly build them when
- # needed.
- #
- 'sources': [ 'src/node_dtrace.cc' ],
- 'conditions': [
- [ 'OS=="linux"', {
- 'sources': [
- '<(SHARED_INTERMEDIATE_DIR)/node_dtrace_provider.o'
- ],
- }],
- [ 'OS!="mac" and OS!="linux"', {
- 'sources': [
- 'src/node_dtrace_ustack.cc',
- 'src/node_dtrace_provider.cc',
- ]
- }
- ] ]
- } ],
- [ 'node_use_lttng=="true"', {
- 'defines': [ 'HAVE_LTTNG=1' ],
- 'include_dirs': [ '<(SHARED_INTERMEDIATE_DIR)' ],
- 'libraries': [ '-llttng-ust' ],
- 'sources': [
- 'src/node_lttng.cc'
- ],
- } ],
- [ 'node_use_etw=="true"', {
- 'defines': [ 'HAVE_ETW=1' ],
- 'dependencies': [ 'node_etw' ],
- 'sources': [
- 'src/node_win32_etw_provider.h',
- 'src/node_win32_etw_provider-inl.h',
- 'src/node_win32_etw_provider.cc',
- 'src/node_dtrace.cc',
- 'tools/msvs/genfiles/node_etw_provider.h',
- 'tools/msvs/genfiles/node_etw_provider.rc',
- ]
- } ],
- [ 'node_use_perfctr=="true"', {
- 'defines': [ 'HAVE_PERFCTR=1' ],
- 'dependencies': [ 'node_perfctr' ],
- 'sources': [
- 'src/node_win32_perfctr_provider.h',
- 'src/node_win32_perfctr_provider.cc',
- 'src/node_counters.cc',
- 'src/node_counters.h',
- 'tools/msvs/genfiles/node_perfctr_provider.rc',
- ]
- } ],
[ 'node_no_browser_globals=="true"', {
'defines': [ 'NODE_NO_BROWSER_GLOBALS' ],
} ],
@@ -223,7 +92,7 @@
'dependencies': [ 'deps/v8/tools/gyp/v8.gyp:postmortem-metadata' ],
'conditions': [
# -force_load is not applicable for the static library
- [ 'node_target_type!="static_library"', {
+ [ 'force_load=="true"', {
'xcode_settings': {
'OTHER_LDFLAGS': [
'-Wl,-force_load,<(V8_BASE)',
@@ -248,25 +117,6 @@
'dependencies': [ 'deps/uv/uv.gyp:libuv' ],
}],
- [ 'OS=="win"', {
- 'sources': [
- 'src/backtrace_win32.cc',
- 'src/res/node.rc',
- ],
- 'defines!': [
- 'NODE_PLATFORM="win"',
- ],
- 'defines': [
- 'FD_SETSIZE=1024',
- # we need to use node's preferred "win32" rather than gyp's preferred "win"
- 'NODE_PLATFORM="win32"',
- '_UNICODE=1',
- ],
- 'libraries': [ '-lpsapi.lib' ]
- }, { # POSIX
- 'defines': [ '__POSIX__' ],
- 'sources': [ 'src/backtrace_posix.cc' ],
- }],
[ 'OS=="mac"', {
# linking Corefoundation is needed since certain OSX debugging tools
# like Instruments require it for some features
@@ -289,6 +139,27 @@
'defines': [
'_LINUX_SOURCE_COMPAT',
],
+ 'conditions': [
+ [ 'force_load=="true"', {
+
+ 'actions': [
+ {
+ 'action_name': 'expfile',
+ 'inputs': [
+ '<(OBJ_DIR)'
+ ],
+ 'outputs': [
+ '<(PRODUCT_DIR)/node.exp'
+ ],
+ 'action': [
+ 'sh', 'tools/create_expfile.sh',
+ '<@(_inputs)', '<@(_outputs)'
+ ],
+ }
+ ],
+ 'ldflags': ['-Wl,-bE:<(PRODUCT_DIR)/node.exp', '-Wl,-brtl'],
+ }],
+ ],
}],
[ 'OS=="solaris"', {
'libraries': [
@@ -304,12 +175,14 @@
'NODE_PLATFORM="sunos"',
],
}],
- [ '(OS=="freebsd" or OS=="linux") and node_shared=="false" and coverage=="false"', {
+ [ '(OS=="freebsd" or OS=="linux") and node_shared=="false"'
+ ' and coverage=="false" and force_load=="true"', {
'ldflags': [ '-Wl,-z,noexecstack',
'-Wl,--whole-archive <(V8_BASE)',
'-Wl,--no-whole-archive' ]
}],
- [ '(OS=="freebsd" or OS=="linux") and node_shared=="false" and coverage=="true"', {
+ [ '(OS=="freebsd" or OS=="linux") and node_shared=="false"'
+ ' and coverage=="true" and force_load=="true"', {
'ldflags': [ '-Wl,-z,noexecstack',
'-Wl,--whole-archive <(V8_BASE)',
'-Wl,--no-whole-archive',
@@ -324,5 +197,54 @@
[ 'OS=="sunos"', {
'ldflags': [ '-Wl,-M,/usr/lib/ld/map.noexstk' ],
}],
+
+ [ 'node_use_openssl=="true"', {
+ 'defines': [ 'HAVE_OPENSSL=1' ],
+ 'conditions': [
+ ['openssl_fips != ""', {
+ 'defines': [ 'NODE_FIPS_MODE' ],
+ }],
+ [ 'node_shared_openssl=="false"', {
+ 'dependencies': [
+ './deps/openssl/openssl.gyp:openssl',
+
+ # For tests
+ './deps/openssl/openssl.gyp:openssl-cli',
+ ],
+ 'conditions': [
+ # -force_load or --whole-archive are not applicable for
+ # the static library
+ [ 'force_load=="true"', {
+ 'xcode_settings': {
+ 'OTHER_LDFLAGS': [
+ '-Wl,-force_load,<(PRODUCT_DIR)/<(OPENSSL_PRODUCT)',
+ ],
+ },
+ 'conditions': [
+ ['OS in "linux freebsd" and node_shared=="false"', {
+ 'ldflags': [
+ '-Wl,--whole-archive,'
+ '<(OBJ_DIR)/deps/openssl/'
+ '<(OPENSSL_PRODUCT)',
+ '-Wl,--no-whole-archive',
+ ],
+ }],
+ # openssl.def is based on zlib.def, zlib symbols
+ # are always exported.
+ ['use_openssl_def==1', {
+ 'sources': ['<(SHARED_INTERMEDIATE_DIR)/openssl.def'],
+ }],
+ ['OS=="win" and use_openssl_def==0', {
+ 'sources': ['deps/zlib/win32/zlib.def'],
+ }],
+ ],
+ }],
+ ],
+ }]]
+
+ }, {
+ 'defines': [ 'HAVE_OPENSSL=0' ]
+ }],
+
],
}
diff --git a/src/node_main.cc b/src/node_main.cc
index bde397562490e0..89ca076eeb43b4 100644
--- a/src/node_main.cc
+++ b/src/node_main.cc
@@ -1,6 +1,7 @@
#include "node.h"
#ifdef _WIN32
+#include
#include
int wmain(int argc, wchar_t *wargv[]) {
From 8f830ca896a13722b2295bf4ff6f03193fd8ba4e Mon Sep 17 00:00:00 2001
From: Luigi Pinca
Date: Thu, 18 Jan 2018 18:39:02 +0100
Subject: [PATCH 020/227] stream: remove unreachable code
To avoid a function call, `BufferList.prototype.concat()` is not called
when there is only one buffer in the list. That buffer is instead
accessed directly.
Backport-PR-URL: https://github.com/nodejs/node/pull/19483
PR-URL: https://github.com/nodejs/node/pull/18239
Reviewed-By: Matteo Collina
---
lib/internal/streams/BufferList.js | 2 --
1 file changed, 2 deletions(-)
diff --git a/lib/internal/streams/BufferList.js b/lib/internal/streams/BufferList.js
index 76da94bc83d977..9d3badf5b378bd 100644
--- a/lib/internal/streams/BufferList.js
+++ b/lib/internal/streams/BufferList.js
@@ -58,8 +58,6 @@ BufferList.prototype.join = function(s) {
BufferList.prototype.concat = function(n) {
if (this.length === 0)
return Buffer.alloc(0);
- if (this.length === 1)
- return this.head.data;
const ret = Buffer.allocUnsafe(n >>> 0);
var p = this.head;
var i = 0;
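
For illustration, a rough sketch of the caller-side pattern that makes
the removed branch unreachable (the names below are illustrative, not
the literal `_stream_readable.js` internals):

    // Hypothetical helper; callers check the list length themselves and
    // only fall back to concat() when more than one chunk is buffered.
    function readAllBuffered(bufferList, totalLength) {
      if (bufferList.length === 0)
        return Buffer.alloc(0);
      if (bufferList.length === 1)
        return bufferList.head.data;          // single chunk, no concat() call
      return bufferList.concat(totalLength);  // multiple chunks
    }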
From 5bcf668f42566c45286c41eb0c817612412de753 Mon Sep 17 00:00:00 2001
From: Luigi Pinca
Date: Thu, 18 Jan 2018 18:51:09 +0100
Subject: [PATCH 021/227] test: use correct size in test-stream-buffer-list
The `n` argument of `BufferList.prototype.concat()` is not the number
of `Buffer` instances in the list, but their total length when
concatenated.
Backport-PR-URL: https://github.com/nodejs/node/pull/19483
PR-URL: https://github.com/nodejs/node/pull/18239
Reviewed-By: Matteo Collina
---
test/parallel/test-stream-buffer-list.js | 11 ++++++++---
1 file changed, 8 insertions(+), 3 deletions(-)
diff --git a/test/parallel/test-stream-buffer-list.js b/test/parallel/test-stream-buffer-list.js
index ddbff452de4be9..6ea359b458f61b 100644
--- a/test/parallel/test-stream-buffer-list.js
+++ b/test/parallel/test-stream-buffer-list.js
@@ -14,14 +14,19 @@ assert.strictEqual(emptyList.join(','), '');
assert.deepStrictEqual(emptyList.concat(0), Buffer.alloc(0));
+const buf = Buffer.from('foo');
+
// Test buffer list with one element.
const list = new BufferList();
-list.push('foo');
+list.push(buf);
+
+const copy = list.concat(3);
-assert.strictEqual(list.concat(1), 'foo');
+assert.notStrictEqual(copy, buf);
+assert.deepStrictEqual(copy, buf);
assert.strictEqual(list.join(','), 'foo');
const shifted = list.shift();
-assert.strictEqual(shifted, 'foo');
+assert.strictEqual(shifted, buf);
assert.deepStrictEqual(list, new BufferList());
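
A short usage sketch of the corrected call (the `--expose-internals`
flag is an assumption needed to load the internal module outside the
test runner, not part of this patch):

    // Flags: --expose-internals
    const BufferList = require('internal/streams/BufferList');

    const list = new BufferList();
    list.push(Buffer.from('foo'));
    list.push(Buffer.from('bar'));

    // The argument is the total byte length of the concatenated output,
    // not the number of buffers in the list: 3 + 3 === 6.
    const out = list.concat(6);
    // out.equals(Buffer.from('foobar')) === true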
From bc2f0a51209b5c48202f509308314443431388a9 Mon Sep 17 00:00:00 2001
From: Vse Mozhet Byt
Date: Tue, 30 Jan 2018 00:15:53 +0200
Subject: [PATCH 022/227] doc: linkify missing types
Also, alphabetize all types in type-parser.js
and fix some nits in type formats.
Backport-PR-URL: https://github.com/nodejs/node/pull/19500
PR-URL: https://github.com/nodejs/node/pull/18444
Reviewed-By: Luigi Pinca
Reviewed-By: James M Snell
---
doc/api/cluster.md | 2 +-
doc/api/readline.md | 14 +++++------
doc/api/repl.md | 4 +--
doc/api/stream.md | 8 +++---
tools/doc/type-parser.js | 53 ++++++++++++++++++++++++++++++----------
5 files changed, 54 insertions(+), 27 deletions(-)
diff --git a/doc/api/cluster.md b/doc/api/cluster.md
index 648c753931d936..0bc768030728ab 100644
--- a/doc/api/cluster.md
+++ b/doc/api/cluster.md
@@ -265,7 +265,7 @@ It is not emitted in the worker.
added: v0.7.7
-->
-* Returns: {Worker} A reference to `worker`.
+* Returns: {cluster.Worker} A reference to `worker`.
In a worker, this function will close all servers, wait for the `'close'` event on
those servers, and then disconnect the IPC channel.
diff --git a/doc/api/readline.md b/doc/api/readline.md
index b88c37e5bdbfa3..04c838a0024ed4 100644
--- a/doc/api/readline.md
+++ b/doc/api/readline.md
@@ -323,7 +323,7 @@ Interface's `input` *as if it were provided by the user*.
added: v0.7.7
-->
-* `stream` {Writable}
+* `stream` {stream.Writable}
* `dir` {number}
* `-1` - to the left from cursor
* `1` - to the right from cursor
@@ -338,7 +338,7 @@ in a specified direction identified by `dir`.
added: v0.7.7
-->
-* `stream` {Writable}
+* `stream` {stream.Writable}
The `readline.clearScreenDown()` method clears the given [TTY][] stream from
the current position of the cursor down.
@@ -349,9 +349,9 @@ added: v0.1.98
-->
* `options` {Object}
- * `input` {Readable} The [Readable][] stream to listen to. This option is
+ * `input` {stream.Readable} The [Readable][] stream to listen to. This option is
*required*.
- * `output` {Writable} The [Writable][] stream to write readline data to.
+ * `output` {stream.Writable} The [Writable][] stream to write readline data to.
* `completer` {Function} An optional function used for Tab autocompletion.
* `terminal` {boolean} `true` if the `input` and `output` streams should be
treated like a TTY, and have ANSI/VT100 escape codes written to it.
@@ -431,7 +431,7 @@ function completer(linePartial, callback) {
added: v0.7.7
-->
-* `stream` {Writable}
+* `stream` {stream.Writable}
* `x` {number}
* `y` {number}
@@ -443,7 +443,7 @@ given [TTY][] `stream`.
added: v0.7.7
-->
-* `stream` {Readable}
+* `stream` {stream.Readable}
* `interface` {readline.Interface}
The `readline.emitKeypressEvents()` method causes the given [Readable][]
@@ -469,7 +469,7 @@ if (process.stdin.isTTY)
added: v0.7.7
-->
-* `stream` {Writable}
+* `stream` {stream.Writable}
* `dx` {number}
* `dy` {number}
diff --git a/doc/api/repl.md b/doc/api/repl.md
index 69f709a1be874b..17c44fbe36c7ba 100644
--- a/doc/api/repl.md
+++ b/doc/api/repl.md
@@ -381,9 +381,9 @@ added: v0.1.91
* `options` {Object | string}
* `prompt` {string} The input prompt to display. Defaults to `> `.
- * `input` {Readable} The Readable stream from which REPL input will be read.
+ * `input` {stream.Readable} The Readable stream from which REPL input will be read.
Defaults to `process.stdin`.
- * `output` {Writable} The Writable stream to which REPL output will be
+ * `output` {stream.Writable} The Writable stream to which REPL output will be
written. Defaults to `process.stdout`.
* `terminal` {boolean} If `true`, specifies that the `output` should be
treated as a TTY terminal, and have ANSI/VT100 escape codes written to it.
diff --git a/doc/api/stream.md b/doc/api/stream.md
index 4e7e897b3dd163..f1b5ef565517ff 100644
--- a/doc/api/stream.md
+++ b/doc/api/stream.md
@@ -385,7 +385,7 @@ added: v0.11.15
-->
* `encoding` {string} The new default encoding
-* Returns: `this`
+* Returns: {this}
The `writable.setDefaultEncoding()` method sets the default `encoding` for a
[Writable][] stream.
@@ -771,7 +771,7 @@ readable.isPaused(); // === false
added: v0.9.4
-->
-* Returns: `this`
+* Returns: {this}
The `readable.pause()` method will cause a stream in flowing mode to stop
emitting [`'data'`][] events, switching out of flowing mode. Any data that
@@ -903,7 +903,7 @@ event has been emitted will return `null`. No runtime error will be raised.
added: v0.9.4
-->
-* Returns: `this`
+* Returns: {this}
The `readable.resume()` method causes an explicitly paused Readable stream to
resume emitting [`'data'`][] events, switching the stream into flowing mode.
@@ -926,7 +926,7 @@ added: v0.9.4
-->
* `encoding` {string} The encoding to use.
-* Returns: `this`
+* Returns: {this}
The `readable.setEncoding()` method sets the character encoding for
data read from the Readable stream.
diff --git a/tools/doc/type-parser.js b/tools/doc/type-parser.js
index bfe058a42c2cd2..5aa91c39b2a669 100644
--- a/tools/doc/type-parser.js
+++ b/tools/doc/type-parser.js
@@ -14,31 +14,58 @@ const jsPrimitives = {
'undefined': 'Undefined'
};
const jsGlobalTypes = [
- 'Error', 'Object', 'Function', 'Array', 'TypedArray', 'Uint8Array',
- 'Uint16Array', 'Uint32Array', 'Int8Array', 'Int16Array', 'Int32Array',
- 'Uint8ClampedArray', 'Float32Array', 'Float64Array', 'Date', 'RegExp',
- 'ArrayBuffer', 'DataView', 'Promise', 'EvalError', 'RangeError',
- 'ReferenceError', 'SyntaxError', 'TypeError', 'URIError'
+ 'Array', 'ArrayBuffer', 'DataView', 'Date', 'Error', 'EvalError',
+ 'Float32Array', 'Float64Array', 'Function', 'Int16Array', 'Int32Array',
+ 'Int8Array', 'Object', 'Promise', 'RangeError', 'ReferenceError', 'RegExp',
+ 'SharedArrayBuffer', 'SyntaxError', 'TypeError', 'TypedArray', 'URIError',
+ 'Uint16Array', 'Uint32Array', 'Uint8Array', 'Uint8ClampedArray'
];
const typeMap = {
+ 'Iterable':
+ 'https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols#The_iterable_protocol',
+ 'Iterator':
+ 'https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols#The_iterator_protocol',
+
+ 'this':
+ 'https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/this',
+
'Buffer': 'buffer.html#buffer_class_buffer',
- 'Handle': 'net.html#net_server_listen_handle_backlog_callback',
- 'Stream': 'stream.html#stream_stream',
- 'stream.Writable': 'stream.html#stream_class_stream_writable',
- 'stream.Readable': 'stream.html#stream_class_stream_readable',
- 'stream.Duplex': 'stream.html#stream_class_stream_duplex',
+
'ChildProcess': 'child_process.html#child_process_class_childprocess',
+
'cluster.Worker': 'cluster.html#cluster_class_worker',
+
'dgram.Socket': 'dgram.html#dgram_class_dgram_socket',
- 'net.Socket': 'net.html#net_class_net_socket',
- 'tls.TLSSocket': 'tls.html#tls_class_tls_tlssocket',
+
+ 'Domain': 'domain.html#domain_class_domain',
+
'EventEmitter': 'events.html#events_class_eventemitter',
- 'Timer': 'timers.html#timers_timers',
+
+ 'fs.Stats': 'fs.html#fs_class_fs_stats',
+
'http.Agent': 'http.html#http_class_http_agent',
'http.ClientRequest': 'http.html#http_class_http_clientrequest',
'http.IncomingMessage': 'http.html#http_class_http_incomingmessage',
'http.Server': 'http.html#http_class_http_server',
'http.ServerResponse': 'http.html#http_class_http_serverresponse',
+
+ 'Handle': 'net.html#net_server_listen_handle_backlog_callback',
+ 'net.Server': 'net.html#net_class_net_server',
+ 'net.Socket': 'net.html#net_class_net_socket',
+
+ 'readline.Interface': 'readline.html#readline_class_interface',
+
+ 'Stream': 'stream.html#stream_stream',
+ 'stream.Duplex': 'stream.html#stream_class_stream_duplex',
+ 'stream.Readable': 'stream.html#stream_class_stream_readable',
+ 'stream.Writable': 'stream.html#stream_class_stream_writable',
+
+ 'Immediate': 'timers.html#timers_class_immediate',
+ 'Timeout': 'timers.html#timers_class_timeout',
+ 'Timer': 'timers.html#timers_timers',
+
+ 'tls.TLSSocket': 'tls.html#tls_class_tls_tlssocket',
+
'URL': 'url.html#url_the_whatwg_url_api',
'URLSearchParams': 'url.html#url_class_urlsearchparams'
};
From 6ff763bd665a5fddbff0948bd6b4e91cbcec3216 Mon Sep 17 00:00:00 2001
From: Bartosz Sosnowski
Date: Mon, 11 Dec 2017 18:08:20 +0100
Subject: [PATCH 023/227] win, build: fix without-intl option
Fixes the --with-intl option passed to the configure script when
without-intl is used.
Backport-PR-URL: https://github.com/nodejs/node/pull/19485
PR-URL: https://github.com/nodejs/node/pull/17614
Reviewed-By: James M Snell
Reviewed-By: Anna Henningsen
Reviewed-By: Richard Lau
Reviewed-By: Gireesh Punathil
---
vcbuild.bat | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/vcbuild.bat b/vcbuild.bat
index e6a6d0d20f7ac2..815a35d4c7030a 100644
--- a/vcbuild.bat
+++ b/vcbuild.bat
@@ -85,7 +85,7 @@ if /i "%1"=="upload" set upload=1&goto arg-ok
if /i "%1"=="small-icu" set i18n_arg=%1&goto arg-ok
if /i "%1"=="full-icu" set i18n_arg=%1&goto arg-ok
if /i "%1"=="intl-none" set i18n_arg=%1&goto arg-ok
-if /i "%1"=="without-intl" set i18n_arg=%1&goto arg-ok
+if /i "%1"=="without-intl" set i18n_arg=none&goto arg-ok
if /i "%1"=="download-all" set download_arg="--download=all"&goto arg-ok
if /i "%1"=="ignore-flaky" set test_args=%test_args% --flaky-tests=dontcare&goto arg-ok
if /i "%1"=="enable-vtune" set enable_vtune_arg=1&goto arg-ok
From a91b1b928ca7705e03f1002989677811c8098799 Mon Sep 17 00:00:00 2001
From: Birunthan Mohanathas
Date: Mon, 22 Jan 2018 22:25:34 +0530
Subject: [PATCH 024/227] win, build: fix intl-none option
Like #17614, but for the `intl-none` option.
Backport-PR-URL: https://github.com/nodejs/node/pull/19485
Refs: https://github.com/nodejs/node/pull/17614
PR-URL: https://github.com/nodejs/node/pull/18292
Reviewed-By: Richard Lau
Reviewed-By: Gireesh Punathil
Reviewed-By: James M Snell
Reviewed-By: Bartosz Sosnowski
---
vcbuild.bat | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/vcbuild.bat b/vcbuild.bat
index 815a35d4c7030a..a46c99d75916e6 100644
--- a/vcbuild.bat
+++ b/vcbuild.bat
@@ -84,7 +84,7 @@ if /i "%1"=="build-release" set build_release=1&goto arg-ok
if /i "%1"=="upload" set upload=1&goto arg-ok
if /i "%1"=="small-icu" set i18n_arg=%1&goto arg-ok
if /i "%1"=="full-icu" set i18n_arg=%1&goto arg-ok
-if /i "%1"=="intl-none" set i18n_arg=%1&goto arg-ok
+if /i "%1"=="intl-none" set i18n_arg=none&goto arg-ok
if /i "%1"=="without-intl" set i18n_arg=none&goto arg-ok
if /i "%1"=="download-all" set download_arg="--download=all"&goto arg-ok
if /i "%1"=="ignore-flaky" set test_args=%test_args% --flaky-tests=dontcare&goto arg-ok
From 988cca841ed0b2f22d4f06036a95f9209d54c7c3 Mon Sep 17 00:00:00 2001
From: Anna Henningsen
Date: Tue, 30 Jan 2018 20:27:17 +0100
Subject: [PATCH 025/227] process: fix reading zero-length env vars on win32
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Up until now, Node did not clear the current error code when
attempting to read environment variables on Windows.
Since checking the error code is the way we distinguish between
missing and zero-length environment variables, this could lead to a
false positive when the error code was still tainted.
In the simplest case, accessing a missing variable and then a
zero-length one would lead Node to believe that both calls yielded
an error.
Before:
> process.env.I=''; process.env.Q; process.env.I
undefined
> process.env.I=''; /*process.env.Q;*/ process.env.I
''
After:
> process.env.I=''; process.env.Q; process.env.I
''
> process.env.I=''; /*process.env.Q;*/ process.env.I
''
This only affects Node 8 and above, since before
1aa595e5bd64241451b3884d3029b9b9aa97c42d we always constructed a
`v8::String::Value` instance for passing the lookup key to the OS,
which in turn always made a heap allocation and therefore
reset the error code.
Backport-PR-URL: https://github.com/nodejs/node/pull/19484
PR-URL: https://github.com/nodejs/node/pull/18463
Reviewed-By: Ben Noordhuis
Reviewed-By: Jeremiah Senkpiel
Reviewed-By: Fedor Indutny
Reviewed-By: Tobias Nießen
Reviewed-By: Colin Ihrig
Reviewed-By: Michaël Zasso
Reviewed-By: James M Snell
Reviewed-By: Tiancheng "Timothy" Gu
Reviewed-By: Sakthipriyan Vairamani
Reviewed-By: Ruben Bridgewater
---
src/node.cc | 2 ++
.../test-process-env-windows-error-reset.js | 22 +++++++++++++++++++
2 files changed, 24 insertions(+)
create mode 100644 test/parallel/test-process-env-windows-error-reset.js
diff --git a/src/node.cc b/src/node.cc
index 8ab9e4d1f45f51..be82939e00b708 100644
--- a/src/node.cc
+++ b/src/node.cc
@@ -2799,6 +2799,7 @@ static void EnvGetter(Local property,
#else // _WIN32
String::Value key(property);
WCHAR buffer[32767]; // The maximum size allowed for environment variables.
+ SetLastError(ERROR_SUCCESS);
DWORD result = GetEnvironmentVariableW(reinterpret_cast(*key),
buffer,
arraysize(buffer));
@@ -2846,6 +2847,7 @@ static void EnvQuery(Local property,
#else // _WIN32
String::Value key(property);
WCHAR* key_ptr = reinterpret_cast(*key);
+ SetLastError(ERROR_SUCCESS);
if (GetEnvironmentVariableW(key_ptr, nullptr, 0) > 0 ||
GetLastError() == ERROR_SUCCESS) {
rc = 0;
diff --git a/test/parallel/test-process-env-windows-error-reset.js b/test/parallel/test-process-env-windows-error-reset.js
new file mode 100644
index 00000000000000..59e5f287d82346
--- /dev/null
+++ b/test/parallel/test-process-env-windows-error-reset.js
@@ -0,0 +1,22 @@
+'use strict';
+require('../common');
+const assert = require('assert');
+
+// This checks that after accessing a missing env var, a subsequent
+// env read will succeed even for empty variables.
+
+{
+ process.env.FOO = '';
+ process.env.NONEXISTENT_ENV_VAR;
+ const foo = process.env.FOO;
+
+ assert.strictEqual(foo, '');
+}
+
+{
+ process.env.FOO = '';
+ process.env.NONEXISTENT_ENV_VAR;
+ const hasFoo = 'FOO' in process.env;
+
+ assert.strictEqual(hasFoo, true);
+}
From 0067bccf6f6bcc5558d20b06f8504449030bd885 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Tobias=20Nie=C3=9Fen?=
Date: Thu, 8 Feb 2018 19:43:05 +0100
Subject: [PATCH 026/227] doc: fix description of createDecipheriv
PR-URL: https://github.com/nodejs/node/pull/18651
Refs: https://github.com/nodejs/node/pull/12223
Reviewed-By: Ruben Bridgewater
Reviewed-By: Luigi Pinca
Reviewed-By: Tiancheng "Timothy" Gu
---
doc/api/crypto.md | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/doc/api/crypto.md b/doc/api/crypto.md
index 5f8de17b0b9ff7..18721a5a2b0f0e 100644
--- a/doc/api/crypto.md
+++ b/doc/api/crypto.md
@@ -1172,8 +1172,8 @@ recent OpenSSL releases, `openssl list-cipher-algorithms` will display the
available cipher algorithms.
The `key` is the raw key used by the `algorithm` and `iv` is an
-[initialization vector][]. Both arguments must be `'utf8'` encoded strings or
-[buffers][`Buffer`].
+[initialization vector][]. Both arguments must be `'utf8'` encoded strings,
+[Buffers][`Buffer`], `TypedArray`, or `DataView`s.
### crypto.createDiffieHellman(prime[, prime_encoding][, generator][, generator_encoding])
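
To illustrate the corrected description, a minimal sketch passing the
key and IV as Buffers (strings, `TypedArray`s and `DataView`s are
accepted analogously); the cipher name and sizes are example choices,
not mandated by this patch:

    const crypto = require('crypto');

    // aes-256-cbc uses a 32-byte key and a 16-byte IV.
    const key = crypto.randomBytes(32);
    const iv = crypto.randomBytes(16);

    const cipher = crypto.createCipheriv('aes-256-cbc', key, iv);
    const encrypted = cipher.update('some clear text', 'utf8', 'hex') +
                      cipher.final('hex');

    const decipher = crypto.createDecipheriv('aes-256-cbc', key, iv);
    const decrypted = decipher.update(encrypted, 'hex', 'utf8') +
                      decipher.final('utf8');
    // decrypted === 'some clear text'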
From bf72ee667e164acfe466d2c171fd8020a9f0c47b Mon Sep 17 00:00:00 2001
From: Matheus Marchini
Date: Mon, 12 Feb 2018 12:43:11 -0500
Subject: [PATCH 034/227] doc: add mmarchini to collaborators
PR-URL: https://github.com/nodejs/node/pull/18740
Reviewed-By: Joyee Cheung
Reviewed-By: Matteo Collina
Reviewed-By: Ruben Bridgewater
Reviewed-By: Richard Lau
---
README.md | 2 ++
1 file changed, 2 insertions(+)
diff --git a/README.md b/README.md
index 43b6b17d959ebe..cc8a324d1520d7 100644
--- a/README.md
+++ b/README.md
@@ -429,6 +429,8 @@ For more information about the governance of the Node.js project, see
**Mikeal Rogers** <mikeal.rogers@gmail.com>
* [misterdjules](https://github.com/misterdjules) -
**Julien Gilli** <jgilli@nodejs.org>
+* [mmarchini](https://github.com/mmarchini) -
+**Matheus Marchini** <matheus@sthima.com>
* [mscdex](https://github.com/mscdex) -
**Brian White** <mscdex@mscdex.net>
* [MylesBorins](https://github.com/MylesBorins) -
From 057c80b0886464ec9f5899813a5217e27f262097 Mon Sep 17 00:00:00 2001
From: Myles Borins
Date: Tue, 13 Feb 2018 02:09:44 -0500
Subject: [PATCH 035/227] doc: move Fedor to TSC Emeritus
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
In a conversation Fedor requested that this PR be made. They plan
to continue working on core as a Collaborator. It is this committer's
belief that if Fedor would like to join the TSC again in the future,
there is no reason that could not be made possible.
Thanks for all the hard work!
PR-URL: https://github.com/nodejs/node/pull/18752
Reviewed-By: Gireesh Punathil
Reviewed-By: Fedor Indutny
Reviewed-By: Yuta Hiroto
Reviewed-By: Ruben Bridgewater
Reviewed-By: Colin Ihrig
Reviewed-By: James M Snell
Reviewed-By: Anatoli Papirovski
Reviewed-By: Ali Ijaz Sheikh
Reviewed-By: Matteo Collina
Reviewed-By: Сковорода Никита Андреевич
Reviewed-By: Rich Trott
Reviewed-By: Jon Moss
Reviewed-By: Tobias Nießen
Reviewed-By: Joyee Cheung
Reviewed-By: Sakthipriyan Vairamani
Reviewed-By: Michael Dawson
Reviewed-By: Tiancheng "Timothy" Gu
Reviewed-By: Evan Lucas
---
README.md | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/README.md b/README.md
index cc8a324d1520d7..6d894e9c8e40e8 100644
--- a/README.md
+++ b/README.md
@@ -249,8 +249,6 @@ For more information about the governance of the Node.js project, see
**Jeremiah Senkpiel** <fishrock123@rocketmail.com>
* [gibfahn](https://github.com/gibfahn) -
**Gibson Fahnestock** <gibfahn@gmail.com> (he/him)
-* [indutny](https://github.com/indutny) -
-**Fedor Indutny** <fedor.indutny@gmail.com>
* [jasnell](https://github.com/jasnell) -
**James M Snell** <jasnell@gmail.com> (he/him)
* [joyeecheung](https://github.com/joyeecheung) -
@@ -280,6 +278,8 @@ For more information about the governance of the Node.js project, see
**Ben Noordhuis** <info@bnoordhuis.nl>
* [chrisdickinson](https://github.com/chrisdickinson) -
**Chris Dickinson** <christopher.s.dickinson@gmail.com>
+* [indutny](https://github.com/indutny) -
+**Fedor Indutny** <fedor.indutny@gmail.com>
* [isaacs](https://github.com/isaacs) -
**Isaac Z. Schlueter** <i@izs.me>
* [joshgav](https://github.com/joshgav) -
From 77a405b92f768a22c2fc2201d9fac4408108e741 Mon Sep 17 00:00:00 2001
From: Aaron Bieber
Date: Sat, 3 Feb 2018 09:52:29 -0700
Subject: [PATCH 036/227] lib: set process.execPath on OpenBSD
PR-URL: https://github.com/nodejs/node/pull/18543
Reviewed-By: Luigi Pinca
Reviewed-By: James M Snell
---
lib/internal/bootstrap_node.js | 7 +++++++
1 file changed, 7 insertions(+)
diff --git a/lib/internal/bootstrap_node.js b/lib/internal/bootstrap_node.js
index 4b5a500adc598e..3cadf1e399e51b 100644
--- a/lib/internal/bootstrap_node.js
+++ b/lib/internal/bootstrap_node.js
@@ -66,6 +66,13 @@
// URL::ToObject() method is used.
NativeModule.require('internal/url');
+ // On OpenBSD process.execPath will be relative unless we
+ // get the full path before process.execPath is used.
+ if (process.platform === 'openbsd') {
+ const { realpathSync } = NativeModule.require('fs');
+ process.execPath = realpathSync.native(process.execPath);
+ }
+
Object.defineProperty(process, 'argv0', {
enumerable: true,
configurable: false,
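
The same fix-up can be sketched with the public `fs` API outside of the
bootstrap code; `NativeModule.require('fs')` above is the internal
counterpart of a plain `require('fs')`. A minimal sketch, not the bootstrap
implementation itself:

```js
'use strict';
// Sketch: resolve a possibly relative process.execPath to an absolute path,
// mirroring what the patch does during bootstrap on OpenBSD.
const { realpathSync } = require('fs');

if (process.platform === 'openbsd') {
  // realpathSync.native() uses the OS realpath(3) and returns an absolute,
  // symlink-free path.
  process.execPath = realpathSync.native(process.execPath);
}

console.log(process.execPath);
```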
From d799b1cb615b5da86a5ed8e39c3bc519266af841 Mon Sep 17 00:00:00 2001
From: Aaron Bieber
Date: Sat, 3 Feb 2018 09:53:57 -0700
Subject: [PATCH 037/227] test: update a few tests to work on OpenBSD
PR-URL: https://github.com/nodejs/node/pull/18543
Reviewed-By: Luigi Pinca
Reviewed-By: James M Snell
---
test/common/index.js | 1 +
test/parallel/test-child-process-exec-timeout.js | 11 +++++++++--
test/parallel/test-net-dns-error.js | 8 ++++++--
test/parallel/test-setproctitle.js | 2 +-
4 files changed, 17 insertions(+), 5 deletions(-)
diff --git a/test/common/index.js b/test/common/index.js
index d762f4e0aa8498..315e35d04195f4 100644
--- a/test/common/index.js
+++ b/test/common/index.js
@@ -29,6 +29,7 @@ exports.isLinuxPPCBE = (process.platform === 'linux') &&
(os.endianness() === 'BE');
exports.isSunOS = process.platform === 'sunos';
exports.isFreeBSD = process.platform === 'freebsd';
+exports.isOpenBSD = process.platform === 'openbsd';
exports.isLinux = process.platform === 'linux';
exports.isOSX = process.platform === 'darwin';
diff --git a/test/parallel/test-child-process-exec-timeout.js b/test/parallel/test-child-process-exec-timeout.js
index 20923d922ab84c..369fab7d01d17a 100644
--- a/test/parallel/test-child-process-exec-timeout.js
+++ b/test/parallel/test-child-process-exec-timeout.js
@@ -16,15 +16,22 @@ const cmd = `"${process.execPath}" "${__filename}" child`;
// Test the case where a timeout is set, and it expires.
cp.exec(cmd, { timeout: 1 }, common.mustCall((err, stdout, stderr) => {
+ let sigterm = 'SIGTERM';
assert.strictEqual(err.killed, true);
- assert.strictEqual(err.code, null);
+ // TODO OpenBSD returns a null signal and 143 for code
+ if (common.isOpenBSD) {
+ assert.strictEqual(err.code, 143);
+ sigterm = null;
+ } else {
+ assert.strictEqual(err.code, null);
+ }
// At least starting with Darwin Kernel Version 16.4.0, sending a SIGTERM to a
// process that is still starting up kills it with SIGKILL instead of SIGTERM.
// See: https://github.com/libuv/libuv/issues/1226
if (common.isOSX)
assert.ok(err.signal === 'SIGTERM' || err.signal === 'SIGKILL');
else
- assert.strictEqual(err.signal, 'SIGTERM');
+ assert.strictEqual(err.signal, sigterm);
assert.strictEqual(err.cmd, cmd);
assert.strictEqual(stdout.trim(), '');
assert.strictEqual(stderr.trim(), '');
diff --git a/test/parallel/test-net-dns-error.js b/test/parallel/test-net-dns-error.js
index 5ca02313686589..5f51c8d5a45c62 100644
--- a/test/parallel/test-net-dns-error.js
+++ b/test/parallel/test-net-dns-error.js
@@ -6,17 +6,21 @@ const net = require('net');
const host = '*'.repeat(256);
+let errCode = 'ENOTFOUND';
+if (common.isOpenBSD)
+ errCode = 'EAI_FAIL';
+
function do_not_call() {
throw new Error('This function should not have been called.');
}
const socket = net.connect(42, host, do_not_call);
socket.on('error', common.mustCall(function(err) {
- assert.strictEqual(err.code, 'ENOTFOUND');
+ assert.strictEqual(err.code, errCode);
}));
socket.on('lookup', function(err, ip, type) {
assert(err instanceof Error);
- assert.strictEqual(err.code, 'ENOTFOUND');
+ assert.strictEqual(err.code, errCode);
assert.strictEqual(ip, undefined);
assert.strictEqual(type, undefined);
});
diff --git a/test/parallel/test-setproctitle.js b/test/parallel/test-setproctitle.js
index 12aea32ba306c7..0b703445ac20f2 100644
--- a/test/parallel/test-setproctitle.js
+++ b/test/parallel/test-setproctitle.js
@@ -30,7 +30,7 @@ exec(cmd, common.mustCall((error, stdout, stderr) => {
assert.strictEqual(stderr, '');
// freebsd always add ' (procname)' to the process title
- if (common.isFreeBSD)
+ if (common.isFreeBSD || common.isOpenBSD)
title += ` (${path.basename(process.execPath)})`;
// omitting trailing whitespace and \n
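
As an aside on the `143` exit code accepted above for OpenBSD: by the usual
POSIX convention a process killed by a signal is reported as
`128 + <signal number>`, and `SIGTERM` is signal 15, so 128 + 15 = 143. A tiny
illustration using the public `os` constants (not part of the patch):

```js
'use strict';
const os = require('os');

// SIGTERM is signal number 15.
console.log(os.constants.signals.SIGTERM);        // 15
// 128 + 15 = 143, the err.code the test accepts on OpenBSD.
console.log(128 + os.constants.signals.SIGTERM);  // 143
```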
From 1edadebaa070db550f0f18a0a7ab638de8780876 Mon Sep 17 00:00:00 2001
From: Luigi Pinca
Date: Mon, 19 Feb 2018 14:43:02 +0100
Subject: [PATCH 038/227] http: allow _httpMessage to be GC'ed
Set `socket._httpMessage` to `null` before emitting the `'connect'` or
`'upgrade'` event.
PR-URL: https://github.com/nodejs/node/pull/18865
Reviewed-By: Anna Henningsen
Reviewed-By: Colin Ihrig
Reviewed-By: Ruben Bridgewater
Reviewed-By: Matteo Collina
---
lib/_http_agent.js | 1 +
test/parallel/test-http-connect.js | 7 ++++++-
2 files changed, 7 insertions(+), 1 deletion(-)
diff --git a/lib/_http_agent.js b/lib/_http_agent.js
index f11fa9fd6d2f5f..ce4cd05005e211 100644
--- a/lib/_http_agent.js
+++ b/lib/_http_agent.js
@@ -249,6 +249,7 @@ function installListeners(agent, s, options) {
s.removeListener('close', onClose);
s.removeListener('free', onFree);
s.removeListener('agentRemove', onRemove);
+ s._httpMessage = null;
}
s.on('agentRemove', onRemove);
}
diff --git a/test/parallel/test-http-connect.js b/test/parallel/test-http-connect.js
index 854d2d893416a0..9854c68be98ce2 100644
--- a/test/parallel/test-http-connect.js
+++ b/test/parallel/test-http-connect.js
@@ -28,7 +28,11 @@ server.listen(0, common.mustCall(() => {
path: 'google.com:443'
}, common.mustNotCall());
- req.on('close', common.mustCall(() => {}));
+ req.on('socket', common.mustCall((socket) => {
+ assert.strictEqual(socket._httpMessage, req);
+ }));
+
+ req.on('close', common.mustCall());
req.on('connect', common.mustCall((res, socket, firstBodyChunk) => {
// Make sure this request got removed from the pool.
@@ -39,6 +43,7 @@ server.listen(0, common.mustCall(() => {
// Make sure this socket has detached.
assert(!socket.ondata);
assert(!socket.onend);
+ assert.strictEqual(socket._httpMessage, null);
assert.strictEqual(socket.listeners('connect').length, 0);
assert.strictEqual(socket.listeners('data').length, 0);
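
A condensed sketch of the behaviour the updated test asserts: once
`'connect'` is emitted, the socket no longer points back at the request
through `_httpMessage`, so the `ClientRequest` can be garbage collected.
Note that `_httpMessage` is an internal, undocumented property; this is
purely an illustration, with a local dummy proxy standing in for a real
tunnel.

```js
'use strict';
const assert = require('assert');
const http = require('http');

// Dummy "proxy" that accepts CONNECT and replies without tunnelling.
const proxy = http.createServer();
proxy.on('connect', (req, socket, head) => {
  socket.write('HTTP/1.1 200 Connection Established\r\n\r\n');
});

proxy.listen(0, () => {
  const req = http.request({
    port: proxy.address().port,
    method: 'CONNECT',
    path: 'example.org:443'
  });

  req.on('socket', (socket) => {
    // While the request is in flight the socket references it.
    assert.strictEqual(socket._httpMessage, req);
  });

  req.on('connect', (res, socket) => {
    // With this patch the back-reference has already been cleared.
    assert.strictEqual(socket._httpMessage, null);
    socket.destroy();
    proxy.close();
  });

  req.end();
});
```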
From 18acad349c0f0bacdd7e318a5c7a509cad9574c9 Mon Sep 17 00:00:00 2001
From: Luigi Pinca
Date: Sun, 18 Mar 2018 10:59:44 +0100
Subject: [PATCH 039/227] http: make socketPath work with no agent
Currently `Agent.prototype.createConnection()` is called unconditionally
if the `socketPath` option is used. This throws an error if no agent is
used, preventing, for example, the `socketPath` and `createConnection`
options from being used together.
This commit fixes the issue by falling back to the `createConnection`
option or `net.createConnection()`.
PR-URL: https://github.com/nodejs/node/pull/19425
Reviewed-By: Rod Vagg
Reviewed-By: Matteo Collina
Reviewed-By: Matheus Marchini
Reviewed-By: Chen Gang
---
lib/_http_client.js | 6 +++-
.../test-http-client-unix-socket-no-agent.js | 28 +++++++++++++++++++
2 files changed, 33 insertions(+), 1 deletion(-)
create mode 100644 test/parallel/test-http-client-unix-socket-no-agent.js
diff --git a/lib/_http_client.js b/lib/_http_client.js
index 9d2057814133b7..ea181cff31a482 100644
--- a/lib/_http_client.js
+++ b/lib/_http_client.js
@@ -138,7 +138,11 @@ function ClientRequest(options, cb) {
timeout: self.timeout,
rejectUnauthorized: !!options.rejectUnauthorized
};
- const newSocket = self.agent.createConnection(optionsPath, oncreate);
+ const newSocket = self.agent
+ ? self.agent.createConnection(optionsPath, oncreate)
+ : typeof options.createConnection === 'function'
+ ? options.createConnection(optionsPath, oncreate)
+ : net.createConnection(optionsPath);
if (newSocket && !called) {
called = true;
self.onSocket(newSocket);
diff --git a/test/parallel/test-http-client-unix-socket-no-agent.js b/test/parallel/test-http-client-unix-socket-no-agent.js
new file mode 100644
index 00000000000000..9dd11403c6bf79
--- /dev/null
+++ b/test/parallel/test-http-client-unix-socket-no-agent.js
@@ -0,0 +1,28 @@
+'use strict';
+const common = require('../common');
+const Countdown = require('../common/countdown');
+
+const http = require('http');
+const { createConnection } = require('net');
+
+const server = http.createServer((req, res) => {
+ res.end();
+});
+
+const countdown = new Countdown(2, () => {
+ server.close();
+});
+
+common.refreshTmpDir();
+
+server.listen(common.PIPE, common.mustCall(() => {
+ http.get({ createConnection, socketPath: common.PIPE }, onResponse);
+ http.get({ agent: 0, socketPath: common.PIPE }, onResponse);
+}));
+
+function onResponse(res) {
+ res.on('end', () => {
+ countdown.dec();
+ });
+ res.resume();
+}
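
A usage sketch mirroring the new test: an HTTP request over a Unix domain
socket with a user-supplied `createConnection` and no agent. The socket path
below is an assumption for illustration; before this patch the `socketPath`
branch assumed an agent was present and threw.

```js
'use strict';
const http = require('http');
const { createConnection } = require('net');

const socketPath = '/tmp/http-no-agent-example.sock'; // assumed path

const server = http.createServer((req, res) => res.end('ok'));

server.listen(socketPath, () => {
  // No agent: the client now falls back to options.createConnection
  // (or net.createConnection()) instead of throwing.
  http.get({ createConnection, socketPath }, (res) => {
    res.resume();
    res.on('end', () => server.close());
  });
});
```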
From 4d67369c1b4a0a98f1c6a3a29a6b334c4de1e988 Mon Sep 17 00:00:00 2001
From: Vse Mozhet Byt
Date: Mon, 2 Apr 2018 19:27:57 +0300
Subject: [PATCH 040/227] doc: fix various nits
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
* Replace 2 hyphens (--) by spaced m-dashes (—) as per STYLE_GUIDE.md.
* Space infix operators.
* Unify quotes in inline code spans (use only single quotes).
* Unify `* Returns:` (eliminate deviations).
* Dedupe spaces.
Backport-PR-URL: https://github.com/nodejs/node/pull/19761
PR-URL: https://github.com/nodejs/node/pull/19743
Reviewed-By: James M Snell
Reviewed-By: Trivikram Kamat
---
doc/api/addons.md | 2 +-
doc/api/buffer.md | 2 +-
doc/api/child_process.md | 17 +++++-----
doc/api/cli.md | 4 +--
doc/api/cluster.md | 24 +++++++-------
doc/api/console.md | 2 +-
doc/api/crypto.md | 2 +-
doc/api/debugger.md | 2 +-
doc/api/dns.md | 6 ++--
doc/api/documentation.md | 6 ++--
doc/api/domain.md | 32 +++++++++---------
doc/api/errors.md | 16 ++++-----
doc/api/fs.md | 38 ++++++++++-----------
doc/api/globals.md | 6 ++--
doc/api/http.md | 72 ++++++++++++++++++++--------------------
doc/api/https.md | 2 +-
doc/api/modules.md | 56 +++++++++++++++----------------
doc/api/net.md | 32 +++++++++---------
doc/api/os.md | 4 +--
doc/api/path.md | 6 ++--
doc/api/process.md | 42 +++++++++++------------
doc/api/readline.md | 2 +-
doc/api/repl.md | 10 +++---
doc/api/stream.md | 8 ++---
doc/api/tls.md | 24 +++++++-------
doc/api/url.md | 4 +--
doc/api/util.md | 10 +++---
doc/api/v8.md | 2 +-
doc/api/vm.md | 2 +-
doc/api/zlib.md | 20 +++++------
30 files changed, 228 insertions(+), 227 deletions(-)
diff --git a/doc/api/addons.md b/doc/api/addons.md
index 17811245f1c23e..4a9b72e1661612 100644
--- a/doc/api/addons.md
+++ b/doc/api/addons.md
@@ -101,7 +101,7 @@ Addon module name is `addon`.
Once the source code has been written, it must be compiled into the binary
`addon.node` file. To do so, create a file called `binding.gyp` in the
top-level of the project describing the build configuration of your module
-using a JSON-like format. This file is used by [node-gyp][] -- a tool written
+using a JSON-like format. This file is used by [node-gyp][] — a tool written
specifically to compile Node.js Addons.
```json
diff --git a/doc/api/buffer.md b/doc/api/buffer.md
index 9583fa15949fd2..2668d43e8754f4 100644
--- a/doc/api/buffer.md
+++ b/doc/api/buffer.md
@@ -395,7 +395,7 @@ deprecated: v6.0.0
* `size` {integer} The desired length of the new `Buffer`.
-Allocates a new `Buffer` of `size` bytes. The `size` must be less than or equal
+Allocates a new `Buffer` of `size` bytes. The `size` must be less than or equal
to the value of [`buffer.kMaxLength`]. Otherwise, a [`RangeError`] is thrown.
A zero-length `Buffer` will be created if `size <= 0`.
diff --git a/doc/api/child_process.md b/doc/api/child_process.md
index b55bd0fea1d874..4759d9bd4a9337 100644
--- a/doc/api/child_process.md
+++ b/doc/api/child_process.md
@@ -38,7 +38,7 @@ the event loop until the spawned process either exits or is terminated.
For convenience, the `child_process` module provides a handful of synchronous
and asynchronous alternatives to [`child_process.spawn()`][] and
-[`child_process.spawnSync()`][]. *Note that each of these alternatives are
+[`child_process.spawnSync()`][]. *Note that each of these alternatives are
implemented on top of [`child_process.spawn()`][] or [`child_process.spawnSync()`][].*
* [`child_process.exec()`][]: spawns a shell and runs a command within that shell,
@@ -140,7 +140,8 @@ added: v0.1.90
command line parsing should be compatible with `cmd.exe`.
* `timeout` {number} **Default:** `0`
* [`maxBuffer`][] {number} Largest amount of data (in bytes) allowed on
- stdout or stderr - if exceeded child process is killed. **Default:** `200*1024`
+ stdout or stderr - if exceeded child process is killed.
+ **Default:** `200 * 1024`.
* `killSignal` {string|integer} **Default:** `'SIGTERM'`
* `uid` {number} Sets the user identity of the process (see setuid(2)).
* `gid` {number} Sets the group identity of the process (see setgid(2)).
@@ -170,7 +171,7 @@ exec('cat *.js bad_file | wc -l', (error, stdout, stderr) => {
```
If a `callback` function is provided, it is called with the arguments
-`(error, stdout, stderr)`. On success, `error` will be `null`. On error,
+`(error, stdout, stderr)`. On success, `error` will be `null`. On error,
`error` will be an instance of [`Error`][]. The `error.code` property will be
the exit code of the child process while `error.signal` will be set to the
signal that terminated the process. Any exit code other than `0` is considered
@@ -218,7 +219,7 @@ added: v0.1.91
* `timeout` {number} **Default:** `0`
* [`maxBuffer`][] {number} Largest amount of data (in bytes) allowed on
stdout or stderr - if exceeded child process is killed.
- **Default:**: `200*1024`
+ **Default:** `200 * 1024`.
* `killSignal` {string|integer} **Default:** `'SIGTERM'`
* `uid` {number} Sets the user identity of the process (see setuid(2)).
* `gid` {number} Sets the group identity of the process (see setgid(2)).
@@ -446,7 +447,7 @@ disabled*.
On non-Windows platforms, if `options.detached` is set to `true`, the child
process will be made the leader of a new process group and session. Note that
child processes may continue running after the parent exits regardless of
-whether they are detached or not. See setsid(2) for more information.
+whether they are detached or not. See setsid(2) for more information.
By default, the parent will wait for the detached child to exit. To prevent
the parent from waiting for a given `subprocess`, use the `subprocess.unref()`
@@ -656,12 +657,12 @@ The `child_process.execSync()` method is generally identical to
[`child_process.exec()`][] with the exception that the method will not return until
the child process has fully closed. When a timeout has been encountered and
`killSignal` is sent, the method won't return until the process has completely
-exited. *Note that if the child process intercepts and handles the `SIGTERM`
+exited. *Note that if the child process intercepts and handles the `SIGTERM`
signal and doesn't exit, the parent process will wait until the child
process has exited.*
If the process times out, or has a non-zero exit code, this method ***will***
-throw. The [`Error`][] object will contain the entire result from
+throw. The [`Error`][] object will contain the entire result from
[`child_process.spawnSync()`][]
**Note: Never pass unsanitised user input to this function. Any input
@@ -997,7 +998,7 @@ properties:
Defaults to `false`.
The optional `callback` is a function that is invoked after the message is
-sent but before the child may have received it. The function is called with a
+sent but before the child may have received it. The function is called with a
single argument: `null` on success, or an [`Error`][] object on failure.
If no `callback` function is provided and the message cannot be sent, an
diff --git a/doc/api/cli.md b/doc/api/cli.md
index f1b33e883ae883..a8aee18e3efd3e 100644
--- a/doc/api/cli.md
+++ b/doc/api/cli.md
@@ -342,7 +342,7 @@ added: v6.12.0
-->
`options...` are interpreted as if they had been specified on the command line
-before the actual command line (so they can be overriden). Node will exit with
+before the actual command line (so they can be overriden). Node will exit with
an error if an option that is not allowed in the environment is used, such as
`-p` or a script file.
@@ -382,7 +382,7 @@ added: v3.0.0
Path to the file used to store the persistent REPL history. The default path is
`~/.node_repl_history`, which is overridden by this variable. Setting the value
-to an empty string (`""` or `" "`) disables persistent REPL history.
+to an empty string (`''` or `' '`) disables persistent REPL history.
### `NODE_TTY_UNSAFE_ASYNC=1`
diff --git a/doc/api/cluster.md b/doc/api/cluster.md
index 0bc768030728ab..fd5b8398bceb69 100644
--- a/doc/api/cluster.md
+++ b/doc/api/cluster.md
@@ -90,26 +90,26 @@ Node.js process and a cluster worker differs:
idea of what the number 7 file descriptor references.
2. `server.listen(handle)` Listening on handles explicitly will cause
the worker to use the supplied handle, rather than talk to the master
- process. If the worker already has the handle, then it's presumed
+ process. If the worker already has the handle, then it's presumed
that you know what you are doing.
3. `server.listen(0)` Normally, this will cause servers to listen on a
- random port. However, in a cluster, each worker will receive the
- same "random" port each time they do `listen(0)`. In essence, the
- port is random the first time, but predictable thereafter. If you
+ random port. However, in a cluster, each worker will receive the
+ same "random" port each time they do `listen(0)`. In essence, the
+ port is random the first time, but predictable thereafter. If you
want to listen on a unique port, generate a port number based on the
cluster worker ID.
There is no routing logic in Node.js, or in your program, and no shared
-state between the workers. Therefore, it is important to design your
+state between the workers. Therefore, it is important to design your
program such that it does not rely too heavily on in-memory data objects
for things like sessions and login.
Because workers are all separate processes, they can be killed or
re-spawned depending on your program's needs, without affecting other
-workers. As long as there are some workers still alive, the server will
-continue to accept connections. If no workers are alive, existing connections
-will be dropped and new connections will be refused. Node.js does not
-automatically manage the number of workers for you, however. It is your
+workers. As long as there are some workers still alive, the server will
+continue to accept connections. If no workers are alive, existing connections
+will be dropped and new connections will be refused. Node.js does not
+automatically manage the number of workers for you, however. It is your
responsibility to manage the worker pool for your application's needs.
Although a primary use case for the `cluster` module is networking, it can
@@ -489,7 +489,7 @@ Emitted after the worker IPC channel has disconnected. This can occur when a
worker exits gracefully, is killed, or is disconnected manually (such as with
worker.disconnect()).
-There may be a delay between the `'disconnect'` and `'exit'` events. These events
+There may be a delay between the `'disconnect'` and `'exit'` events. These events
can be used to detect if the process is stuck in a cleanup or if there are
long-living connections.
@@ -582,7 +582,7 @@ The `addressType` is one of:
* `4` (TCPv4)
* `6` (TCPv6)
* `-1` (unix domain socket)
-* `"udp4"` or `"udp6"` (UDP v4 or v6)
+* `'udp4'` or `'udp6'` (UDP v4 or v6)
## Event: 'message'
@@ -709,7 +709,7 @@ distribute IOCP handles without incurring a large performance hit.
`cluster.schedulingPolicy` can also be set through the
`NODE_CLUSTER_SCHED_POLICY` environment variable. Valid
-values are `"rr"` and `"none"`.
+values are `'rr'` and `'none'`.
## cluster.settings
Returns the Diffie-Hellman generator in the specified `encoding`, which can
-be `'latin1'`, `'hex'`, or `'base64'`. If `encoding` is provided a string is
+be `'latin1'`, `'hex'`, or `'base64'`. If `encoding` is provided a string is
returned; otherwise a [`Buffer`][] is returned.
### diffieHellman.getPrime([encoding])
diff --git a/doc/api/debugger.md b/doc/api/debugger.md
index a24437021b7bea..92b49fc9ed77eb 100644
--- a/doc/api/debugger.md
+++ b/doc/api/debugger.md
@@ -68,7 +68,7 @@ debug> repl
Press Ctrl + C to leave debug repl
> x
5
-> 2+2
+> 2 + 2
4
debug> next
break in /home/indutny/Code/git/indutny/myscript.js:5
diff --git a/doc/api/dns.md b/doc/api/dns.md
index 7fce91970aa10e..e4321b487340cf 100644
--- a/doc/api/dns.md
+++ b/doc/api/dns.md
@@ -212,7 +212,7 @@ will contain an array of IPv4 addresses (e.g.
* `options` {Object}
* `ttl` {boolean} Retrieve the Time-To-Live value (TTL) of each record.
The callback receives an array of `{ address: '1.2.3.4', ttl: 60 }` objects
- rather than an array of strings. The TTL is expressed in seconds.
+ rather than an array of strings. The TTL is expressed in seconds.
* `callback` {Function} An `(err, result)` callback function.
## dns.resolve6(hostname[, options], callback)
@@ -228,7 +228,7 @@ will contain an array of IPv6 addresses.
* `options` {Object}
* `ttl` {boolean} Retrieve the Time-To-Live value (TTL) of each record.
The callback receives an array of `{ address: '0:1:2:3:4:5:6:7', ttl: 60 }`
- objects rather than an array of strings. The TTL is expressed in seconds.
+ objects rather than an array of strings. The TTL is expressed in seconds.
* `callback` {Function} An `(err, result)` callback function.
## dns.resolveCname(hostname, callback)
@@ -258,7 +258,7 @@ added: v0.9.12
Uses the DNS protocol to resolve regular expression based records (`NAPTR`
records) for the `hostname`. The `callback` function has arguments
-`(err, addresses)`. The `addresses` argument passed to the `callback` function
+`(err, addresses)`. The `addresses` argument passed to the `callback` function
will contain an array of objects with the following properties:
* `flags`
diff --git a/doc/api/documentation.md b/doc/api/documentation.md
index 917e3e8134a37c..8a768949f0530b 100644
--- a/doc/api/documentation.md
+++ b/doc/api/documentation.md
@@ -26,10 +26,10 @@ using the `tools/doc/generate.js` program. An HTML template is located at
Throughout the documentation, you will see indications of a section's
-stability. The Node.js API is still somewhat changing, and as it
-matures, certain parts are more reliable than others. Some are so
+stability. The Node.js API is still somewhat changing, and as it
+matures, certain parts are more reliable than others. Some are so
proven, and so relied upon, that they are unlikely to ever change at
-all. Others are brand new and experimental, or known to be hazardous
+all. Others are brand new and experimental, or known to be hazardous
and in the process of being redesigned.
The stability indices are as follows:
diff --git a/doc/api/domain.md b/doc/api/domain.md
index 08d6edc7d8cb24..99a44925034827 100644
--- a/doc/api/domain.md
+++ b/doc/api/domain.md
@@ -12,7 +12,7 @@ but should expect to have to migrate to a different solution
in the future.
Domains provide a way to handle multiple different IO operations as a
-single group. If any of the event emitters or callbacks registered to a
+single group. If any of the event emitters or callbacks registered to a
domain emit an `'error'` event, or throw an error, then the domain object
will be notified, rather than losing the context of the error in the
`process.on('uncaughtException')` handler, or causing the program to
@@ -30,7 +30,7 @@ never any way to safely "pick up where you left off", without leaking
references, or creating some other sort of undefined brittle state.
The safest way to respond to a thrown error is to shut down the
-process. Of course, in a normal web server, you might have many
+process. Of course, in a normal web server, you might have many
connections open, and it is not reasonable to abruptly shut those down
because an error was triggered by someone else.
@@ -40,7 +40,7 @@ time, and stop listening for new requests in that worker.
In this way, `domain` usage goes hand-in-hand with the cluster module,
since the master process can fork a new worker when a worker
-encounters an error. For Node.js programs that scale to multiple
+encounters an error. For Node.js programs that scale to multiple
machines, the terminating proxy or service registry can take note of
the failure, and react accordingly.
@@ -103,7 +103,7 @@ if (cluster.isMaster) {
const domain = require('domain');
// See the cluster documentation for more details about using
- // worker processes to serve requests. How it works, caveats, etc.
+ // worker processes to serve requests. How it works, caveats, etc.
const server = require('http').createServer((req, res) => {
const d = domain.create();
@@ -126,7 +126,7 @@ if (cluster.isMaster) {
// stop taking new requests.
server.close();
- // Let the master know we're dead. This will trigger a
+ // Let the master know we're dead. This will trigger a
// 'disconnect' in the cluster master, and then it will fork
// a new worker.
cluster.worker.disconnect();
@@ -155,7 +155,7 @@ if (cluster.isMaster) {
server.listen(PORT);
}
-// This part is not important. Just an example routing thing.
+// This part is not important. Just an example routing thing.
// You'd put your fancy application logic here.
function handleRequest(req, res) {
switch (req.url) {
@@ -197,11 +197,11 @@ the active domain at the time of their creation.
Additionally, callbacks passed to lowlevel event loop requests (such as
to fs.open, or other callback-taking methods) will automatically be
-bound to the active domain. If they throw, then the domain will catch
+bound to the active domain. If they throw, then the domain will catch
the error.
In order to prevent excessive memory usage, Domain objects themselves
-are not implicitly added as children of the active domain. If they
+are not implicitly added as children of the active domain. If they
were, then it would be too easy to prevent request and response objects
from being properly garbage collected.
@@ -218,7 +218,7 @@ Implicit binding only takes care of thrown errors and `'error'` events.
Sometimes, the domain in use is not the one that ought to be used for a
-specific event emitter. Or, the event emitter could have been created
+specific event emitter. Or, the event emitter could have been created
in the context of one domain, but ought to instead be bound to some
other domain.
@@ -268,7 +268,7 @@ Returns a new Domain object.
The Domain class encapsulates the functionality of routing errors and
uncaught exceptions to the active Domain object.
-Domain is a child class of [`EventEmitter`][]. To handle the errors that it
+Domain is a child class of [`EventEmitter`][]. To handle the errors that it
catches, listen to its `'error'` event.
### domain.run(fn[, ...args])
@@ -318,13 +318,13 @@ to the domain.
* `emitter` {EventEmitter|Timer} emitter or timer to be added to the domain
-Explicitly adds an emitter to the domain. If any event handlers called by
+Explicitly adds an emitter to the domain. If any event handlers called by
the emitter throw an error, or if the emitter emits an `'error'` event, it
will be routed to the domain's `'error'` event, just like with implicit
binding.
This also works with timers that are returned from [`setInterval()`][] and
-[`setTimeout()`][]. If their callback function throws, it will be caught by
+[`setTimeout()`][]. If their callback function throws, it will be caught by
the domain 'error' handler.
If the Timer or EventEmitter was already bound to a domain, it is removed
@@ -334,7 +334,7 @@ from that one, and bound to this one instead.
* `emitter` {EventEmitter|Timer} emitter or timer to be removed from the domain
-The opposite of [`domain.add(emitter)`][]. Removes domain handling from the
+The opposite of [`domain.add(emitter)`][]. Removes domain handling from the
specified emitter.
### domain.bind(callback)
@@ -343,7 +343,7 @@ specified emitter.
* Returns: {Function} The bound function
The returned function will be a wrapper around the supplied callback
-function. When the returned function is called, any errors that are
+function. When the returned function is called, any errors that are
thrown will be routed to the domain's `'error'` event.
#### Example
@@ -370,7 +370,7 @@ d.on('error', (er) => {
* `callback` {Function} The callback function
* Returns: {Function} The intercepted function
-This method is almost identical to [`domain.bind(callback)`][]. However, in
+This method is almost identical to [`domain.bind(callback)`][]. However, in
addition to catching thrown errors, it will also intercept [`Error`][]
objects sent as the first argument to the function.
@@ -439,7 +439,7 @@ without exiting the domain.
### domain.dispose()
-> Stability: 0 - Deprecated. Please recover from failed IO actions
+> Stability: 0 - Deprecated. Please recover from failed IO actions
> explicitly via error event handlers set on the domain.
Once `dispose` has been called, the domain will no longer be used by callbacks
diff --git a/doc/api/errors.md b/doc/api/errors.md
index 49b4bac595a6dc..306b58a908ee5d 100644
--- a/doc/api/errors.md
+++ b/doc/api/errors.md
@@ -107,7 +107,7 @@ pass or fail).
For *all* `EventEmitter` objects, if an `'error'` event handler is not
provided, the error will be thrown, causing the Node.js process to report an
-unhandled exception and crash unless either: The [`domain`][domains] module is used
+unhandled exception and crash unless either: The [`domain`][domains] module is used
appropriately or a handler has been registered for the
[`process.on('uncaughtException')`][] event.
@@ -133,7 +133,7 @@ exactly how errors raised by those methods are propagated.
Most asynchronous methods exposed by the Node.js core API follow an idiomatic
-pattern referred to as a "Node.js style callback". With this pattern, a
+pattern referred to as a "Node.js style callback". With this pattern, a
callback function is passed to the method as an argument. When the operation
either completes or an error is raised, the callback function is called with
the Error object (if any) passed as the first argument. If no error was raised,
@@ -155,7 +155,7 @@ fs.readFile('/some/file/that/does-exist', nodeStyleCallback);
```
The JavaScript `try / catch` mechanism **cannot** be used to intercept errors
-generated by asynchronous APIs. A common mistake for beginners is to try to
+generated by asynchronous APIs. A common mistake for beginners is to try to
use `throw` inside a Node.js style callback:
```js
@@ -204,7 +204,7 @@ provided text message. If an object is passed as `message`, the text message
is generated by calling `message.toString()`. The `error.stack` property will
represent the point in the code at which `new Error()` was called. Stack traces
are dependent on [V8's stack trace API][]. Stack traces extend only to either
-(a) the beginning of *synchronous code execution*, or (b) the number of frames
+(a) the beginning of *synchronous code execution*, or (b) the number of frames
given by the property `Error.stackTraceLimit`, whichever is smaller.
### Error.captureStackTrace(targetObject[, constructorOpt])
@@ -502,7 +502,7 @@ found [here][online].
- `EACCES` (Permission denied): An attempt was made to access a file in a way
forbidden by its file access permissions.
-- `EADDRINUSE` (Address already in use): An attempt to bind a server
+- `EADDRINUSE` (Address already in use): An attempt to bind a server
([`net`][], [`http`][], or [`https`][]) to a local address failed due to
another server on the local system already occupying that address.
@@ -530,14 +530,14 @@ found [here][online].
`ulimit -n 2048` in the same shell that will run the Node.js process.
- `ENOENT` (No such file or directory): Commonly raised by [`fs`][] operations
- to indicate that a component of the specified pathname does not exist -- no
+ to indicate that a component of the specified pathname does not exist — no
entity (file or directory) could be found by the given path.
- `ENOTDIR` (Not a directory): A component of the given pathname existed, but
was not a directory as expected. Commonly raised by [`fs.readdir`][].
- `ENOTEMPTY` (Directory not empty): A directory with entries was the target
- of an operation that requires an empty directory -- usually [`fs.unlink`][].
+ of an operation that requires an empty directory — usually [`fs.unlink`][].
- `EPERM` (Operation not permitted): An attempt was made to perform an
operation that requires elevated privileges.
@@ -549,7 +549,7 @@ found [here][online].
- `ETIMEDOUT` (Operation timed out): A connect or send request failed because
the connected party did not properly respond after a period of time. Usually
- encountered by [`http`][] or [`net`][] -- often a sign that a `socket.end()`
+ encountered by [`http`][] or [`net`][] — often a sign that a `socket.end()`
was not properly called.
[`fs.readdir`]: fs.html#fs_fs_readdir_path_options_callback
diff --git a/doc/api/fs.md b/doc/api/fs.md
index b47e639e3ca98f..74e3cc3c460c29 100644
--- a/doc/api/fs.md
+++ b/doc/api/fs.md
@@ -6,7 +6,7 @@
-File I/O is provided by simple wrappers around standard POSIX functions. To
+File I/O is provided by simple wrappers around standard POSIX functions. To
use this module do `require('fs')`. All the methods have asynchronous and
synchronous forms.
@@ -67,7 +67,7 @@ fs.rename('/tmp/hello', '/tmp/world', (err) => {
In busy processes, the programmer is _strongly encouraged_ to use the
asynchronous versions of these calls. The synchronous versions will block
-the entire process until they complete--halting all connections.
+the entire process until they complete — halting all connections.
The relative path to a filename can be used. Remember, however, that this path
will be relative to `process.cwd()`.
@@ -252,16 +252,16 @@ page.
The times in the stat object have the following semantics:
-* `atime` "Access Time" - Time when file data last accessed. Changed
+* `atime` "Access Time" - Time when file data last accessed. Changed
by the mknod(2), utimes(2), and read(2) system calls.
* `mtime` "Modified Time" - Time when file data last modified.
Changed by the mknod(2), utimes(2), and write(2) system calls.
* `ctime` "Change Time" - Time when file status was last changed
- (inode data modification). Changed by the chmod(2), chown(2),
+ (inode data modification). Changed by the chmod(2), chown(2),
link(2), mknod(2), rename(2), unlink(2), utimes(2),
read(2), and write(2) system calls.
-* `birthtime` "Birth Time" - Time of file creation. Set once when the
- file is created. On filesystems where birthtime is not available,
+* `birthtime` "Birth Time" - Time of file creation. Set once when the
+ file is created. On filesystems where birthtime is not available,
this field may instead hold either the `ctime` or
`1970-01-01T00:00Z` (ie, unix epoch timestamp `0`). Note that this
value may be greater than `atime` or `mtime` in this case. On Darwin
@@ -270,7 +270,7 @@ The times in the stat object have the following semantics:
utimes(2) system call.
Prior to Node v0.12, the `ctime` held the `birthtime` on Windows
-systems. Note that as of v0.12, `ctime` is not "creation time", and
+systems. Note that as of v0.12, `ctime` is not "creation time", and
on Unix systems, it never was.
## Class: fs.WriteStream
@@ -567,7 +567,7 @@ added: v0.0.2
* `callback` {Function}
* `err` {Error}
-Asynchronous close(2). No arguments other than a possible exception are given
+Asynchronous close(2). No arguments other than a possible exception are given
to the completion callback.
## fs.closeSync(fd)
@@ -621,7 +621,7 @@ const defaults = {
```
`options` can include `start` and `end` values to read a range of bytes from
-the file instead of the entire file. Both `start` and `end` are inclusive and
+the file instead of the entire file. Both `start` and `end` are inclusive and
start counting at 0. If `fd` is specified and `start` is omitted or `undefined`,
`fs.createReadStream()` reads sequentially from the current file position.
The `encoding` can be any one of those accepted by [`Buffer`][].
@@ -632,8 +632,8 @@ emitted. Note that `fd` should be blocking; non-blocking `fd`s should be passed
to [`net.Socket`][].
If `autoClose` is false, then the file descriptor won't be closed, even if
-there's an error. It is your responsibility to close it and make sure
-there's no file descriptor leak. If `autoClose` is set to true (default
+there's an error. It is your responsibility to close it and make sure
+there's no file descriptor leak. If `autoClose` is set to true (default
behavior), on `error` or `end` the file descriptor will be closed
automatically.
@@ -677,7 +677,7 @@ const defaults = {
```
`options` may also include a `start` option to allow writing data at
-some position past the beginning of the file. Modifying a file rather
+some position past the beginning of the file. Modifying a file rather
than replacing it may require a `flags` mode of `r+` rather than the
default mode `w`. The `defaultEncoding` can be any one of those accepted by
[`Buffer`][].
@@ -708,7 +708,7 @@ deprecated: v1.0.0
* `exists` {Boolean}
Test whether or not the given path exists by checking with the file system.
-Then call the `callback` argument with either true or false. Example:
+Then call the `callback` argument with either true or false. Example:
```js
fs.exists('/etc/passwd', (exists) => {
@@ -1286,7 +1286,7 @@ to a non-existent file. The exclusive flag may or may not work with network file
systems.
`flags` can also be a number as documented by open(2); commonly used constants
-are available from `fs.constants`. On Windows, flags are translated to
+are available from `fs.constants`. On Windows, flags are translated to
their equivalent ones where applicable, e.g. `O_WRONLY` to `FILE_GENERIC_WRITE`,
or `O_EXCL|O_CREAT` to `CREATE_NEW`, as accepted by CreateFileW.
@@ -1373,7 +1373,7 @@ added: v0.1.8
* `err` {Error}
* `files` {string[]|Buffer[]}
-Asynchronous readdir(3). Reads the contents of a directory.
+Asynchronous readdir(3). Reads the contents of a directory.
The callback gets two arguments `(err, files)` where `files` is an array of
the names of the files in the directory excluding `'.'` and `'..'`.
@@ -1809,12 +1809,12 @@ added: v0.5.10
* `filename` {string|Buffer}
Watch for changes on `filename`, where `filename` is either a file or a
-directory. The returned object is a [`fs.FSWatcher`][].
+directory. The returned object is a [`fs.FSWatcher`][].
The second argument is optional. If `options` is provided as a string, it
specifies the `encoding`. Otherwise `options` should be passed as an object.
-The listener callback gets two arguments `(eventType, filename)`. `eventType` is either
+The listener callback gets two arguments `(eventType, filename)`. `eventType` is either
`'rename'` or `'change'`, and `filename` is the name of the file which triggered
the event.
@@ -1879,7 +1879,7 @@ this improves the usability of file watching. This is expected behavior.
Providing `filename` argument in the callback is only supported on Linux and
-Windows. Even on supported platforms, `filename` is not always guaranteed to
+Windows. Even on supported platforms, `filename` is not always guaranteed to
be provided. Therefore, don't assume that `filename` argument is always
provided in the callback, and have some fallback logic if it is null.
@@ -2000,7 +2000,7 @@ added: v0.11.5
* `written` {integer}
* `string` {string}
-Write `string` to the file specified by `fd`. If `string` is not a string, then
+Write `string` to the file specified by `fd`. If `string` is not a string, then
the value will be coerced to one.
`position` refers to the offset from the beginning of the file where this data
diff --git a/doc/api/globals.md b/doc/api/globals.md
index 054bc2300bd411..79b11d0bde6c0e 100644
--- a/doc/api/globals.md
+++ b/doc/api/globals.md
@@ -188,7 +188,7 @@ added: v0.1.13
* {Function}
-To require modules. See the [Modules][] section. `require` is not actually a
+To require modules. See the [Modules][] section. `require` is not actually a
global but rather local to each module.
### require.cache
@@ -221,14 +221,14 @@ Process files with the extension `.sjs` as `.js`:
require.extensions['.sjs'] = require.extensions['.js'];
```
-**Deprecated** In the past, this list has been used to load
+**Deprecated** In the past, this list has been used to load
non-JavaScript modules into Node.js by compiling them on-demand.
However, in practice, there are much better ways to do this, such as
loading modules via some other Node.js program, or compiling them to
JavaScript ahead of time.
Since the Module system is locked, this feature will probably never go
-away. However, it may have subtle bugs and complexities that are best
+away. However, it may have subtle bugs and complexities that are best
left untouched.
### require.resolve()
diff --git a/doc/api/http.md b/doc/api/http.md
index 597aa3fda179ed..00b0bb5b3b8c88 100644
--- a/doc/api/http.md
+++ b/doc/api/http.md
@@ -9,7 +9,7 @@ To use the HTTP server and client one must `require('http')`.
The HTTP interfaces in Node.js are designed to support many features
of the protocol which have been traditionally difficult to use.
In particular, large, possibly chunk-encoded, messages. The interface is
-careful to never buffer entire requests or responses--the
+careful to never buffer entire requests or responses — the
user is able to stream data.
HTTP message headers are represented by an object like this:
@@ -33,7 +33,7 @@ parse the actual headers or the body.
See [`message.headers`][] for details on how duplicate headers are handled.
The raw headers as they were received are retained in the `rawHeaders`
-property, which is an array of `[key, value, key2, value2, ...]`. For
+property, which is an array of `[key, value, key2, value2, ...]`. For
example, the previous message header object might have a `rawHeaders`
list like the following:
@@ -122,9 +122,9 @@ added: v0.3.4
for TCP Keep-Alive packets. Ignored when the
`keepAlive` option is `false` or `undefined`. Default = `1000`.
* `maxSockets` {number} Maximum number of sockets to allow per
- host. Default = `Infinity`.
+ host. Default = `Infinity`.
* `maxFreeSockets` {number} Maximum number of sockets to leave open
- in a free state. Only relevant if `keepAlive` is set to `true`.
+ in a free state. Only relevant if `keepAlive` is set to `true`.
Default = `256`.
The default [`http.globalAgent`][] that is used by [`http.request()`][] has all
@@ -202,9 +202,9 @@ added: v0.11.4
Destroy any sockets that are currently in use by the agent.
-It is usually not necessary to do this. However, if you are using an
+It is usually not necessary to do this. However, if you are using an
agent with `keepAlive` enabled, then it is best to explicitly shut down
-the agent when you know that it will no longer be used. Otherwise,
+the agent when you know that it will no longer be used. Otherwise,
sockets may hang open for quite a long time before the server
terminates them.
@@ -216,7 +216,7 @@ added: v0.11.4
* {Object}
An object which contains arrays of sockets currently awaiting use by
-the agent when `keepAlive` is enabled. Do not modify.
+the agent when `keepAlive` is enabled. Do not modify.
### agent.getName(options)
-This object is created internally and returned from [`http.request()`][]. It
-represents an _in-progress_ request whose header has already been queued. The
+This object is created internally and returned from [`http.request()`][]. It
+represents an _in-progress_ request whose header has already been queued. The
header is still mutable using the `setHeader(name, value)`, `getHeader(name)`,
-`removeHeader(name)` API. The actual header will be sent along with the first
+`removeHeader(name)` API. The actual header will be sent along with the first
data chunk or when closing the connection.
To get the response, add a listener for [`'response'`][] to the request object.
[`'response'`][] will be emitted from the request object when the response
-headers have been received. The [`'response'`][] event is executed with one
+headers have been received. The [`'response'`][] event is executed with one
argument which is an instance of [`http.IncomingMessage`][].
During the [`'response'`][] event, one can add listeners to the
response object; particularly to listen for the `'data'` event.
If no [`'response'`][] handler is added, then the response will be
-entirely discarded. However, if you add a [`'response'`][] event handler,
+entirely discarded. However, if you add a [`'response'`][] event handler,
then you **must** consume the data from the response object, either by
calling `response.read()` whenever there is a `'readable'` event, or
by adding a `'data'` handler, or by calling the `.resume()` method.
-Until the data is consumed, the `'end'` event will not fire. Also, until
+Until the data is consumed, the `'end'` event will not fire. Also, until
the data is read it will consume memory that can eventually lead to a
'process out of memory' error.
@@ -517,11 +517,11 @@ added: v1.6.0
Flush the request headers.
For efficiency reasons, Node.js normally buffers the request headers until you
-call `request.end()` or write the first chunk of request data. It then tries
+call `request.end()` or write the first chunk of request data. It then tries
hard to pack the request headers and data into a single TCP packet.
That's usually what you want (it saves a TCP round-trip) but not when the first
-data is not sent until possibly much later. `request.flushHeaders()` lets you bypass
+data is not sent until possibly much later. `request.flushHeaders()` lets you bypass
the optimization and kickstart the request.
### request.setNoDelay([noDelay])
@@ -567,9 +567,9 @@ added: v0.1.29
* `encoding` {string}
* `callback` {Function}
-Sends a chunk of the body. By calling this method
+Sends a chunk of the body. By calling this method
many times, the user can stream a request body to a
-server--in that case it is suggested to use the
+server — in that case it is suggested to use the
`['Transfer-Encoding', 'chunked']` header line when
creating the request.
@@ -733,7 +733,7 @@ added: v0.1.90
* `callback` {Function}
-Stops the server from accepting new connections. See [`net.Server.close()`][].
+Stops the server from accepting new connections. See [`net.Server.close()`][].
### server.listen(handle[, callback])
-This object is created internally by an HTTP server--not by the user. It is
+This object is created internally by an HTTP server — not by the user. It is
passed as the second parameter to the [`'request'`][] event.
The response implements, but does not inherit from, the [Writable Stream][]
@@ -1015,8 +1015,8 @@ added: v0.4.0
* `name` {string}
* `value` {string}
-Sets a single header value for implicit headers. If this header already exists
-in the to-be-sent headers, its value will be replaced. Use an array of strings
+Sets a single header value for implicit headers. If this header already exists
+in the to-be-sent headers, its value will be replaced. Use an array of strings
here if you need to send multiple headers with the same name.
Example:
@@ -1056,12 +1056,12 @@ added: v0.9.12
* `msecs` {number}
* `callback` {Function}
-Sets the Socket's timeout value to `msecs`. If a callback is
+Sets the Socket's timeout value to `msecs`. If a callback is
provided, then it is added as a listener on the `'timeout'` event on
the response object.
If no `'timeout'` listener is added to the request, the response, or
-the server, then sockets are destroyed when they time out. If you
+the server, then sockets are destroyed when they time out. If you
assign a handler on the request, the response, or the server's
`'timeout'` events, then it is your responsibility to handle timed out
sockets.
@@ -1313,8 +1313,8 @@ added: v0.11.6
The raw request/response headers list exactly as they were received.
-Note that the keys and values are in the same list. It is *not* a
-list of tuples. So, the even-numbered offsets are key values, and the
+Note that the keys and values are in the same list. It is *not* a
+list of tuples. So, the even-numbered offsets are key values, and the
odd-numbered offsets are the associated values.
Header names are not lowercased, and duplicates are not merged.
@@ -1341,7 +1341,7 @@ added: v0.11.6
* {Array}
The raw request/response trailer keys and values exactly as they were
-received. Only populated at the `'end'` event.
+received. Only populated at the `'end'` event.
### message.setTimeout(msecs, callback)
-An Agent object for HTTPS similar to [`http.Agent`][]. See [`https.request()`][]
+An Agent object for HTTPS similar to [`http.Agent`][]. See [`https.request()`][]
for more information.
## Class: https.Server
diff --git a/doc/api/modules.md b/doc/api/modules.md
index e87b209395fb1a..59c675f17c813e 100644
--- a/doc/api/modules.md
+++ b/doc/api/modules.md
@@ -28,7 +28,7 @@ exports.circumference = (r) => 2 * PI * r;
```
The module `circle.js` has exported the functions `area()` and
-`circumference()`. To add functions and objects to the root of your module,
+`circumference()`. To add functions and objects to the root of your module,
you can add them to the special `exports` object.
Variables local to the module will be private, because the module is wrapped
@@ -92,7 +92,7 @@ Let's say that we wanted to have the folder at
specific version of a package.
Packages can depend on one another. In order to install package `foo`, you
-may have to install a specific version of package `bar`. The `bar` package
+may have to install a specific version of package `bar`. The `bar` package
may itself have dependencies, and in some cases, these dependencies may even
collide or form cycles.
@@ -121,12 +121,12 @@ the version that is symlinked into
Furthermore, to make the module lookup process even more optimal, rather
than putting packages directly in `/usr/lib/node`, we could put them in
-`/usr/lib/node_modules//`. Then Node.js will not bother
+`/usr/lib/node_modules//`. Then Node.js will not bother
looking for missing dependencies in `/usr/node_modules` or `/node_modules`.
In order to make modules available to the Node.js REPL, it might be useful to
also add the `/usr/lib/node_modules` folder to the `$NODE_PATH` environment
-variable. Since the module lookups using `node_modules` folders are all
+variable. Since the module lookups using `node_modules` folders are all
relative, and based on the real path of the files making the calls to
`require()`, the packages themselves can be anywhere.
@@ -194,12 +194,12 @@ NODE_MODULES_PATHS(START)
-Modules are cached after the first time they are loaded. This means
+Modules are cached after the first time they are loaded. This means
(among other things) that every call to `require('foo')` will get
exactly the same object returned, if it would resolve to the same file.
Multiple calls to `require('foo')` may not cause the module code to be
-executed multiple times. This is an important feature. With it,
+executed multiple times. This is an important feature. With it,
"partially done" objects can be returned, thus allowing transitive
dependencies to be loaded even when they would cause cycles.
@@ -210,7 +210,7 @@ function, and call that function.
-Modules are cached based on their resolved filename. Since modules may
+Modules are cached based on their resolved filename. Since modules may
resolve to a different filename based on the location of the calling
module (loading from `node_modules` folders), it is not a *guarantee*
that `require('foo')` will always return the exact same object, if it
@@ -226,14 +226,14 @@ irrespective of whether or not `./foo` and `./FOO` are the same file.
-Node.js has several modules compiled into the binary. These modules are
+Node.js has several modules compiled into the binary. These modules are
described in greater detail elsewhere in this documentation.
The core modules are defined within Node.js's source and are located in the
`lib/` folder.
Core modules are always preferentially loaded if their identifier is
-passed to `require()`. For instance, `require('http')` will always
+passed to `require()`. For instance, `require('http')` will always
return the built in HTTP module, even if there is a file by that name.
## Cycles
@@ -273,13 +273,13 @@ console.log('b done');
console.log('main starting');
const a = require('./a.js');
const b = require('./b.js');
-console.log('in main, a.done=%j, b.done=%j', a.done, b.done);
+console.log('in main, a.done = %j, b.done = %j', a.done, b.done);
```
-When `main.js` loads `a.js`, then `a.js` in turn loads `b.js`. At that
-point, `b.js` tries to load `a.js`. In order to prevent an infinite
+When `main.js` loads `a.js`, then `a.js` in turn loads `b.js`. At that
+point, `b.js` tries to load `a.js`. In order to prevent an infinite
loop, an **unfinished copy** of the `a.js` exports object is returned to the
-`b.js` module. `b.js` then finishes loading, and its `exports` object is
+`b.js` module. `b.js` then finishes loading, and its `exports` object is
provided to the `a.js` module.
By the time `main.js` has loaded both modules, they're both finished.
@@ -294,7 +294,7 @@ in b, a.done = false
b done
in a, b.done = true
a done
-in main, a.done=true, b.done=true
+in main, a.done = true, b.done = true
```
If you have cyclic module dependencies in your program, make sure to
@@ -312,7 +312,7 @@ required filename with the added extensions: `.js`, `.json`, and finally
parsed as JSON text files. `.node` files are interpreted as compiled addon
modules loaded with `dlopen`.
-A required module prefixed with `'/'` is an absolute path to the file. For
+A required module prefixed with `'/'` is an absolute path to the file. For
example, `require('/home/marco/foo.js')` will load the file at
`/home/marco/foo.js`.
@@ -336,7 +336,7 @@ There are three ways in which a folder may be passed to `require()` as
an argument.
The first is to create a `package.json` file in the root of the folder,
-which specifies a `main` module. An example package.json file might
+which specifies a `main` module. An example package.json file might
look like this:
```json
@@ -350,7 +350,7 @@ If this was in a folder at `./some-library`, then
This is the extent of Node.js's awareness of package.json files.
-Note: If the file specified by the `"main"` entry of `package.json` is missing
+Note: If the file specified by the `'main'` entry of `package.json` is missing
and can not be resolved, Node.js will report the entire module as missing with
the default error:
@@ -360,7 +360,7 @@ Error: Cannot find module 'some-library'
If there is no package.json file present in the directory, then Node.js
will attempt to load an `index.js` or `index.node` file out of that
-directory. For example, if there was no package.json file in the above
+directory. For example, if there was no package.json file in the above
example, then `require('./some-library')` would attempt to load:
* `./some-library/index.js`
@@ -403,7 +403,7 @@ same module resolution semantics.
If the `NODE_PATH` environment variable is set to a colon-delimited list
of absolute paths, then Node.js will search those paths for modules if they
-are not found elsewhere. (Note: On Windows, `NODE_PATH` is delimited by
+are not found elsewhere. (Note: On Windows, `NODE_PATH` is delimited by
semicolons instead of colons.)
`NODE_PATH` was originally created to support loading modules from
@@ -412,7 +412,7 @@ varying paths before the current [module resolution][] algorithm was frozen.
`NODE_PATH` is still supported, but is less necessary now that the Node.js
ecosystem has settled on a convention for locating dependent modules.
Sometimes deployments that rely on `NODE_PATH` show surprising behavior
-when people are unaware that `NODE_PATH` must be set. Sometimes a
+when people are unaware that `NODE_PATH` must be set. Sometimes a
module's dependencies change, causing a different version (or even a
different module) to be loaded as the `NODE_PATH` is searched.
@@ -425,8 +425,8 @@ Additionally, Node.js will search in the following locations:
Where `$HOME` is the user's home directory, and `$PREFIX` is Node.js's
configured `node_prefix`.
-These are mostly for historic reasons. **You are highly encouraged
-to place your dependencies locally in `node_modules` folders.** They
+These are mostly for historic reasons. **You are highly encouraged
+to place your dependencies locally in `node_modules` folders.** They
will be loaded faster, and more reliably.
## The module wrapper
@@ -464,7 +464,7 @@ added: v0.1.16
* {Object}
In each module, the `module` free variable is a reference to the object
-representing the current module. For convenience, `module.exports` is
+representing the current module. For convenience, `module.exports` is
also accessible via the `exports` module-global. `module` is not actually
a global but rather local to each module.
@@ -515,7 +515,7 @@ a.on('ready', () => {
Note that assignment to `module.exports` must be done immediately. It cannot be
-done in any callbacks. This does not work:
+done in any callbacks. This does not work:
x.js:
@@ -595,7 +595,7 @@ added: v0.1.16
* {string}
-The identifier for the module. Typically this is the fully resolved
+The identifier for the module. Typically this is the fully resolved
filename.
### module.loaded
@@ -629,7 +629,7 @@ The `module.require` method provides a way to load a module as if
`require()` was called from the original module.
Note that in order to do this, you must get a reference to the `module`
-object. Since `require()` returns the `module.exports`, and the `module` is
+object. Since `require()` returns the `module.exports`, and the `module` is
typically *only* available within a specific module's code, it must be
explicitly exported in order to be used.
@@ -642,7 +642,7 @@ added: v0.3.7
* {Object}
Provides general utility methods when interacting with instances of
-`Module` -- the `module` variable often seen in file modules. Accessed
+`Module` — the `module` variable often seen in file modules. Accessed
via `require('module')`.
### module.builtinModules
@@ -652,7 +652,7 @@ added: v6.13.0
* {string[]}
-A list of the names of all modules provided by Node.js. Can be used to verify
+A list of the names of all modules provided by Node.js. Can be used to verify
if a module is maintained by a third-party module or not.
[`__dirname`]: #modules_dirname
diff --git a/doc/api/net.md b/doc/api/net.md
index 8e40c2297df77a..0c9adf4bd1ab9d 100644
--- a/doc/api/net.md
+++ b/doc/api/net.md
@@ -121,7 +121,7 @@ connections use asynchronous `server.getConnections` instead.
added: v0.9.7
-->
-* Returns {net.Server}
+* Returns: {net.Server}
Asynchronously get the number of concurrent connections on the server. Works
when sockets were sent to forks.
@@ -147,7 +147,7 @@ already been bound to a port or domain socket.
Listening on a file descriptor is not supported on Windows.
-This function is asynchronous. When the server has been bound,
+This function is asynchronous. When the server has been bound,
[`'listening'`][] event will be emitted.
The last parameter `callback` will be added as a listener for the
[`'listening'`][] event.
@@ -203,8 +203,8 @@ added: v0.1.90
Start a local socket server listening for connections on the given `path`.
-This function is asynchronous. When the server has been bound,
-[`'listening'`][] event will be emitted. The last parameter `callback`
+This function is asynchronous. When the server has been bound,
+[`'listening'`][] event will be emitted. The last parameter `callback`
will be added as a listener for the [`'listening'`][] event.
On UNIX, the local domain is usually known as the UNIX domain. The path is a
@@ -218,7 +218,7 @@ unlinked*.
On Windows, the local domain is implemented using a named pipe. The path *must*
refer to an entry in `\\?\pipe\` or `\\.\pipe\`. Any characters are permitted,
but the latter may do some processing of pipe names, such as resolving `..`
-sequences. Despite appearances, the pipe name space is flat. Pipes will *not
+sequences. Despite appearances, the pipe name space is flat. Pipes will *not
persist*, they are removed when the last reference to them is closed. Do not
forget JavaScript string escaping requires paths to be specified with
double-backslashes, such as:
@@ -252,8 +252,8 @@ The actual length will be determined by the OS through sysctl settings such as
`tcp_max_syn_backlog` and `somaxconn` on Linux. The default value of this
parameter is 511 (not 512).
-This function is asynchronous. When the server has been bound,
-[`'listening'`][] event will be emitted. The last parameter `callback`
+This function is asynchronous. When the server has been bound,
+[`'listening'`][] event will be emitted. The last parameter `callback`
will be added as a listener for the [`'listening'`][] event.
One issue some users run into is getting `EADDRINUSE` errors. This means that
@@ -323,8 +323,8 @@ active server in the event system. If the server is already `unref`d calling
added: v0.3.4
-->
-This object is an abstraction of a TCP or local socket. `net.Socket`
-instances implement a duplex Stream interface. They can be created by the
+This object is an abstraction of a TCP or local socket. `net.Socket`
+instances implement a duplex Stream interface. They can be created by the
user and used as a client (with [`connect()`][]) or they can be created by Node.js
and passed to the user through the `'connection'` event of a server.
@@ -379,8 +379,8 @@ added: v0.1.90
* {Buffer}
-Emitted when data is received. The argument `data` will be a `Buffer` or
-`String`. Encoding of data is set by `socket.setEncoding()`.
+Emitted when data is received. The argument `data` will be a `Buffer` or
+`String`. Encoding of data is set by `socket.setEncoding()`.
(See the [Readable Stream][] section for more information.)
Note that the **data will be lost** if there is no listener when a `Socket`
@@ -403,7 +403,7 @@ added: v0.1.90
Emitted when the other end of the socket sends a FIN packet.
By default (`allowHalfOpen == false`) the socket will destroy its file
-descriptor once it has written out its pending write queue. However, by
+descriptor once it has written out its pending write queue. However, by
setting `allowHalfOpen == true` the socket will not automatically `end()`
its side allowing the user to write arbitrary amounts of data, with the
caveat that the user is required to `end()` their side now.
@@ -415,7 +415,7 @@ added: v0.1.90
* {Error}
-Emitted when an error occurs. The `'close'` event will be called directly
+Emitted when an error occurs. The `'close'` event will be called directly
following this event.
### Event: 'lookup'
@@ -426,9 +426,9 @@ added: v0.11.3
Emitted after resolving the hostname but before connecting.
Not applicable to UNIX sockets.
-* `err` {Error|null} The error object. See [`dns.lookup()`][].
+* `err` {Error|null} The error object. See [`dns.lookup()`][].
* `address` {string} The IP address.
-* `family` {string|null} The address type. See [`dns.lookup()`][].
+* `family` {string|null} The address type. See [`dns.lookup()`][].
* `host` {string} The hostname.
### Event: 'timeout'
@@ -718,7 +718,7 @@ added: v0.1.90
-->
Sends data on the socket. The second parameter specifies the encoding in the
-case of a string--it defaults to UTF8 encoding.
+case of a string — it defaults to UTF8 encoding.
Returns `true` if the entire data was flushed successfully to the kernel
buffer. Returns `false` if all or part of the data was queued in user memory.
diff --git a/doc/api/os.md b/doc/api/os.md
index 011ce5e2bf1742..b8f5c76ce00793 100644
--- a/doc/api/os.md
+++ b/doc/api/os.md
@@ -225,7 +225,7 @@ The `os.loadavg()` method returns an array containing the 1, 5, and 15 minute
load averages.
The load average is a measure of system activity, calculated by the operating
-system and expressed as a fractional number. As a rule of thumb, the load
+system and expressed as a fractional number. As a rule of thumb, the load
average should ideally be less than the number of logical CPUs in the system.
The load average is a UNIX-specific concept with no real equivalent on
@@ -392,7 +392,7 @@ added: v6.0.0
* Returns: {Object}
The `os.userInfo()` method returns information about the currently effective
-user -- on POSIX platforms, this is typically a subset of the password file. The
+user — on POSIX platforms, this is typically a subset of the password file. The
returned object includes the `username`, `uid`, `gid`, `shell`, and `homedir`.
On Windows, the `uid` and `gid` fields are `-1`, and `shell` is `null`.
diff --git a/doc/api/path.md b/doc/api/path.md
index 807cf9b88c9869..7ab4dabb79e04f 100644
--- a/doc/api/path.md
+++ b/doc/api/path.md
@@ -145,7 +145,7 @@ added: v0.1.25
The `path.extname()` method returns the extension of the `path`, from the last
occurrence of the `.` (period) character to end of string in the last portion of
-the `path`. If there is no `.` in the last portion of the `path`, or if the
+the `path`. If there is no `.` in the last portion of the `path`, or if the
first character of the basename of `path` (see `path.basename()`) is `.`, then
an empty string is returned.
@@ -380,7 +380,7 @@ path.parse('/home/user/dir/file.txt');
│ root │ │ name │ ext │
" / home/user/dir / file .txt "
└──────┴──────────────┴──────┴─────┘
-(all spaces in the "" line should be ignored -- they are purely for formatting)
+(all spaces in the "" line should be ignored — they are purely for formatting)
```
On Windows:
@@ -404,7 +404,7 @@ path.parse('C:\\path\\dir\\file.txt');
│ root │ │ name │ ext │
" C:\ path\dir \ file .txt "
└──────┴──────────────┴──────┴─────┘
-(all spaces in the "" line should be ignored -- they are purely for formatting)
+(all spaces in the "" line should be ignored — they are purely for formatting)
```
A [`TypeError`][] is thrown if `path` is not a string.
diff --git a/doc/api/process.md b/doc/api/process.md
index c9aebfa8d2cce4..af20ddb1f751b1 100644
--- a/doc/api/process.md
+++ b/doc/api/process.md
@@ -189,7 +189,7 @@ process will exit with a non-zero exit code and the stack trace will be printed.
This is to avoid infinite recursion.
Attempting to resume normally after an uncaught exception can be similar to
-pulling out of the power cord when upgrading a computer -- nine out of ten
+pulling out of the power cord when upgrading a computer — nine out of ten
times nothing happens - but the 10th time, the system becomes corrupted.
The correct use of `'uncaughtException'` is to perform synchronous cleanup
@@ -388,7 +388,7 @@ For example:
process.stdin.resume();
process.on('SIGINT', () => {
- console.log('Received SIGINT. Press Control-D to exit.');
+ console.log('Received SIGINT. Press Control-D to exit.');
});
```
@@ -397,7 +397,7 @@ terminal programs.
It is important to take note of the following:
-* `SIGUSR1` is reserved by Node.js to start the debugger. It's possible to
+* `SIGUSR1` is reserved by Node.js to start the debugger. It's possible to
install a listener but doing so will _not_ stop the debugger from starting.
* `SIGTERM` and `SIGINT` have default handlers on non-Windows platforms that
resets the terminal mode before exiting with code `128 + signal number`. If
@@ -470,7 +470,7 @@ added: v0.1.27
The `process.argv` property returns an array containing the command line
arguments passed when the Node.js process was launched. The first element will
be [`process.execPath`]. See `process.argv0` if access to the original value of
-`argv[0]` is needed. The second element will be the path to the JavaScript
+`argv[0]` is needed. The second element will be the path to the JavaScript
file being executed. The remaining elements will be any additional command line
arguments.
@@ -890,7 +890,7 @@ added: v0.1.13
The `process.exit()` method instructs Node.js to terminate the process
synchronously with an exit status of `code`. If `code` is omitted, exit uses
either the 'success' code `0` or the value of `process.exitCode` if it has been
-set. Node.js will not terminate until all the [`'exit'`] event listeners are
+set. Node.js will not terminate until all the [`'exit'`] event listeners are
called.
To exit with a 'failure' code:
@@ -1129,7 +1129,7 @@ Windows platforms will throw an error if the `pid` is used to kill a process
group.
*Note*:Even though the name of this function is `process.kill()`, it is really
-just a signal sender, like the `kill` system call. The signal sent may do
+just a signal sender, like the `kill` system call. The signal sent may do
something other than kill the target process.
For example:
@@ -1219,7 +1219,7 @@ Once the current turn of the event loop turn runs to completion, all callbacks
currently in the next tick queue will be called.
This is *not* a simple alias to [`setTimeout(fn, 0)`][]. It is much more
-efficient. It runs before any additional I/O events (including
+efficient. It runs before any additional I/O events (including
timers) fire in subsequent ticks of the event loop.
```js
@@ -1254,7 +1254,7 @@ thing.getReadyForStuff();
```
It is very important for APIs to be either 100% synchronous or 100%
-asynchronous. Consider this example:
+asynchronous. Consider this example:
```js
// WARNING! DO NOT USE! BAD UNSAFE HAZARD!
@@ -1296,7 +1296,7 @@ function definitelyAsync(arg, cb) {
```
*Note*: the next tick queue is completely drained on each pass of the
-event loop **before** additional I/O is processed. As a result,
+event loop **before** additional I/O is processed. As a result,
recursively setting nextTick callbacks will block any I/O from
happening, just like a `while(true);` loop.
@@ -1371,8 +1371,8 @@ tarball.
Support) line the current release is part of. This property only exists for
LTS releases and is `undefined` for all other release types, including stable
releases. Current valid values are:
- - `"Argon"` for the v4.x LTS line beginning with v4.2.0.
- - `"Boron"` for the v6.x LTS line beginning with v6.9.0.
+ - `'Argon'` for the v4.x LTS line beginning with v4.2.0.
+ - `'Boron'` for the v6.x LTS line beginning with v6.9.0.
* `sourceUrl` {string} an absolute URL pointing to a _`.tar.gz`_ file containing
the source code of the current release.
* `headersUrl`{string} an absolute URL pointing to a _`.tar.gz`_ file containing
@@ -1461,7 +1461,7 @@ added: v2.0.0
The `process.seteuid()` method sets the effective user identity of the process.
(See seteuid(2).) The `id` can be passed as either a numeric ID or a username
-string. If a username is specified, the method blocks while resolving the
+string. If a username is specified, the method blocks while resolving the
associated numeric ID.
```js
@@ -1487,7 +1487,7 @@ added: v0.1.31
* `id` {string|number} The group name or ID
The `process.setgid()` method sets the group identity of the process. (See
-setgid(2).) The `id` can be passed as either a numeric ID or a group name
+setgid(2).) The `id` can be passed as either a numeric ID or a group name
string. If a group name is specified, this method blocks while resolving the
associated numeric ID.
@@ -1528,7 +1528,7 @@ added: v0.1.28
-->
The `process.setuid(id)` method sets the user identity of the process. (See
-setuid(2).) The `id` can be passed as either a numeric ID or a username string.
+setuid(2).) The `id` can be passed as either a numeric ID or a username string.
If a username is specified, the method blocks while resolving the associated
numeric ID.
@@ -1790,7 +1790,7 @@ Will generate an object similar to:
## Exit Codes
Node.js will normally exit with a `0` status code when no more async
-operations are pending. The following status codes are used in other
+operations are pending. The following status codes are used in other
cases:
* `1` **Uncaught Fatal Exception** - There was an uncaught exception,
@@ -1798,12 +1798,12 @@ cases:
handler.
* `2` - Unused (reserved by Bash for builtin misuse)
* `3` **Internal JavaScript Parse Error** - The JavaScript source code
- internal in Node.js's bootstrapping process caused a parse error. This
+ internal in Node.js's bootstrapping process caused a parse error. This
is extremely rare, and generally can only happen during development
of Node.js itself.
* `4` **Internal JavaScript Evaluation Failure** - The JavaScript
source code internal in Node.js's bootstrapping process failed to
- return a function value when evaluated. This is extremely rare, and
+ return a function value when evaluated. This is extremely rare, and
generally can only happen during development of Node.js itself.
* `5` **Fatal Error** - There was a fatal unrecoverable error in V8.
Typically a message will be printed to stderr with the prefix `FATAL
@@ -1813,23 +1813,23 @@ cases:
function was somehow set to a non-function, and could not be called.
* `7` **Internal Exception Handler Run-Time Failure** - There was an
uncaught exception, and the internal fatal exception handler
- function itself threw an error while attempting to handle it. This
+ function itself threw an error while attempting to handle it. This
can happen, for example, if a [`'uncaughtException'`][] or
`domain.on('error')` handler throws an error.
-* `8` - Unused. In previous versions of Node.js, exit code 8 sometimes
+* `8` - Unused. In previous versions of Node.js, exit code 8 sometimes
indicated an uncaught exception.
* `9` - **Invalid Argument** - Either an unknown option was specified,
or an option requiring a value was provided without a value.
* `10` **Internal JavaScript Run-Time Failure** - The JavaScript
source code internal in Node.js's bootstrapping process threw an error
- when the bootstrapping function was called. This is extremely rare,
+ when the bootstrapping function was called. This is extremely rare,
and generally can only happen during development of Node.js itself.
* `12` **Invalid Debug Argument** - The `--debug`, `--inspect` and/or
`--debug-brk` options were set, but the port number chosen was invalid
or unavailable.
* `>128` **Signal Exits** - If Node.js receives a fatal signal such as
`SIGKILL` or `SIGHUP`, then its exit code will be `128` plus the
- value of the signal code. This is a standard POSIX practice, since
+ value of the signal code. This is a standard POSIX practice, since
exit codes are defined to be 7-bit integers, and signal exits set
the high-order bit, and then contain the value of the signal code.
diff --git a/doc/api/readline.md b/doc/api/readline.md
index 04c838a0024ed4..d31af7ff3f6b02 100644
--- a/doc/api/readline.md
+++ b/doc/api/readline.md
@@ -295,7 +295,7 @@ added: v0.1.98
* `shift` {boolean} `true` to indicate the `` key.
* `name` {string} The name of the a key.
-The `rl.write()` method will write either `data` or a key sequence identified
+The `rl.write()` method will write either `data` or a key sequence identified
by `key` to the `output`. The `key` argument is supported only if `output` is
a [TTY][] text terminal.
diff --git a/doc/api/repl.md b/doc/api/repl.md
index 17c44fbe36c7ba..e8c87240b1a579 100644
--- a/doc/api/repl.md
+++ b/doc/api/repl.md
@@ -96,7 +96,7 @@ declared either implicitly or using the `var` keyword are declared at the
The default evaluator provides access to any variables that exist in the global
scope. It is possible to expose a variable to the REPL explicitly by assigning
-it to the `context` object associated with each `REPLServer`. For example:
+it to the `context` object associated with each `REPLServer`. For example:
```js
const repl = require('repl');
@@ -391,7 +391,7 @@ added: v0.1.91
stream upon instantiation.
* `eval` {Function} The function to be used when evaluating each given line
of input. Defaults to an async wrapper for the JavaScript `eval()`
- function. An `eval` function can error with `repl.Recoverable` to indicate
+ function. An `eval` function can error with `repl.Recoverable` to indicate
the input was incomplete and prompt for additional lines.
* `useColors` {boolean} If `true`, specifies that the default `writer`
function should include ANSI color styling to REPL output. If a custom
@@ -414,7 +414,7 @@ added: v0.1.91
* `repl.REPL_MODE_STRICT` - evaluates expressions in strict mode. This is
equivalent to prefacing every repl statement with `'use strict'`.
* `repl.REPL_MODE_MAGIC` - attempt to evaluates expressions in default
- mode. If expressions fail to parse, re-try in strict mode.
+ mode. If expressions fail to parse, re-try in strict mode.
* `breakEvalOnSigint` - Stop evaluating the current piece of code when
`SIGINT` is received, i.e. `Ctrl+C` is pressed. This cannot be used together
with a custom `eval` function. Defaults to `false`.
@@ -456,7 +456,7 @@ environment variables:
- `NODE_REPL_HISTORY` - When a valid path is given, persistent REPL history
will be saved to the specified file rather than `.node_repl_history` in the
- user's home directory. Setting this value to `""` will disable persistent
+ user's home directory. Setting this value to `''` will disable persistent
REPL history. Whitespace will be trimmed from the value.
- `NODE_REPL_HISTORY_SIZE` - Defaults to `1000`. Controls how many lines of
history will be persisted if history is available. Must be a positive number.
@@ -469,7 +469,7 @@ environment variables:
By default, the Node.js REPL will persist history between `node` REPL sessions
by saving inputs to a `.node_repl_history` file located in the user's home
directory. This can be disabled by setting the environment variable
-`NODE_REPL_HISTORY=""`.
+`NODE_REPL_HISTORY=''`.
#### NODE_REPL_HISTORY_FILE
* `size` {number} Optional argument to specify how much data to read.
-* Return {string|Buffer|null}
+* Returns: {string|Buffer|null}
The `readable.read()` method pulls some data out of the internal buffer and
returns it. If no data available to be read, `null` is returned. By default,
@@ -1364,7 +1364,7 @@ It is recommended that errors occurring during the processing of the
the callback and passing the error as the first argument. This will cause an
`'error'` event to be emitted by the Writable. Throwing an Error from within
`writable._write()` can result in unexpected and inconsistent behavior depending
-on how the stream is being used. Using the callback ensures consistent and
+on how the stream is being used. Using the callback ensures consistent and
predictable handling of errors.
```js
@@ -1543,7 +1543,7 @@ user programs.
#### readable.push(chunk[, encoding])
* `chunk` {Buffer|null|string} Chunk of data to push into the read queue
-* `encoding` {string} Encoding of String chunks. Must be a valid
+* `encoding` {string} Encoding of String chunks. Must be a valid
Buffer encoding, such as `'utf8'` or `'ascii'`
* Returns: {boolean} `true` if additional chunks of data may continued to be
pushed; `false` otherwise.
@@ -1963,7 +1963,7 @@ The `transform._transform()` method is prefixed with an underscore because it
is internal to the class that defines it, and should never be called directly by
user programs.
-`transform._transform()` is never called in parallel; streams implement a
+`transform._transform()` is never called in parallel; streams implement a
queue mechanism, and to receive the next chunk, `callback` must be
called, either synchronously or asynchronously.
diff --git a/doc/api/tls.md b/doc/api/tls.md
index d307b376ef58e2..9b6027ab11f278 100644
--- a/doc/api/tls.md
+++ b/doc/api/tls.md
@@ -374,7 +374,7 @@ added: v0.6.0
-->
Returns the bound address, the address family name, and port of the
-server as reported by the operating system. See [`net.Server.address()`][] for
+server as reported by the operating system. See [`net.Server.address()`][] for
more information.
### server.close([callback])
@@ -472,7 +472,7 @@ added: v0.11.4
* `options` {Object}
* `isServer`: The SSL/TLS protocol is asymmetrical, TLSSockets must know if
they are to behave as a server or a client. If `true` the TLS socket will be
- instantiated as a server. Defaults to `false`.
+ instantiated as a server. Defaults to `false`.
* `server` {net.Server} An optional [`net.Server`][] instance.
* `requestCert`: Whether to authenticate the remote peer by requesting a
certificate. Clients always request a server certificate. Servers
@@ -624,7 +624,7 @@ For example:
{ ... another certificate, possibly with a .issuerCertificate ... },
raw: < RAW DER buffer >,
valid_from: 'Nov 11 09:52:22 2009 GMT',
- valid_to: 'Nov 6 09:52:22 2029 GMT',
+ valid_to: 'Nov 6 09:52:22 2029 GMT',
fingerprint: '2A:7A:C2:DD:E5:F9:CC:53:72:35:99:7A:02:5A:71:38:52:EC:8A:DF',
serialNumber: 'B9B0D332A1AA5635' }
```
@@ -795,7 +795,7 @@ changes:
rather than creating a new socket. Typically, this is an instance of
[`net.Socket`][], but any `Duplex` stream is allowed.
If this option is specified, `path`, `host` and `port` are ignored,
- except for certificate validation. Usually, a socket is already connected
+ except for certificate validation. Usually, a socket is already connected
when passed to `tls.connect()`, but it can be connected later. Note that
connection/disconnection/destruction of `socket` is the user's
responsibility, calling `tls.connect()` will not cause `net.connect()` to be
@@ -910,7 +910,7 @@ added: v0.11.13
to decrypt it.
* `key` {string|string[]|Buffer|Buffer[]|Object[]} Optional private keys in
PEM format. PEM allows the option of private keys being encrypted. Encrypted
- keys will be decrypted with `options.passphrase`. Multiple keys using
+ keys will be decrypted with `options.passphrase`. Multiple keys using
different algorithms can be provided either as an array of unencrypted key
strings or buffers, or an array of objects in the form `{pem:
[, passphrase: ]}`. The object form can only occur in
@@ -923,7 +923,7 @@ added: v0.11.13
consist of the PEM formatted certificate for a provided private `key`,
followed by the PEM formatted intermediate certificates (if any), in order,
and not including the root CA (the root CA must be pre-known to the peer,
- see `ca`). When providing multiple cert chains, they do not have to be in
+ see `ca`). When providing multiple cert chains, they do not have to be in
the same order as their private keys in `key`. If the intermediate
certificates are not provided, the peer will not be able to validate the
certificate, and the handshake will fail.
@@ -933,7 +933,7 @@ added: v0.11.13
using this option. The value can be a string or Buffer, or an Array of
strings and/or Buffers. Any string or Buffer can contain multiple PEM CAs
concatenated together. The peer's certificate must be chainable to a CA
- trusted by the server for the connection to be authenticated. When using
+ trusted by the server for the connection to be authenticated. When using
certificates that are not chainable to a well-known CA, the certificate's CA
must be explicitly specified as a trusted or the connection will fail to
authenticate.
@@ -945,7 +945,7 @@ added: v0.11.13
* `crl` {string|string[]|Buffer|Buffer[]} Optional PEM formatted
CRLs (Certificate Revocation Lists).
* `ciphers` {string} Optional cipher suite specification, replacing the
- default. For more information, see [modifying the default cipher suite][].
+ default. For more information, see [modifying the default cipher suite][].
* `honorCipherOrder` {boolean} Attempt to use the server's cipher suite
preferences instead of the client's. When `true`, causes
`SSL_OP_CIPHER_SERVER_PREFERENCE` to be set in `secureOptions`, see
@@ -954,7 +954,7 @@ added: v0.11.13
APIs that create secure contexts leave it unset.
* `ecdhCurve` {string} A string describing a named curve to use for ECDH key
agreement or `false` to disable ECDH. Defaults to
- [`tls.DEFAULT_ECDH_CURVE`]. Use [`crypto.getCurves()`][] to obtain a list
+ [`tls.DEFAULT_ECDH_CURVE`]. Use [`crypto.getCurves()`][] to obtain a list
of available curve names. On recent releases, `openssl ecparam -list_curves`
will also display the name and description of each available elliptic curve.
* `dhparam` {string|Buffer} Diffie Hellman parameters, required for
@@ -964,8 +964,8 @@ added: v0.11.13
for stronger security. If omitted or invalid, the parameters are silently
discarded and DHE ciphers will not be available.
* `secureProtocol` {string} Optional SSL method to use, default is
- `"SSLv23_method"`. The possible values are listed as [SSL_METHODS][], use
- the function names as strings. For example, `"SSLv3_method"` to force SSL
+ `'SSLv23_method'`. The possible values are listed as [SSL_METHODS][], use
+ the function names as strings. For example, `'SSLv3_method'` to force SSL
version 3.
* `secureOptions` {number} Optionally affect the OpenSSL protocol behavior,
which is not usually necessary. This should be used carefully if at all!
@@ -1027,7 +1027,7 @@ added: v0.3.2
servers, the identity options (`pfx` or `key`/`cert`) are usually required.
* `secureConnectionListener` {Function}
-Creates a new [tls.Server][]. The `secureConnectionListener`, if provided, is
+Creates a new [tls.Server][]. The `secureConnectionListener`, if provided, is
automatically set as a listener for the [`'secureConnection'`][] event.
The following illustrates a simple echo server:
diff --git a/doc/api/url.md b/doc/api/url.md
index 61899d3c0ccf7b..5b9c3c23f43426 100644
--- a/doc/api/url.md
+++ b/doc/api/url.md
@@ -51,7 +51,7 @@ properties of a WHATWG `URL` object.
├─────────────┴─────────────────────┴─────────────────────┴──────────┴────────────────┴───────┤
│ href │
└─────────────────────────────────────────────────────────────────────────────────────────────┘
-(all spaces in the "" line should be ignored -- they are purely for formatting)
+(all spaces in the "" line should be ignored — they are purely for formatting)
```
Parsing the URL string using the WHATWG API:
@@ -555,7 +555,7 @@ Instantiate a new `URLSearchParams` object with an iterable map in a way that
is similar to [`Map`][]'s constructor. `iterable` can be an Array or any
iterable object. That means `iterable` can be another `URLSearchParams`, in
which case the constructor will simply create a clone of the provided
-`URLSearchParams`. Elements of `iterable` are key-value pairs, and can
+`URLSearchParams`. Elements of `iterable` are key-value pairs, and can
themselves be any iterable object.
Duplicate keys are allowed.
diff --git a/doc/api/util.md b/doc/api/util.md
index 9e29cfc073ccc5..e1ad92095539bb 100644
--- a/doc/api/util.md
+++ b/doc/api/util.md
@@ -23,9 +23,9 @@ added: v0.11.3
The `util.debuglog()` method is used to create a function that conditionally
writes debug messages to `stderr` based on the existence of the `NODE_DEBUG`
-environment variable. If the `section` name appears within the value of that
+environment variable. If the `section` name appears within the value of that
environment variable, then the returned function operates similar to
-[`console.error()`][]. If not, then the returned function is a no-op.
+[`console.error()`][]. If not, then the returned function is a no-op.
For example:
@@ -43,7 +43,7 @@ it will output something like:
FOO 3245: hello from foo [123]
```
-where `3245` is the process id. If it is not run with that
+where `3245` is the process id. If it is not run with that
environment variable set, then it will not print anything.
Multiple comma-separated `section` names may be specified in the `NODE_DEBUG`
@@ -108,7 +108,7 @@ corresponding argument. Supported placeholders are:
* `%d` - Number (integer or floating point value).
* `%i` - Integer.
* `%f` - Floating point value.
-* `%j` - JSON. Replaced with the string `'[Circular]'` if the argument
+* `%j` - JSON. Replaced with the string `'[Circular]'` if the argument
contains circular references.
* `%%` - single percent sign (`'%'`). This does not consume an argument.
@@ -157,7 +157,7 @@ the two styles are [semantically incompatible][]._
* `constructor` {Function}
* `superConstructor` {Function}
-Inherit the prototype methods from one [constructor][] into another. The
+Inherit the prototype methods from one [constructor][] into another. The
prototype of `constructor` will be set to a new object created from
`superConstructor`.
diff --git a/doc/api/v8.md b/doc/api/v8.md
index 234835376bd3e0..a74bf64e535108 100644
--- a/doc/api/v8.md
+++ b/doc/api/v8.md
@@ -110,7 +110,7 @@ after the VM has started may result in unpredictable behavior, including
crashes and data loss; or it may simply do nothing.
The V8 options available for a version of Node.js may be determined by running
-`node --v8-options`. An unofficial, community-maintained list of options
+`node --v8-options`. An unofficial, community-maintained list of options
and their effects is available [here][].
Usage:
diff --git a/doc/api/vm.md b/doc/api/vm.md
index 8c196f1d1a682f..9aab34f20b86c2 100644
--- a/doc/api/vm.md
+++ b/doc/api/vm.md
@@ -108,7 +108,7 @@ added: v0.3.1
will be thrown.
* `breakOnSigint`: if `true`, the execution will be terminated when
`SIGINT` (Ctrl+C) is received. Existing handlers for the
- event that have been attached via `process.on("SIGINT")` will be disabled
+ event that have been attached via `process.on('SIGINT')` will be disabled
during script execution, but will continue to work after that.
If execution is terminated, an [`Error`][] will be thrown.
diff --git a/doc/api/zlib.md b/doc/api/zlib.md
index 6cef23824977c9..19ab15231913c3 100644
--- a/doc/api/zlib.md
+++ b/doc/api/zlib.md
@@ -57,8 +57,8 @@ header is used to identify the compression encodings actually applied to a
message.
**Note: the examples given below are drastically simplified to show
-the basic concept.** Using `zlib` encoding can be expensive, and the results
-ought to be cached. See [Memory Usage Tuning][] for more information
+the basic concept.** Using `zlib` encoding can be expensive, and the results
+ought to be cached. See [Memory Usage Tuning][] for more information
on the speed/memory/compression tradeoffs involved in `zlib` usage.
```js
@@ -157,7 +157,7 @@ The memory requirements for deflate are (in bytes):
(1 << (windowBits + 2)) + (1 << (memLevel + 9));
```
-That is: 128K for windowBits=15 + 128K for memLevel = 8
+That is: 128K for windowBits = 15 + 128K for memLevel = 8
(default values) plus a few kilobytes for small objects.
For example, to reduce the default memory requirements from 256K to 128K, the
@@ -175,20 +175,20 @@ The memory requirements for inflate are (in bytes)
1 << windowBits;
```
-That is, 32K for windowBits=15 (default value) plus a few kilobytes
+That is, 32K for windowBits = 15 (default value) plus a few kilobytes
for small objects.
This is in addition to a single internal output slab buffer of size
`chunkSize`, which defaults to 16K.
The speed of `zlib` compression is affected most dramatically by the
-`level` setting. A higher level will result in better compression, but
-will take longer to complete. A lower level will result in less
+`level` setting. A higher level will result in better compression, but
+will take longer to complete. A lower level will result in less
compression, but will be much faster.
In general, greater memory usage options will mean that Node.js has to make
fewer calls to `zlib` because it will be able to process more data on
-each `write` operation. So, this is another factor that affects the
+each `write` operation. So, this is another factor that affects the
speed, at the cost of memory usage.
## Flushing
@@ -231,7 +231,7 @@ added: v0.5.8
All of the constants defined in `zlib.h` are also defined on `require('zlib')`.
In the normal course of operations, it will not be necessary to use these
constants. They are documented so that their presence is not surprising. This
-section is taken almost directly from the [zlib documentation][]. See
+section is taken almost directly from the [zlib documentation][]. See
for more details.
Allowed flush values.
@@ -288,14 +288,14 @@ added: v0.11.1
-Each class takes an `options` object. All options are optional.
+Each class takes an `options` object. All options are optional.
Note that some options are only relevant when compressing, and are
ignored by the decompression classes.
* `flush` (default: `zlib.Z_NO_FLUSH`)
* `finishFlush` (default: `zlib.Z_FINISH`)
-* `chunkSize` (default: 16*1024)
+* `chunkSize` (default: `16 * 1024`)
* `windowBits`
* `level` (compression only)
* `memLevel` (compression only)
From 0ca2dad3a64b53baa9fdd0079ff3ef2ee5350453 Mon Sep 17 00:00:00 2001
From: Ivan Filenko
Date: Wed, 24 Jan 2018 01:42:40 +0300
Subject: [PATCH 041/227] src: free memory before re-setting URLHost value
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Fixes: https://github.com/nodejs/node/issues/18302
Backport-PR-URL: https://github.com/nodejs/node/pull/19639
PR-URL: https://github.com/nodejs/node/pull/18357
Reviewed-By: Tiancheng "Timothy" Gu
Reviewed-By: Anatoli Papirovski
Reviewed-By: James M Snell
Reviewed-By: Anna Henningsen
Reviewed-By: Tobias Nießen
---
src/node_url.cc | 19 +++++++++++++------
1 file changed, 13 insertions(+), 6 deletions(-)
diff --git a/src/node_url.cc b/src/node_url.cc
index 0e5c395b130d52..2a1058d3536c53 100644
--- a/src/node_url.cc
+++ b/src/node_url.cc
@@ -92,6 +92,16 @@ class URLHost {
Value value_;
HostType type_ = HostType::H_FAILED;
+ inline void Reset() {
+ using string = std::string;
+ switch (type_) {
+ case HostType::H_DOMAIN: value_.domain.~string(); break;
+ case HostType::H_OPAQUE: value_.opaque.~string(); break;
+ default: break;
+ }
+ type_ = HostType::H_FAILED;
+ }
+
// Setting the string members of the union with = is brittle because
// it relies on them being initialized to a state that requires no
// destruction of old data.
@@ -101,12 +111,14 @@ class URLHost {
// These helpers are the easiest solution but we might want to consider
// just not forcing strings into an union.
inline void SetOpaque(std::string* string) {
+ Reset();
type_ = HostType::H_OPAQUE;
new(&value_.opaque) std::string();
value_.opaque.swap(*string);
}
inline void SetDomain(std::string* string) {
+ Reset();
type_ = HostType::H_DOMAIN;
new(&value_.domain) std::string();
value_.domain.swap(*string);
@@ -114,12 +126,7 @@ class URLHost {
};
URLHost::~URLHost() {
- using string = std::string;
- switch (type_) {
- case HostType::H_DOMAIN: value_.domain.~string(); break;
- case HostType::H_OPAQUE: value_.opaque.~string(); break;
- default: break;
- }
+ Reset();
}
#define ARGS(XX) \
From 8ab8d6afd6d2feda0fd7c1e7c05093719be4b51a Mon Sep 17 00:00:00 2001
From: Anna Henningsen
Date: Sat, 29 Apr 2017 20:25:35 +0200
Subject: [PATCH 042/227] stream: fix y.pipe(x)+y.pipe(x)+y.unpipe(x)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Fix the uncommon situation when a readable stream is piped twice into
the same destination stream, and then unpiped once.
Previously, the `unpipe` event handlers were unable to tell whether
they corresponded to the “right” conceptual pipe being removed; this
commit fixes that by passing a shared `hasUnpiped` flag to the `unpipe`
event handlers so that at most a single piping destination is removed.
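For illustration, a minimal sketch of the situation this change addresses
(the stream setup here is hypothetical; the new test added below exercises
the same scenario):

```js
'use strict';
const { PassThrough, Writable } = require('stream');

const source = new PassThrough();
const dest = new Writable({
  write(chunk, encoding, cb) { cb(); }
});

// Pipe the same source into the same destination twice...
source.pipe(dest);
source.pipe(dest);

// ...then unpipe once. Only one of the two pipes should be removed,
// so data written afterwards still reaches `dest` exactly once.
source.unpipe(dest);
source.write('foobar');
```

With the shared `hasUnpiped` flag, only the first `'unpipe'` handler runs its
cleanup and the second pipe stays intact.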
Fixes: https://github.com/nodejs/node/issues/12718
PR-URL: https://github.com/nodejs/node/pull/12746
Reviewed-By: Benjamin Gruenbaum
Reviewed-By: Matteo Collina
---
lib/_stream_readable.js | 14 ++--
...test-stream-pipe-same-destination-twice.js | 78 +++++++++++++++++++
2 files changed, 87 insertions(+), 5 deletions(-)
create mode 100644 test/parallel/test-stream-pipe-same-destination-twice.js
diff --git a/lib/_stream_readable.js b/lib/_stream_readable.js
index dfba99cbc74d7d..39a7ec8d93ad53 100644
--- a/lib/_stream_readable.js
+++ b/lib/_stream_readable.js
@@ -499,10 +499,13 @@ Readable.prototype.pipe = function(dest, pipeOpts) {
src.once('end', endFn);
dest.on('unpipe', onunpipe);
- function onunpipe(readable) {
+ function onunpipe(readable, unpipeInfo) {
debug('onunpipe');
if (readable === src) {
- cleanup();
+ if (unpipeInfo && unpipeInfo.hasUnpiped === false) {
+ unpipeInfo.hasUnpiped = true;
+ cleanup();
+ }
}
}
@@ -628,6 +631,7 @@ function pipeOnDrain(src) {
Readable.prototype.unpipe = function(dest) {
var state = this._readableState;
+ var unpipeInfo = { hasUnpiped: false };
// if we're not piping anywhere, then do nothing.
if (state.pipesCount === 0)
@@ -647,7 +651,7 @@ Readable.prototype.unpipe = function(dest) {
state.pipesCount = 0;
state.flowing = false;
if (dest)
- dest.emit('unpipe', this);
+ dest.emit('unpipe', this, unpipeInfo);
return this;
}
@@ -662,7 +666,7 @@ Readable.prototype.unpipe = function(dest) {
state.flowing = false;
for (var i = 0; i < len; i++)
- dests[i].emit('unpipe', this);
+ dests[i].emit('unpipe', this, unpipeInfo);
return this;
}
@@ -676,7 +680,7 @@ Readable.prototype.unpipe = function(dest) {
if (state.pipesCount === 1)
state.pipes = state.pipes[0];
- dest.emit('unpipe', this);
+ dest.emit('unpipe', this, unpipeInfo);
return this;
};
diff --git a/test/parallel/test-stream-pipe-same-destination-twice.js b/test/parallel/test-stream-pipe-same-destination-twice.js
new file mode 100644
index 00000000000000..1824c0606451a2
--- /dev/null
+++ b/test/parallel/test-stream-pipe-same-destination-twice.js
@@ -0,0 +1,78 @@
+'use strict';
+const common = require('../common');
+
+// Regression test for https://github.com/nodejs/node/issues/12718.
+// Tests that piping a source stream twice to the same destination stream
+// works, and that a subsequent unpipe() call only removes the pipe *once*.
+const assert = require('assert');
+const { PassThrough, Writable } = require('stream');
+
+{
+ const passThrough = new PassThrough();
+ const dest = new Writable({
+ write: common.mustCall((chunk, encoding, cb) => {
+ assert.strictEqual(`${chunk}`, 'foobar');
+ cb();
+ })
+ });
+
+ passThrough.pipe(dest);
+ passThrough.pipe(dest);
+
+ assert.strictEqual(passThrough._events.data.length, 2);
+ assert.strictEqual(passThrough._readableState.pipesCount, 2);
+ assert.strictEqual(passThrough._readableState.pipes[0], dest);
+ assert.strictEqual(passThrough._readableState.pipes[1], dest);
+
+ passThrough.unpipe(dest);
+
+ assert.strictEqual(passThrough._events.data.length, 1);
+ assert.strictEqual(passThrough._readableState.pipesCount, 1);
+ assert.strictEqual(passThrough._readableState.pipes, dest);
+
+ passThrough.write('foobar');
+ passThrough.pipe(dest);
+}
+
+{
+ const passThrough = new PassThrough();
+ const dest = new Writable({
+ write: common.mustCall((chunk, encoding, cb) => {
+ assert.strictEqual(`${chunk}`, 'foobar');
+ cb();
+ }, 2)
+ });
+
+ passThrough.pipe(dest);
+ passThrough.pipe(dest);
+
+ assert.strictEqual(passThrough._events.data.length, 2);
+ assert.strictEqual(passThrough._readableState.pipesCount, 2);
+ assert.strictEqual(passThrough._readableState.pipes[0], dest);
+ assert.strictEqual(passThrough._readableState.pipes[1], dest);
+
+ passThrough.write('foobar');
+}
+
+{
+ const passThrough = new PassThrough();
+ const dest = new Writable({
+ write: common.mustNotCall()
+ });
+
+ passThrough.pipe(dest);
+ passThrough.pipe(dest);
+
+ assert.strictEqual(passThrough._events.data.length, 2);
+ assert.strictEqual(passThrough._readableState.pipesCount, 2);
+ assert.strictEqual(passThrough._readableState.pipes[0], dest);
+ assert.strictEqual(passThrough._readableState.pipes[1], dest);
+
+ passThrough.unpipe(dest);
+ passThrough.unpipe(dest);
+
+ assert.strictEqual(passThrough._events.data, undefined);
+ assert.strictEqual(passThrough._readableState.pipesCount, 0);
+
+ passThrough.write('foobar');
+}
From e54b8e818428ae1146030d3500cb637488a21edc Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=E9=99=88=E5=88=9A?=
Date: Sat, 20 Jan 2018 11:06:26 +0800
Subject: [PATCH 043/227] stream: cleanup() when unpiping all streams.
This PR makes sure the state object emitted with the 'unpipe'
event on each destination stream is not shared between
destinations, as it would otherwise be mutated.
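A minimal sketch of the case covered by the new test (the stream setup is
hypothetical): when a source is piped to two different destinations and then
unpiped from all of them at once, each destination must receive its own state
object so that both are cleaned up.

```js
'use strict';
const { Readable, Writable } = require('stream');

const source = new Readable({ read() {} });
const dest1 = new Writable({ write(chunk, encoding, cb) { cb(); } });
const dest2 = new Writable({ write(chunk, encoding, cb) { cb(); } });

source.pipe(dest1);
source.pipe(dest2);

// Unpipe everything at once. Each destination gets a fresh
// `{ hasUnpiped: false }` object, so each one runs its cleanup
// instead of being skipped because a shared object was already mutated.
source.unpipe();
```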
Refs: https://github.com/nodejs/node/pull/12746
PR-URL: https://github.com/nodejs/node/pull/18266
Reviewed-By: Luigi Pinca
Reviewed-By: Anatoli Papirovski
Reviewed-By: Matteo Collina
Reviewed-By: James M Snell
Reviewed-By: Ruben Bridgewater
Reviewed-By: Anna Henningsen
---
lib/_stream_readable.js | 2 +-
.../test-stream-pipe-unpipe-streams.js | 54 +++++++++++++++++++
2 files changed, 55 insertions(+), 1 deletion(-)
diff --git a/lib/_stream_readable.js b/lib/_stream_readable.js
index 39a7ec8d93ad53..21268eacd02748 100644
--- a/lib/_stream_readable.js
+++ b/lib/_stream_readable.js
@@ -666,7 +666,7 @@ Readable.prototype.unpipe = function(dest) {
state.flowing = false;
for (var i = 0; i < len; i++)
- dests[i].emit('unpipe', this, unpipeInfo);
+ dests[i].emit('unpipe', this, { hasUnpiped: false });
return this;
}
diff --git a/test/parallel/test-stream-pipe-unpipe-streams.js b/test/parallel/test-stream-pipe-unpipe-streams.js
index 7e425aec1e379e..0e49dba5b8212b 100644
--- a/test/parallel/test-stream-pipe-unpipe-streams.js
+++ b/test/parallel/test-stream-pipe-unpipe-streams.js
@@ -31,3 +31,57 @@ source.unpipe(dest2);
source.unpipe(dest1);
assert.strictEqual(source._readableState.pipes, null);
+
+{
+ // test `cleanup()` if we unpipe all streams.
+ const source = Readable({ read: () => {} });
+ const dest1 = Writable({ write: () => {} });
+ const dest2 = Writable({ write: () => {} });
+
+ let destCount = 0;
+ const srcCheckEventNames = ['end', 'data'];
+ const destCheckEventNames = ['close', 'finish', 'drain', 'error', 'unpipe'];
+
+ const checkSrcCleanup = common.mustCall(() => {
+ assert.strictEqual(source._readableState.pipes, null);
+ assert.strictEqual(source._readableState.pipesCount, 0);
+ assert.strictEqual(source._readableState.flowing, false);
+
+ srcCheckEventNames.forEach((eventName) => {
+ assert.strictEqual(
+ source.listenerCount(eventName), 0,
+ `source's '${eventName}' event listeners not removed`
+ );
+ });
+ });
+
+ function checkDestCleanup(dest) {
+ const currentDestId = ++destCount;
+ source.pipe(dest);
+
+ const unpipeChecker = common.mustCall(() => {
+ assert.deepStrictEqual(
+ dest.listeners('unpipe'), [unpipeChecker],
+ `destination{${currentDestId}} should have a 'unpipe' event ` +
+ 'listener which is `unpipeChecker`'
+ );
+ dest.removeListener('unpipe', unpipeChecker);
+ destCheckEventNames.forEach((eventName) => {
+ assert.strictEqual(
+ dest.listenerCount(eventName), 0,
+ `destination{${currentDestId}}'s '${eventName}' event ` +
+ 'listeners not removed'
+ );
+ });
+
+ if (--destCount === 0)
+ checkSrcCleanup();
+ });
+
+ dest.on('unpipe', unpipeChecker);
+ }
+
+ checkDestCleanup(dest1);
+ checkDestCleanup(dest2);
+ source.unpipe();
+}
From a9562fe30cc3a9f95078ace013e85432999857b9 Mon Sep 17 00:00:00 2001
From: Jason Ginchereau
Date: Mon, 20 Mar 2017 14:55:26 -0700
Subject: [PATCH 044/227] n-api: add support for abi stable module API
Add support for the ABI-stable module API (N-API) as an "Experimental feature".
The goal of this API is to provide a stable Node API for native
module developers. N-API aims to provide ABI compatibility guarantees
across different Node versions and also across different
Node VMs, allowing N-API-enabled native modules to just work
across different versions and flavors of Node.js without recompilation.
A more detailed introduction is provided in:
https://github.com/nodejs/node-eps/blob/master/005-ABI-Stable-Module-API.md
and https://github.com/nodejs/abi-stable-node/blob/doc/VM%20Summit.pdf.
During its experimental state, the feature will be guarded by the runtime
flag "--napi-modules". Only when this flag is added to the command line
will N-API modules be supported alongside regular non-N-API modules.
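For example, once one of the addons added by this patch has been built, it can
be loaded like any other compiled addon, provided the process is started with
the flag. A sketch mirroring `test/addons-napi/1_hello_world/test.js`; the
addon path is illustrative and depends on the local build type:

```js
// Run as: node --napi-modules test.js
'use strict';
const assert = require('assert');
const binding = require('./build/Release/binding'); // hypothetical build path
assert.strictEqual(binding.hello(), 'world');
```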
The API is defined by the methods in "src/node_api.h" and
"src/node_api_types.h". These headers are the best
starting point for reviewing the API surface. More documentation will follow.
In addition to the implementation of the API using V8, which is included
in this PR, the API has also been validated against chakracore and that
port is available in
https://github.com/nodejs/abi-stable-node/tree/api-prototype-chakracore-8.x.
The current plan is to provide N-API support in versions 8.x and 6.x
directly. For older versions, such as 4.x or pre-N-API versions of 6.x,
we plan to create an external npm module to provide a migration path
that will allow modules targeting older Node.js versions to use the API,
albeit without the advantage of avoiding recompilation.
In addition, we plan an external npm package with C++ sugar to
simplify use of the API. The sugar will be inline only and will
use only the exported N-API methods, but it is not part of N-API
itself. The current version is in:
https://github.com/nodejs/node-api.
This PR is the result of work in the abi-stable-node repo:
https://github.com/nodejs/abi-stable-node/tree/doc,
and represents the cumulative work on the api-prototype-8.x
branch, with the following contributors in alphabetical order:
Author: Arunesh Chandra
Author: Gabriel Schulhof
Author: Hitesh Kanwathirtha
Author: Ian Halliday
Author: Jason Ginchereau
Author: Michael Dawson
Author: Sampson Gao
Author: Taylor Woll
Backport-PR-URL: https://github.com/nodejs/node/pull/19447
PR-URL: https://github.com/nodejs/node/pull/11975
Reviewed-By: Anna Henningsen
Reviewed-By: James M Snell
---
Makefile | 60 +-
doc/api/cli.md | 8 +
doc/node.1 | 5 +
node.gyp | 3 +
src/node.cc | 29 +-
src/node_api.cc | 2529 +++++++++++++++++
src/node_api.h | 479 ++++
src/node_api_backport.h | 10 +
src/node_api_types.h | 95 +
test/addons-napi/.gitignore | 7 +
test/addons-napi/1_hello_world/binding.c | 22 +
test/addons-napi/1_hello_world/binding.gyp | 8 +
test/addons-napi/1_hello_world/test.js | 6 +
.../2_function_arguments/binding.c | 58 +
.../2_function_arguments/binding.gyp | 8 +
test/addons-napi/2_function_arguments/test.js | 6 +
test/addons-napi/3_callbacks/binding.c | 51 +
test/addons-napi/3_callbacks/binding.gyp | 8 +
test/addons-napi/3_callbacks/test.js | 22 +
test/addons-napi/4_object_factory/binding.c | 31 +
test/addons-napi/4_object_factory/binding.gyp | 8 +
test/addons-napi/4_object_factory/test.js | 8 +
test/addons-napi/5_function_factory/binding.c | 36 +
.../5_function_factory/binding.gyp | 8 +
test/addons-napi/5_function_factory/test.js | 7 +
test/addons-napi/6_object_wrap/binding.cc | 7 +
test/addons-napi/6_object_wrap/binding.gyp | 8 +
test/addons-napi/6_object_wrap/myobject.cc | 201 ++
test/addons-napi/6_object_wrap/myobject.h | 26 +
test/addons-napi/6_object_wrap/test.js | 19 +
test/addons-napi/7_factory_wrap/binding.cc | 32 +
test/addons-napi/7_factory_wrap/binding.gyp | 8 +
test/addons-napi/7_factory_wrap/myobject.cc | 110 +
test/addons-napi/7_factory_wrap/myobject.h | 26 +
test/addons-napi/7_factory_wrap/test.js | 14 +
test/addons-napi/8_passing_wrapped/binding.cc | 57 +
.../addons-napi/8_passing_wrapped/binding.gyp | 8 +
.../addons-napi/8_passing_wrapped/myobject.cc | 81 +
test/addons-napi/8_passing_wrapped/myobject.h | 26 +
test/addons-napi/8_passing_wrapped/test.js | 9 +
test/addons-napi/test_array/binding.gyp | 8 +
test/addons-napi/test_array/test.js | 37 +
test/addons-napi/test_array/test_array.c | 137 +
test/addons-napi/test_buffer/binding.gyp | 8 +
test/addons-napi/test_buffer/test.js | 25 +
test/addons-napi/test_buffer/test_buffer.c | 160 ++
test/addons-napi/test_constructor/binding.gyp | 8 +
test/addons-napi/test_constructor/test.js | 28 +
.../test_constructor/test_constructor.c | 104 +
test/addons-napi/test_conversions/binding.gyp | 8 +
test/addons-napi/test_conversions/test.js | 140 +
.../test_conversions/test_conversions.c | 237 ++
test/addons-napi/test_error/binding.gyp | 8 +
test/addons-napi/test_error/test.js | 57 +
test/addons-napi/test_error/test_error.cc | 37 +
test/addons-napi/test_exception/binding.gyp | 8 +
test/addons-napi/test_exception/test.js | 53 +
.../test_exception/test_exception.c | 75 +
test/addons-napi/test_function/binding.gyp | 8 +
test/addons-napi/test_function/test.js | 28 +
.../addons-napi/test_function/test_function.c | 55 +
test/addons-napi/test_instanceof/binding.gyp | 8 +
test/addons-napi/test_instanceof/test.js | 87 +
.../test_instanceof/test_instanceof.c | 39 +
test/addons-napi/test_number/binding.gyp | 8 +
test/addons-napi/test_number/test.js | 39 +
test/addons-napi/test_number/test_number.c | 55 +
test/addons-napi/test_object/binding.gyp | 8 +
test/addons-napi/test_object/test.js | 65 +
test/addons-napi/test_object/test_object.c | 246 ++
test/addons-napi/test_properties/binding.gyp | 8 +
test/addons-napi/test_properties/test.js | 27 +
.../test_properties/test_properties.c | 85 +
test/addons-napi/test_string/binding.gyp | 8 +
test/addons-napi/test_string/test.js | 26 +
test/addons-napi/test_string/test_string.c | 134 +
test/addons-napi/test_symbol/binding.gyp | 8 +
test/addons-napi/test_symbol/test1.js | 20 +
test/addons-napi/test_symbol/test2.js | 15 +
test/addons-napi/test_symbol/test3.js | 19 +
test/addons-napi/test_symbol/test_symbol.c | 94 +
test/addons-napi/test_typedarray/binding.gyp | 8 +
test/addons-napi/test_typedarray/test.js | 39 +
.../test_typedarray/test_typedarray.c | 144 +
test/addons-napi/testcfg.py | 6 +
test/testpy/__init__.py | 2 +-
tools/install.py | 2 +
tools/test.py | 1 +
vcbuild.bat | 30 +-
89 files changed, 6586 insertions(+), 18 deletions(-)
create mode 100644 src/node_api.cc
create mode 100644 src/node_api.h
create mode 100644 src/node_api_backport.h
create mode 100644 src/node_api_types.h
create mode 100644 test/addons-napi/.gitignore
create mode 100644 test/addons-napi/1_hello_world/binding.c
create mode 100644 test/addons-napi/1_hello_world/binding.gyp
create mode 100644 test/addons-napi/1_hello_world/test.js
create mode 100644 test/addons-napi/2_function_arguments/binding.c
create mode 100644 test/addons-napi/2_function_arguments/binding.gyp
create mode 100644 test/addons-napi/2_function_arguments/test.js
create mode 100644 test/addons-napi/3_callbacks/binding.c
create mode 100644 test/addons-napi/3_callbacks/binding.gyp
create mode 100644 test/addons-napi/3_callbacks/test.js
create mode 100644 test/addons-napi/4_object_factory/binding.c
create mode 100644 test/addons-napi/4_object_factory/binding.gyp
create mode 100644 test/addons-napi/4_object_factory/test.js
create mode 100644 test/addons-napi/5_function_factory/binding.c
create mode 100644 test/addons-napi/5_function_factory/binding.gyp
create mode 100644 test/addons-napi/5_function_factory/test.js
create mode 100644 test/addons-napi/6_object_wrap/binding.cc
create mode 100644 test/addons-napi/6_object_wrap/binding.gyp
create mode 100644 test/addons-napi/6_object_wrap/myobject.cc
create mode 100644 test/addons-napi/6_object_wrap/myobject.h
create mode 100644 test/addons-napi/6_object_wrap/test.js
create mode 100644 test/addons-napi/7_factory_wrap/binding.cc
create mode 100644 test/addons-napi/7_factory_wrap/binding.gyp
create mode 100644 test/addons-napi/7_factory_wrap/myobject.cc
create mode 100644 test/addons-napi/7_factory_wrap/myobject.h
create mode 100644 test/addons-napi/7_factory_wrap/test.js
create mode 100644 test/addons-napi/8_passing_wrapped/binding.cc
create mode 100644 test/addons-napi/8_passing_wrapped/binding.gyp
create mode 100644 test/addons-napi/8_passing_wrapped/myobject.cc
create mode 100644 test/addons-napi/8_passing_wrapped/myobject.h
create mode 100644 test/addons-napi/8_passing_wrapped/test.js
create mode 100644 test/addons-napi/test_array/binding.gyp
create mode 100644 test/addons-napi/test_array/test.js
create mode 100644 test/addons-napi/test_array/test_array.c
create mode 100644 test/addons-napi/test_buffer/binding.gyp
create mode 100644 test/addons-napi/test_buffer/test.js
create mode 100644 test/addons-napi/test_buffer/test_buffer.c
create mode 100644 test/addons-napi/test_constructor/binding.gyp
create mode 100644 test/addons-napi/test_constructor/test.js
create mode 100644 test/addons-napi/test_constructor/test_constructor.c
create mode 100644 test/addons-napi/test_conversions/binding.gyp
create mode 100644 test/addons-napi/test_conversions/test.js
create mode 100644 test/addons-napi/test_conversions/test_conversions.c
create mode 100644 test/addons-napi/test_error/binding.gyp
create mode 100644 test/addons-napi/test_error/test.js
create mode 100644 test/addons-napi/test_error/test_error.cc
create mode 100644 test/addons-napi/test_exception/binding.gyp
create mode 100644 test/addons-napi/test_exception/test.js
create mode 100644 test/addons-napi/test_exception/test_exception.c
create mode 100644 test/addons-napi/test_function/binding.gyp
create mode 100644 test/addons-napi/test_function/test.js
create mode 100644 test/addons-napi/test_function/test_function.c
create mode 100644 test/addons-napi/test_instanceof/binding.gyp
create mode 100644 test/addons-napi/test_instanceof/test.js
create mode 100644 test/addons-napi/test_instanceof/test_instanceof.c
create mode 100644 test/addons-napi/test_number/binding.gyp
create mode 100644 test/addons-napi/test_number/test.js
create mode 100644 test/addons-napi/test_number/test_number.c
create mode 100644 test/addons-napi/test_object/binding.gyp
create mode 100644 test/addons-napi/test_object/test.js
create mode 100644 test/addons-napi/test_object/test_object.c
create mode 100644 test/addons-napi/test_properties/binding.gyp
create mode 100644 test/addons-napi/test_properties/test.js
create mode 100644 test/addons-napi/test_properties/test_properties.c
create mode 100644 test/addons-napi/test_string/binding.gyp
create mode 100644 test/addons-napi/test_string/test.js
create mode 100644 test/addons-napi/test_string/test_string.c
create mode 100644 test/addons-napi/test_symbol/binding.gyp
create mode 100644 test/addons-napi/test_symbol/test1.js
create mode 100644 test/addons-napi/test_symbol/test2.js
create mode 100644 test/addons-napi/test_symbol/test3.js
create mode 100644 test/addons-napi/test_symbol/test_symbol.c
create mode 100644 test/addons-napi/test_typedarray/binding.gyp
create mode 100644 test/addons-napi/test_typedarray/test.js
create mode 100644 test/addons-napi/test_typedarray/test_typedarray.c
create mode 100644 test/addons-napi/testcfg.py
diff --git a/Makefile b/Makefile
index c947a2a4367d05..523a3d8c541ed8 100644
--- a/Makefile
+++ b/Makefile
@@ -120,9 +120,10 @@ v8:
test: all
$(MAKE) build-addons
+ $(MAKE) build-addons-napi
$(MAKE) cctest
$(PYTHON) tools/test.py --mode=release -J \
- doctool inspector known_issues message pseudo-tty parallel sequential addons
+ doctool inspector known_issues message pseudo-tty parallel sequential addons addons-napi
$(MAKE) lint
test-parallel: all
@@ -189,6 +190,41 @@ test/addons/.buildstamp: config.gypi \
# TODO(bnoordhuis) Force rebuild after gyp update.
build-addons: $(NODE_EXE) test/addons/.buildstamp
+ADDONS_NAPI_BINDING_GYPS := \
+ $(filter-out test/addons-napi/??_*/binding.gyp, \
+ $(wildcard test/addons-napi/*/binding.gyp))
+
+ADDONS_NAPI_BINDING_SOURCES := \
+ $(filter-out test/addons-napi/??_*/*.cc, $(wildcard test/addons-napi/*/*.cc)) \
+ $(filter-out test/addons-napi/??_*/*.h, $(wildcard test/addons-napi/*/*.h))
+
+# Implicitly depends on $(NODE_EXE), see the build-addons-napi rule for rationale.
+test/addons-napi/.buildstamp: config.gypi \
+ deps/npm/node_modules/node-gyp/package.json \
+ $(ADDONS_NAPI_BINDING_GYPS) $(ADDONS_NAPI_BINDING_SOURCES) \
+ deps/uv/include/*.h deps/v8/include/*.h \
+ src/node.h src/node_buffer.h src/node_object_wrap.h src/node_version.h \
+ src/node_api.h src/node_api_types.h
+# Cannot use $(wildcard test/addons-napi/*/) here, it's evaluated before
+# embedded addons have been generated from the documentation.
+ @for dirname in test/addons-napi/*/; do \
+ printf "\nBuilding addon $$PWD/$$dirname\n" ; \
+ env MAKEFLAGS="-j1" $(NODE) deps/npm/node_modules/node-gyp/bin/node-gyp \
+ --loglevel=$(LOGLEVEL) rebuild \
+ --python="$(PYTHON)" \
+ --directory="$$PWD/$$dirname" \
+ --nodedir="$$PWD" || exit 1 ; \
+ done
+ touch $@
+
+# .buildstamp and .docbuildstamp need $(NODE_EXE) but cannot depend on it
+# directly because it calls make recursively. The parent make cannot know
+# if the subprocess touched anything so it pessimistically assumes that
+# .buildstamp and .docbuildstamp are out of date and need a rebuild.
+# Just goes to show that recursive make really is harmful...
+# TODO(bnoordhuis) Force rebuild after gyp or node-gyp update.
+build-addons-napi: $(NODE_EXE) test/addons-napi/.buildstamp
+
clear-stalled:
# Clean up any leftover processes but don't error if found.
ps awwx | grep Release/node | grep -v grep | cat
@@ -200,7 +236,9 @@ clear-stalled:
test-gc: all test/gc/node_modules/weak/build/Release/weakref.node
$(PYTHON) tools/test.py --mode=release gc
-test-build: | all build-addons
+test-build: | all build-addons build-addons-napi
+
+test-build-addons-napi: all build-addons-napi
test-all: test-build test/gc/node_modules/weak/build/Release/weakref.node
$(PYTHON) tools/test.py --mode=debug,release
@@ -208,12 +246,12 @@ test-all: test-build test/gc/node_modules/weak/build/Release/weakref.node
test-all-valgrind: test-build
$(PYTHON) tools/test.py --mode=debug,release --valgrind
-CI_NATIVE_SUITES := addons
+CI_NATIVE_SUITES := addons addons-napi
CI_JS_SUITES := doctool inspector known_issues message parallel pseudo-tty sequential
# Build and test addons without building anything else
test-ci-native: LOGLEVEL := info
-test-ci-native: | test/addons/.buildstamp
+test-ci-native: | test/addons/.buildstamp test/addons-napi/.buildstamp
$(PYTHON) tools/test.py $(PARALLEL_ARGS) -p tap --logfile test.tap \
--mode=release --flaky-tests=$(FLAKY_TESTS) \
$(TEST_CI_ARGS) $(CI_NATIVE_SUITES)
@@ -231,11 +269,11 @@ test-ci-js: | clear-stalled
fi
test-ci: LOGLEVEL := info
-test-ci: | clear-stalled build-addons
+test-ci: | clear-stalled build-addons build-addons-napi
out/Release/cctest --gtest_output=tap:cctest.tap
$(PYTHON) tools/test.py $(PARALLEL_ARGS) -p tap --logfile test.tap \
--mode=release --flaky-tests=$(FLAKY_TESTS) \
- $(TEST_CI_ARGS) $(CI_JS_SUITES) $(CI_NATIVE_SUITES)
+ $(TEST_CI_ARGS) $(CI_JS_SUITES) addons-napi $(CI_NATIVE_SUITES)
# Clean up any leftover processes, error if found.
ps awwx | grep Release/node | grep -v grep | cat
@PS_OUT=`ps awwx | grep Release/node | grep -v grep | awk '{print $$1}'`; \
@@ -282,7 +320,10 @@ test-npm: $(NODE_EXE)
test-npm-publish: $(NODE_EXE)
npm_package_config_publishtest=true $(NODE) deps/npm/test/run.js
-test-addons: test-build
+test-addons-napi: test-build-addons-napi
+ $(PYTHON) tools/test.py --mode=release addons-napi
+
+test-addons: test-build test-addons-napi
$(PYTHON) tools/test.py --mode=release addons
test-addons-clean:
@@ -813,6 +854,8 @@ LINT_CPP_FILES = $(filter-out $(LINT_CPP_EXCLUDE), $(wildcard \
test/addons/*/*.h \
test/cctest/*.cc \
test/cctest/*.h \
+ test/addons-napi/*/*.cc \
+ test/addons-napi/*/*.h \
tools/icu/*.cc \
tools/icu/*.h \
))
@@ -857,5 +900,6 @@ endif
bench-buffer bench-net bench-http bench-fs bench-tls cctest run-ci \
test-v8 test-v8-intl test-v8-benchmarks test-v8-all v8 lint-ci \
bench-ci lint-js-ci doc-only $(TARBALL)-headers test-ci test-ci-native \
- test-ci-js build-ci test-hash-seed clear-stalled
+ test-ci-js build-ci test-hash-seed clear-stalled test-addons-napi \
+ build-addons-napi
diff --git a/doc/api/cli.md b/doc/api/cli.md
index a8aee18e3efd3e..281a7fbbfba9c3 100644
--- a/doc/api/cli.md
+++ b/doc/api/cli.md
@@ -119,6 +119,14 @@ added: v6.0.0
Silence all process warnings (including deprecations).
+### `--napi-modules`
+
+
+Enable loading native modules compiled with the ABI-stable Node.js API (N-API)
+(experimental).
+
### `--trace-warnings`
+```C
+NAPI_EXTERN napi_status
+napi_get_last_error_info(napi_env env,
+ const napi_extended_error_info** result);
+```
+- `[in] env`: The environment that the API is invoked under.
+- `[out] result`: The `napi_extended_error_info` structure with more
+information about the error.
+
+Returns `napi_ok` if the API succeeded.
+
+This API retrieves a `napi_extended_error_info` structure with information
+about the last error that occurred.
+
+**Note:** Do not rely on the content or format of any of the extended
+information as it is not subject to SemVer and may change at any time.
+It is intended only for logging purposes.
+
+
+### Exceptions
+Any N-API function call may result in a pending JavaScript exception. This is
+obviously the case for any function that may cause the execution of
+JavaScript, but N-API specifies that an exception may be pending
+on return from any of the API functions.
+
+If the `napi_status` returned by a function is `napi_ok` then no
+exception is pending and no additional action is required. If the
+`napi_status` returned is anything other than `napi_ok` or
+`napi_pending_exception`, in order to try to recover and continue
+instead of simply returning immediately, [`napi_is_exception_pending`][]
+must be called in order to determine if an exception is pending or not.
+
+When an exception is pending one of two approaches can be employed.
+
+The first approach is to do any appropriate cleanup and then return so that
+execution will return to JavaScript. As part of the transition back to
+JavaScript the exception will be thrown at the point in the JavaScript
+code where the native method was invoked. The behavior of most N-API calls
+is unspecified while an exception is pending, and many will simply return
+`napi_pending_exception`, so it is important to do as little as possible
+and then return to JavaScript where the exception can be handled.
+
+The second approach is to try to handle the exception. There will be cases
+where the native code can catch the exception, take the appropriate action,
+and then continue. This is only recommended in specific cases
+where it is known that the exception can be safely handled. In these
+cases [`napi_get_and_clear_last_exception`][] can be used to get and
+clear the exception. On success, result will contain the handle to
+the last JavaScript Object thrown. If, after retrieving the exception,
+it is determined that the exception cannot be handled after all, it can
+be re-thrown with [`napi_throw`][] where error is the JavaScript Error
+object to be thrown.
+
+The following utility functions are also available in case native code
+needs to throw an exception or determine if a `napi_value` is an instance
+of a JavaScript `Error` object: [`napi_throw_error`][],
+[`napi_throw_type_error`][], [`napi_throw_range_error`][] and
+[`napi_is_error`][].
+
+The following utility functions are also available in case native
+code needs to create an Error object: [`napi_create_error`][],
+[`napi_create_type_error`][], and [`napi_create_range_error`][],
+where result is the `napi_value` that refers to the newly created
+JavaScript Error object.
+
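+As a minimal, hedged sketch of the first approach (the helper name `DoWork`
+and its signature are hypothetical), a native method might check whether an
+exception is pending before deciding how to proceed:
+
+```C
+// Hypothetical helper illustrating the pattern described above. If an N-API
+// call fails, determine whether a JavaScript exception is pending; if so, do
+// minimal cleanup and return so the exception is thrown in JavaScript.
+napi_value DoWork(napi_env env, napi_value arg) {
+  napi_value number;
+  napi_status status = napi_coerce_to_number(env, arg, &number);
+  if (status == napi_ok) return number;
+
+  bool is_pending;
+  if (napi_is_exception_pending(env, &is_pending) == napi_ok && !is_pending) {
+    // No exception is pending, so report the failure ourselves.
+    napi_throw_error(env, "Unable to coerce the argument to a number");
+  }
+  // An exception is (now) pending; return to JavaScript as soon as possible.
+  return NULL;
+}
+```
+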
+#### napi_throw
+
+```C
+NODE_EXTERN napi_status napi_throw(napi_env env, napi_value error);
+```
+- `[in] env`: The environment that the API is invoked under.
+- `[in] error`: The `napi_value` for the Error to be thrown.
+
+Returns `napi_ok` if the API succeeded.
+
+This API throws the JavaScript Error provided.
+
+
+#### napi_throw_error
+
+```C
+NODE_EXTERN napi_status napi_throw_error(napi_env env, const char* msg);
+```
+- `[in] env`: The environment that the API is invoked under.
+- `[in] msg`: C string representing the text to be associated with
+the error.
+
+Returns `napi_ok` if the API succeeded.
+
+This API throws a JavaScript Error with the text provided.
+
+#### napi_throw_type_error
+
+```C
+NODE_EXTERN napi_status napi_throw_type_error(napi_env env, const char* msg);
+```
+- `[in] env`: The environment that the API is invoked under.
+- `[in] msg`: C string representing the text to be associated with
+the error.
+
+Returns `napi_ok` if the API succeeded.
+
+This API throws a JavaScript TypeError with the text provided.
+
+#### napi_throw_range_error
+
+```C
+NODE_EXTERN napi_status napi_throw_range_error(napi_env env, const char* msg);
+```
+- `[in] env`: The environment that the API is invoked under.
+- `[in] msg`: C string representing the text to be associated with
+the error.
+
+Returns `napi_ok` if the API succeeded.
+
+This API throws a JavaScript RangeError with the text provided.
+
+
+#### napi_is_error
+
+```C
+NODE_EXTERN napi_status napi_is_error(napi_env env,
+ napi_value value,
+ bool* result);
+```
+- `[in] env`: The environment that the API is invoked under.
+- `[in] value`: The `napi_value` to be checked.
+- `[out] result`: Boolean value that is set to true if `napi_value` represents
+an error, false otherwise.
+
+Returns `napi_ok` if the API succeeded.
+
+This API queries a `napi_value` to check if it represents an error object.
+
+
+#### napi_create_error
+
+```C
+NODE_EXTERN napi_status napi_create_error(napi_env env, const char* msg);
+```
+- `[in] env`: The environment that the API is invoked under.
+- `[in] msg`: C string representing the text to be associated with
+the error.
+- `[out] result`: `napi_value` representing the error created.
+
+Returns `napi_ok` if the API succeeded.
+
+This API returns a JavaScript Error with the text provided.
+
+#### napi_create_type_error
+
+```C
+NODE_EXTERN napi_status napi_create_type_error(napi_env env, const char* msg);
+```
+- `[in] env`: The environment that the API is invoked under.
+- `[in] msg`: C string representing the text to be associated with
+the error.
+- `[out] result`: `napi_value` representing the error created.
+
+Returns `napi_ok` if the API succeeded.
+
+This API returns a JavaScript TypeError with the text provided.
+
+
+#### napi_create_range_error
+
+```C
+NODE_EXTERN napi_status napi_create_range_error(napi_env env, const char* msg);
+```
+- `[in] env`: The environment that the API is invoked under.
+- `[in] msg`: C string representing the text to be associated with
+the error.
+- `[out] result`: `napi_value` representing the error created.
+
+Returns `napi_ok` if the API succeeded.
+
+This API returns a JavaScript RangeError with the text provided.
+
+
+#### napi_get_and_clear_last_exception
+
+```C
+NAPI_EXTERN napi_status napi_get_and_clear_last_exception(napi_env env,
+ napi_value* result);
+```
+
+- `[in] env`: The environment that the API is invoked under.
+- `[out] result`: The exception if one is pending, NULL otherwise.
+
+Returns `napi_ok` if the API succeeded.
+
+This API gets and clears the last exception, if one is pending.
+
+#### napi_is_exception_pending
+
+```C
+NAPI_EXTERN napi_status napi_is_exception_pending(napi_env env, bool* result);
+```
+
+- `[in] env`: The environment that the API is invoked under.
+- `[out] result`: Boolean value that is set to true if an exception is pending.
+
+Returns `napi_ok` if the API succeeded.
+
+This API returns true if an exception is pending.
+
+
+## Object Lifetime management
+
+As N-API calls are made, handles to objects in the heap for the underlying
+VM may be returned as `napi_values`. These handles must hold the
+objects 'live' until they are no longer required by the native code,
+otherwise the objects could be collected before the native code was
+finished using them.
+
+As object handles are returned they are associated with a
+'scope'. The lifespan for the default scope is tied to the lifespan
+of the native method call. The result is that, by default, handles
+remain valid and the objects associated with these handles will be
+held live for the lifespan of the native method call.
+
+In many cases, however, it is necessary that the handles remain valid for
+either a shorter or longer lifespan than that of the native method.
+The sections which follow describe the N-API functions that can be used
+to change the handle lifespan from the default.
+
+### Making handle lifespan shorter than that of the native method
+It is often necessary to make the lifespan of handles shorter than
+the lifespan of a native method. For example, consider a native method
+that has a loop which iterates through the elements in a large array:
+
+```C
+for (int i = 0; i < 1000000; i++) {
+ napi_value result;
+  napi_status status = napi_get_element(env, object, i, &result);
+ if (status != napi_ok) {
+ break;
+ }
+ // do something with element
+}
+```
+
+This would result in a large number of handles being created, consuming
+substantial resources. In addition, even though the native code could only
+use the most recent handle, all of the associated objects would also be
+kept alive since they all share the same scope.
+
+To handle this case, N-API provides the ability to establish a new 'scope' to
+which newly created handles will be associated. Once those handles
+are no longer required, the scope can be 'closed' and any handles associated
+with the scope are invalidated. The methods available to open/close scopes are
+[`napi_open_handle_scope`][] and [`napi_close_handle_scope`][].
+
+N-API only supports a single nested hierarchy of scopes. There is only one
+active scope at any time, and all new handles will be associated with that
+scope while it is active. Scopes must be closed in the reverse order from
+which they are opened. In addition, all scopes created within a native method
+must be closed before returning from that method.
+
+Taking the earlier example, adding calls to [`napi_open_handle_scope`][] and
+[`napi_close_handle_scope`][] would ensure that at most a single handle
+is valid throughout the execution of the loop:
+
+```C
+for (int i = 0; i < 1000000; i++) {
+ napi_handle_scope scope;
+ napi_status status = napi_open_handle_scope(env, &scope);
+ if (status != napi_ok) {
+ break;
+ }
+ napi_value result;
+  status = napi_get_element(env, object, i, &result);
+ if (status != napi_ok) {
+ break;
+ }
+ // do something with element
+ status = napi_close_handle_scope(env, scope);
+ if (status != napi_ok) {
+ break;
+ }
+}
+```
+
+When nesting scopes, there are cases where a handle from an
+inner scope needs to live beyond the lifespan of that scope. N-API supports an
+'escapable scope' in order to support this case. An escapable scope
+allows one or more handles to be 'promoted' so that they 'escape' the
+current scope and the lifespan of the handle(s) changes from the current
+scope to that of the outer scope.
+
+The methods available to open/close escapable scopes are
+[`napi_open_escapable_handle_scope`][] and [`napi_close_escapable_handle_scope`][].
+
+The request to promote a handle is made through the [`napi_escape_handle`][].
+
+#### napi_open_handle_scope
+
+```C
+NODE_EXTERN napi_status napi_open_handle_scope(napi_env env,
+ napi_handle_scope* result);
+```
+- `[in] env`: The environment that the API is invoked under.
+- `[out] result`: `napi_handle_scope` representing the new scope.
+
+Returns `napi_ok` if the API succeeded.
+
+This API opens a new scope.
+
+#### napi_close_handle_scope
+
+```C
+NODE_EXTERN napi_status napi_close_handle_scope(napi_env env,
+ napi_handle_scope scope);
+```
+- `[in] env`: The environment that the API is invoked under.
+- `[in] scope`: `napi_handle_scope` representing the scope to be closed.
+
+Returns `napi_ok` if the API succeeded.
+
+This API closes the scope passed in. Scopes must be closed in the
+reverse order from which they were created.
+
+#### napi_open_escapable_handle_scope
+
+```C
+NODE_EXTERN napi_status
+ napi_open_escapable_handle_scope(napi_env env,
+ napi_handle_scope* result);
+```
+- `[in] env`: The environment that the API is invoked under.
+- `[out] result`: `napi_value` representing the new scope.
+
+Returns `napi_ok` if the API succeeded.
+
+This API opens a new scope from which objects can be promoted
+to the outer scope.
+
+#### napi_close_escapable_handle_scope
+
+```C
+NODE_EXTERN napi_status
+ napi_close_escapable_handle_scope(napi_env env,
+ napi_handle_scope scope);
+```
+- `[in] env`: The environment that the API is invoked under.
+- `[in] scope`: `napi_value` representing the scope to be closed.
+
+Returns `napi_ok` if the API succeeded.
+
+This API closes the scope passed in. Scopes must be closed in the
+reverse order from which they were created.
+
+#### napi_escape_handle
+
+```C
+NAPI_EXTERN napi_status napi_escape_handle(napi_env env,
+ napi_escapable_handle_scope scope,
+ napi_value escapee,
+ napi_value* result);
+```
+
+- `[in] env`: The environment that the API is invoked under.
+- `[in] scope`: `napi_value` representing the current scope.
+- `[in] escapee`: `napi_value` representing the JavaScript Object to be escaped.
+- `[out] result`: `napi_value` representing the handle to the escaped
+Object in the outer scope.
+
+Returns `napi_ok` if the API succeeded.
+
+This API promotes the handle to the JavaScript object so that it is valid
+for the lifetime of the outer scope.
+
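+A short sketch of this pattern (hedged; the helper name is hypothetical and
+the scope type follows the `napi_escape_handle` signature above):
+
+```C
+// Create an object inside an escapable scope and promote it so the caller
+// can keep using the handle after the scope has been closed.
+napi_status CreateEscapedObject(napi_env env, napi_value* result) {
+  napi_escapable_handle_scope scope;
+  napi_status status = napi_open_escapable_handle_scope(env, &scope);
+  if (status != napi_ok) return status;
+
+  napi_value obj;
+  status = napi_create_object(env, &obj);
+  if (status == napi_ok) {
+    // Promote the handle so it remains valid in the outer scope.
+    status = napi_escape_handle(env, scope, obj, result);
+  }
+
+  napi_status close_status = napi_close_escapable_handle_scope(env, scope);
+  return status != napi_ok ? status : close_status;
+}
+```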
+
+### References to objects with a lifespan longer than that of the native method
+In some cases an addon will need to be able to create and reference objects
+with a lifespan longer than that of a single native method invocation. For
+example, to create a constructor and later use that constructor
+in a request to create instances, it must be possible to reference
+the constructor object across many different instance creation requests. This
+would not be possible with a normal handle returned as a `napi_value` as
+described in the earlier section. The lifespan of a normal handle is
+managed by scopes and all scopes must be closed before the end of a native
+method.
+
+N-API provides methods to create persistent references to an object.
+Each persistent reference has an associated count with a value of 0
+or higher. The count determines if the reference will keep
+the corresponding object live. References with a count of 0 do not
+prevent the object from being collected and are often called 'weak'
+references. Any count greater than 0 will prevent the object
+from being collected.
+
+References can be created with an initial reference count. The count can
+then be modified through [`napi_reference_ref`][] and
+[`napi_reference_unref`][]. If an object is collected while the count
+for a reference is 0, all subsequent calls to
+get the object associated with the reference [`napi_get_reference_value`][]
+will return NULL for the returned `napi_value`. An attempt to call
+[`napi_reference_ref`][] for a reference whose object has been collected
+will result in an error.
+
+References must be deleted once they are no longer required by the addon. When
+a reference is deleted it will no longer prevent the corresponding object from
+being collected. Failure to delete a persistent reference will result in
+a 'memory leak' with both the native memory for the persistent reference and
+the corresponding object on the heap being retained forever.
+
+There can be multiple persistent references created which refer to the same
+object, each of which will either keep the object live or not based on its
+individual count.
+
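+As a brief, hedged sketch (the static variable and helper names are
+hypothetical), an addon might keep a constructor alive across calls like this:
+
+```C
+// A persistent, strong (count of 1) reference kept by the addon so the
+// constructor outlives any single native method call.
+static napi_ref constructor_ref;
+
+napi_status StoreConstructor(napi_env env, napi_value cons) {
+  return napi_create_reference(env, cons, 1, &constructor_ref);
+}
+
+// Later, fetch the constructor back out of the reference before using it.
+napi_status GetConstructor(napi_env env, napi_value* cons) {
+  return napi_get_reference_value(env, constructor_ref, cons);
+}
+```
+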
+#### napi_create_reference
+
+```C
+NODE_EXTERN napi_status napi_create_reference(napi_env env,
+ napi_value value,
+ int initial_refcount,
+                                              napi_ref* result);
+```
+
+- `[in] env`: The environment that the API is invoked under.
+- `[in] value`: `napi_value` representing the Object to which we want
+a reference to.
+- `[in] initial_refcount`: Initial reference count for the new reference.
+- `[out] result`: `napi_ref` pointing to the new reference.
+
+Returns `napi_ok` if the API succeeded.
+
+This API creates a new reference with the specified reference count
+to the Object passed in.
+
+#### napi_delete_reference
+
+```C
+NODE_EXTERN napi_status napi_delete_reference(napi_env env, napi_ref ref);
+```
+
+- `[in] env`: The environment that the API is invoked under.
+- `[in] ref`: `napi_ref` to be deleted.
+
+Returns `napi_ok` if the API succeeded.
+
+This API deletes the reference passed in.
+
+#### napi_reference_ref
+
+```C
+NODE_EXTERN napi_status napi_reference_ref(napi_env env,
+ napi_ref ref,
+ int* result);
+```
+- `[in] env`: The environment that the API is invoked under.
+- `[in] ref`: `napi_ref` for which the reference count will be incremented.
+- `[out] result`: The new reference count.
+
+Returns `napi_ok` if the API succeeded.
+
+This API increments the reference count for the reference
+passed in and returns the resulting reference count.
+
+
+#### napi_reference_unref
+
+```C
+NODE_EXTERN napi_status napi_reference_unref(napi_env env,
+ napi_ref ref,
+ int* result);
+```
+- `[in] env`: The environment that the API is invoked under.
+- `[in] ref`: `napi_ref` for which the reference count will be decremented.
+- `[out] result`: The new reference count.
+
+Returns `napi_ok` if the API succeeded.
+
+This API decrements the reference count for the reference
+passed in and returns the resulting reference count.
+
+
+#### napi_get_reference_value
+
+```C
+NODE_EXTERN napi_status napi_get_reference_value(napi_env env,
+ napi_ref ref,
+ napi_value* result);
+```
+
+The `napi_value` passed in or out of these methods is a handle to the
+object to which the reference is related.
+- `[in] env`: The environment that the API is invoked under.
+- `[in] ref`: `napi_ref` for which we are requesting the corresponding
+Object.
+- `[out] result`: The `napi_value` for the Object referenced by the
+`napi_ref`.
+
+Returns `napi_ok` if the API succeeded.
+
+If still valid, this API returns the `napi_value` representing the
+JavaScript Object associated with the `napi_ref`. Otherwise, result
+will be NULL.
+
+## Module registration
+N-API modules are registered in the same manner as other modules
+except that instead of using the `NODE_MODULE` macro the following
+is used:
+
+```C
+NAPI_MODULE(addon, Init)
+```
+
+The next difference is the signature for the `Init` method. For an N-API
+module it is as follows:
+
+```C
+void Init(napi_env env, napi_value exports, napi_value module, void* priv);
+```
+
+As with any other module, functions are exported by either adding them to
+the `exports` or `module` objects passed to the `Init` method.
+
+For example, to add the method `hello` as a function so that it can be called
+as a method provided by the addon:
+
+```C
+void Init(napi_env env, napi_value exports, napi_value module, void* priv) {
+ napi_status status;
+ napi_property_descriptor desc =
+ {"hello", Method, 0, 0, 0, napi_default, 0};
+ status = napi_define_properties(env, exports, 1, &desc);
+}
+```
+
+For example, to set a function to be returned by the `require()` for the addon:
+
+```C
+void Init(napi_env env, napi_value exports, napi_value module, void* priv) {
+ napi_status status;
+ napi_property_descriptor desc =
+ {"exports", Method, 0, 0, 0, napi_default, 0};
+ status = napi_define_properties(env, module, 1, &desc);
+}
+```
+
+For example, to define a class so that new instances can be created
+(often used with [Object Wrap][]):
+
+```C
+// NOTE: partial example, not all referenced code is included
+
+napi_status status;
+napi_property_descriptor properties[] = {
+ { "value", nullptr, GetValue, SetValue, 0, napi_default, 0 },
+ DECLARE_NAPI_METHOD("plusOne", PlusOne),
+ DECLARE_NAPI_METHOD("multiply", Multiply),
+};
+
+napi_value cons;
+status =
+ napi_define_class(env, "MyObject", New, nullptr, 3, properties, &cons);
+if (status != napi_ok) return;
+
+status = napi_create_reference(env, cons, 1, &constructor);
+if (status != napi_ok) return;
+
+status = napi_set_named_property(env, exports, "MyObject", cons);
+if (status != napi_ok) return;
+```
+
+For more details on setting properties on either the `exports` or `module`
+objects, see the section on
+[Working with JavaScript Properties][].
+
+For more details on building addon modules in general, refer to the existing
+API documentation for native addons.
+
+## Working with JavaScript Values
+N-API exposes a set of APIs to create all types of JavaScript values.
+Some of these types are documented under
+[Section 6](https://tc39.github.io/ecma262/#sec-ecmascript-data-types-and-values)
+of the [ECMAScript Language Specification][].
+
+Fundamentally, these APIs are used to do one of the following:
+1. Create a new JavaScript object
+2. Convert from a primitive C type to an N-API value
+3. Convert from N-API value to a primitive C type
+4. Get global instances including `undefined` and `null`
+
+N-API values are represented by the type `napi_value`.
+Any N-API call that requires a JavaScript value takes in a `napi_value`.
+In some cases, the API does check the type of the `napi_value` up-front.
+However, for better performance, it's better for the caller to make sure that
+the `napi_value` in question is of the JavaScript type expected by the API.
+
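+For example, a minimal sketch (error handling follows the style of the
+earlier examples; `env` is assumed to be the `napi_env` passed to the native
+method) converting between a C double and a JavaScript Number using APIs
+described later in this section:
+
+```C
+// Convert a C double into a JavaScript Number (item 2 above), then read it
+// back as a C double (item 3 above).
+napi_value js_number;
+napi_status status = napi_create_number(env, 3.141592653589793, &js_number);
+if (status != napi_ok) return;
+
+double c_value;
+status = napi_get_value_double(env, js_number, &c_value);
+if (status != napi_ok) return;
+```
+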
+### Enum types
+#### *napi_valuetype*
+```C
+typedef enum {
+ // ES6 types (corresponds to typeof)
+ napi_undefined,
+ napi_null,
+ napi_boolean,
+ napi_number,
+ napi_string,
+ napi_symbol,
+ napi_object,
+ napi_function,
+ napi_external,
+} napi_valuetype;
+```
+
+Describes the type of a `napi_value`. This generally corresponds to the types
+described in
+[Section 6.1](https://tc39.github.io/ecma262/#sec-ecmascript-language-types) of
+the ECMAScript Language Specification.
+In addition to types in that section, `napi_valuetype` can also represent
+Functions and Objects with external data.
+
+#### *napi_typedarray_type*
+```C
+typedef enum {
+ napi_int8_array,
+ napi_uint8_array,
+ napi_uint8_clamped_array,
+ napi_int16_array,
+ napi_uint16_array,
+ napi_int32_array,
+ napi_uint32_array,
+ napi_float32_array,
+ napi_float64_array,
+} napi_typedarray_type;
+```
+
+This represents the underlying binary scalar datatype of the TypedArray.
+Elements of this enum correspond to
+[Section 22.2](https://tc39.github.io/ecma262/#sec-typedarray-objects)
+of the [ECMAScript Language Specification][].
+
+### Object Creation Functions
+#### *napi_create_array*
+
+```C
+napi_status napi_create_array(napi_env env, napi_value* result)
+```
+
+- `[in] env`: The environment that the N-API call is invoked under.
+- `[out] result`: A `napi_value` representing a JavaScript Array.
+
+Returns `napi_ok` if the API succeeded.
+
+This API returns an N-API value corresponding to a JavaScript Array type.
+JavaScript arrays are described in
+[Section 22.1](https://tc39.github.io/ecma262/#sec-array-objects) of the
+ECMAScript Language Specification.
+
+#### *napi_create_array_with_length*
+
+```C
+napi_status napi_create_array_with_length(napi_env env,
+ size_t length,
+ napi_value* result)
+```
+
+- `[in] env`: The environment that the API is invoked under.
+- `[in] length`: The initial length of the Array.
+- `[out] result`: A `napi_value` representing a JavaScript Array.
+
+Returns `napi_ok` if the API succeeded.
+
+This API returns an N-API value corresponding to a JavaScript Array type.
+The Array's length property is set to the passed-in length parameter.
+However, the underlying buffer is not guaranteed to be pre-allocated by the VM
+when the array is created - that behavior is left to the underlying VM
+implementation.
+If the buffer must be a contiguous block of memory that can be
+directly read and/or written via C, consider using
+[`napi_create_external_arraybuffer`][].
+
+JavaScript arrays are described in
+[Section 22.1](https://tc39.github.io/ecma262/#sec-array-objects) of the
+ECMAScript Language Specification.
+
+#### *napi_create_arraybuffer*
+
+```C
+napi_status napi_create_arraybuffer(napi_env env,
+ size_t byte_length,
+ void** data,
+ napi_value* result)
+```
+
+- `[in] env`: The environment that the API is invoked under.
+- `[in] byte_length`: The length in bytes of the array buffer to create.
+- `[out] data`: Pointer to the underlying byte buffer of the ArrayBuffer.
+- `[out] result`: A `napi_value` representing a JavaScript ArrayBuffer.
+
+Returns `napi_ok` if the API succeeded.
+
+This API returns an N-API value corresponding to a JavaScript ArrayBuffer.
+ArrayBuffers are used to represent fixed-length binary data buffers. They are
+normally used as a backing-buffer for TypedArray objects.
+The ArrayBuffer allocated will have an underlying byte buffer whose size is
+determined by the `byte_length` parameter that's passed in.
+The underlying buffer is optionally returned back to the caller in case the
+caller wants to directly manipulate the buffer. This buffer can only be
+written to directly from native code. To write to this buffer from JavaScript,
+a typed array or DataView object would need to be created.
+
+JavaScript ArrayBuffer objects are described in
+[Section 24.1](https://tc39.github.io/ecma262/#sec-arraybuffer-objects)
+of the ECMAScript Language Specification.
+
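+As a short, hedged sketch, native code might allocate an ArrayBuffer and
+initialize its backing store directly (requires `<string.h>` for `memset`;
+`env` and the error handling follow the earlier examples):
+
+```C
+// Allocate a 1024-byte ArrayBuffer and zero its backing store from C. Only
+// native code can write to `data` directly; JavaScript needs a TypedArray or
+// DataView over the ArrayBuffer to see or modify the contents.
+napi_value arraybuffer;
+void* data;
+napi_status status = napi_create_arraybuffer(env, 1024, &data, &arraybuffer);
+if (status != napi_ok) return;
+
+memset(data, 0, 1024);
+```
+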
+#### *napi_create_buffer*
+
+```C
+napi_status napi_create_buffer(napi_env env,
+ size_t size,
+ void** data,
+ napi_value* result)
+```
+
+- `[in] env`: The environment that the API is invoked under.
+- `[in] size`: Size in bytes of the underlying buffer.
+- `[out] data`: Raw pointer to the underlying buffer.
+- `[out] result`: A `napi_value` representing a `node::Buffer`.
+
+Returns `napi_ok` if the API succeeded.
+
+This API allocates a `node::Buffer` object. While this is still a
+fully-supported data structure, in most cases using a TypedArray will suffice.
+
+#### *napi_create_buffer_copy*
+
+```C
+napi_status napi_create_buffer_copy(napi_env env,
+ size_t length,
+ const void* data,
+ void** result_data,
+ napi_value* result)
+```
+
+- `[in] env`: The environment that the API is invoked under.
+- `[in] length`: Size in bytes of the input buffer (should be the same as the
+ size of the new buffer).
+- `[in] data`: Raw pointer to the underlying buffer to copy from.
+- `[out] result_data`: Pointer to the new Buffer's underlying data buffer.
+- `[out] result`: A `napi_value` representing a `node::Buffer`.
+
+Returns `napi_ok` if the API succeeded.
+
+This API allocates a `node::Buffer` object and initializes it with data copied
+from the passed-in buffer. While this is still a fully-supported data
+structure, in most cases using a TypedArray will suffice.
+
+#### *napi_create_external*
+
+```C
+napi_status napi_create_external(napi_env env,
+ void* data,
+ napi_finalize finalize_cb,
+ void* finalize_hint,
+ napi_value* result)
+```
+
+- `[in] env`: The environment that the API is invoked under.
+- `[in] data`: Raw pointer to the external data being wrapped.
+- `[in] finalize_cb`: Optional callback to call when the wrapped object
+is being collected.
+- `[in] finalize_hint`: Optional hint to pass to the finalize callback
+during collection.
+- `[out] result`: A `napi_value` representing an external object.
+
+Returns `napi_ok` if the API succeeded.
+
+This API allocates a JavaScript object with external data attached to it.
+This is used to wrap native objects and project them into JavaScript.
+The API allows the caller to pass in a finalize callback, in case the
+underlying native resource needs to be cleaned up when the wrapper
+JavaScript object gets collected.
+
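+A hedged sketch of this pattern (assuming the usual `napi_finalize` callback
+signature of `(env, data, hint)`; requires `<stdlib.h>` for `malloc`/`free`):
+
+```C
+// Finalize callback invoked once the wrapping JavaScript object has been
+// collected; it frees the native allocation.
+void FinalizeExternal(napi_env env, void* finalize_data, void* finalize_hint) {
+  free(finalize_data);
+}
+
+// Wrap a native allocation in an external value so it can travel through
+// JavaScript and be cleaned up automatically.
+napi_value WrapNativeData(napi_env env) {
+  void* native = malloc(64);
+  if (native == NULL) return NULL;
+  napi_value external;
+  napi_status status =
+      napi_create_external(env, native, FinalizeExternal, NULL, &external);
+  if (status != napi_ok) {
+    free(native);
+    return NULL;
+  }
+  return external;
+}
+```
+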
+#### napi_create_external_arraybuffer
+
+```C
+napi_status
+napi_create_external_arraybuffer(napi_env env,
+ void* external_data,
+ size_t byte_length,
+ napi_finalize finalize_cb,
+ void* finalize_hint,
+ napi_value* result)
+```
+
+- `[in] env`: The environment that the API is invoked under.
+- `[in] external_data`: Pointer to the underlying byte buffer of the
+ArrayBuffer.
+- `[in] byte_length`: The length in bytes of the underlying buffer.
+- `[in] finalize_cb`: Optional callback to call when the ArrayBuffer is
+being collected.
+- `[in] finalize_hint`: Optional hint to pass to the finalize callback
+during collection.
+- `[out] result`: A `napi_value` representing a JavaScript ArrayBuffer.
+
+Returns `napi_ok` if the API succeeded.
+
+This API returns an N-API value corresponding to a JavaScript ArrayBuffer.
+The underlying byte buffer of the ArrayBuffer is externally allocated and
+managed. The caller must ensure that the byte buffer remains valid until the
+finalize callback is called.
+
+JavaScript ArrayBuffers are described in
+[Section 24.1](https://tc39.github.io/ecma262/#sec-arraybuffer-objects)
+of the ECMAScript Language Specification.
+
+#### *napi_create_external_buffer*
+
+```C
+napi_status napi_create_external_buffer(napi_env env,
+ size_t length,
+ void* data,
+ napi_finalize finalize_cb,
+ void* finalize_hint,
+ napi_value* result)
+```
+
+- `[in] env`: The environment that the API is invoked under.
+- `[in] length`: Size in bytes of the input buffer (should be the same as
+the size of the new buffer).
+- `[in] data`: Raw pointer to the underlying buffer to expose to JavaScript.
+- `[in] finalize_cb`: Optional callback to call when the ArrayBuffer is
+being collected.
+- `[in] finalize_hint`: Optional hint to pass to the finalize callback
+during collection.
+- `[out] result`: A `napi_value` representing a `node::Buffer`.
+
+Returns `napi_ok` if the API succeeded.
+
+This API allocates a `node::Buffer` object and initializes it with data
+backed by the passed in buffer. While this is still a fully-supported data
+structure, in most cases using a TypedArray will suffice.
+
+**Note:** For Node.js >=4 `Buffers` are Uint8Arrays.
+
+#### *napi_create_function*
+
+```C
+napi_status napi_create_function(napi_env env,
+ const char* utf8name,
+ napi_callback cb,
+ void* data,
+ napi_value* result)
+```
+
+- `[in] env`: The environment that the API is invoked under.
+- `[in] utf8name`: A string representing the name of the function encoded as
+UTF8.
+- `[in] cb`: A function pointer to the native function to be invoked when the
+created function is invoked from JavaScript.
+- `[in] data`: Optional arbitrary context data to be passed into the native
+function when it is invoked.
+- `[out] result`: A `napi_value` representing a JavaScript function.
+
+Returns `napi_ok` if the API succeeded.
+
+This API returns an N-API value corresponding to a JavaScript Function object.
+It's used to wrap native functions so that they can be invoked from JavaScript.
+
+JavaScript Functions are described in
+[Section 19.2](https://tc39.github.io/ecma262/#sec-function-objects)
+of the ECMAScript Language Specification.
+
+#### *napi_create_object*
+
+```C
+napi_status napi_create_object(napi_env env, napi_value* result)
+```
+
+- `[in] env`: The environment that the API is invoked under.
+- `[out] result`: A `napi_value` representing a JavaScript Object.
+
+Returns `napi_ok` if the API succeeded.
+
+This API allocates a default JavaScript Object.
+It is the equivalent of doing `new Object()` in JavaScript.
+
+The JavaScript Object type is described in
+[Section 6.1.7](https://tc39.github.io/ecma262/#sec-object-type) of the
+ECMAScript Language Specification.
+
+#### *napi_create_symbol*
+
+```C
+napi_status napi_create_symbol(napi_env env,
+ const char* description,
+ napi_value* result)
+```
+
+- `[in] env`: The environment that the API is invoked under.
+- `[in] description`: Null-terminated character buffer representing a
+UTF8-encoded string to describe the symbol.
+- `[out] result`: A `napi_value` representing a JavaScript Symbol.
+
+Returns `napi_ok` if the API succeeded.
+
+This API creates a JavaScript Symbol object from a UTF8-encoded C string.
+
+The JavaScript Symbol type is described in
+[Section 19.4](https://tc39.github.io/ecma262/#sec-symbol-objects)
+of the ECMAScript Language Specification.
+
+#### *napi_create_typedarray*
+
+```C
+napi_status napi_create_typedarray(napi_env env,
+ napi_typedarray_type type,
+ size_t length,
+ napi_value arraybuffer,
+ size_t byte_offset,
+ napi_value* result)
+```
+
+- `[in] env`: The environment that the API is invoked under.
+- `[in] type`: Scalar datatype of the elements within the TypedArray.
+- `[in] length`: Number of elements in the TypedArray.
+- `[in] arraybuffer`: ArrayBuffer underlying the typed array.
+- `[in] byte_offset`: The byte offset within the ArrayBuffer from which to
+start projecting the TypedArray.
+- `[out] result`: A `napi_value` representing a JavaScript TypedArray.
+
+Returns `napi_ok` if the API succeeded.
+
+This API creates a JavaScript TypedArray object over an existing ArrayBuffer.
+TypedArray objects provide an array-like view over an underlying data buffer
+where each element has the same underlying binary scalar datatype.
+
+It's required that `(length * size_of_element) + byte_offset` should be
+<= the size in bytes of the array passed in. If not, a RangeError
+exception is raised.
+
+JavaScript TypedArray Objects are described in
+[Section 22.2](https://tc39.github.io/ecma262/#sec-typedarray-objects)
+of the ECMAScript Language Specification.
+
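+For example, a brief sketch (error handling follows the earlier examples)
+creating a Float64Array over a freshly allocated ArrayBuffer:
+
+```C
+// Create an ArrayBuffer large enough for 128 doubles, then a Float64Array
+// viewing it from byte offset 0.
+napi_value arraybuffer;
+void* data;
+napi_status status =
+    napi_create_arraybuffer(env, 128 * sizeof(double), &data, &arraybuffer);
+if (status != napi_ok) return;
+
+napi_value float64_array;
+status = napi_create_typedarray(env, napi_float64_array, 128, arraybuffer,
+                                0, &float64_array);
+if (status != napi_ok) return;
+```
+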
+### Functions to convert from C types to N-API
+#### *napi_create_number*
+
+```C
+napi_status napi_create_number(napi_env env, double value, napi_value* result)
+```
+
+- `[in] env`: The environment that the API is invoked under.
+- `[in] value`: Double-precision value to be represented in JavaScript.
+- `[out] result`: A `napi_value` representing a JavaScript Number.
+
+Returns `napi_ok` if the API succeeded.
+
+This API is used to convert from the C double type to the JavaScript
+Number type.
+
+The JavaScript Number type is described in
+[Section 6.1.6](https://tc39.github.io/ecma262/#sec-ecmascript-language-types-number-type)
+of the ECMAScript Language Specification.
+
+#### *napi_create_string_utf16*
+
+```C
+napi_status napi_create_string_utf16(napi_env env,
+ const char16_t* str,
+ size_t length,
+ napi_value* result)
+```
+
+- `[in] env`: The environment that the API is invoked under.
+- `[in] str`: Character buffer representing a UTF16-LE-encoded string.
+- `[in] length`: The length of the string in characters, or -1 if it is
+null-terminated.
+- `[out] result`: A `napi_value` representing a JavaScript String.
+
+Returns `napi_ok` if the API succeeded.
+
+This API creates a JavaScript String object from a UTF16-LE-encoded C string.
+
+The JavaScript String type is described in
+[Section 6.1.4](https://tc39.github.io/ecma262/#sec-ecmascript-language-types-string-type)
+of the ECMAScript Language Specification.
+
+#### *napi_create_string_utf8*
+
+```C
+napi_status napi_create_string_utf8(napi_env env,
+ const char* str,
+ size_t length,
+ napi_value* result)
+```
+
+- `[in] env`: The environment that the API is invoked under.
+- `[in] str`: Character buffer representing a UTF8-encoded string.
+- `[in] length`: The length of the string in characters, or -1 if it is
+null-terminated.
+- `[out] result`: A `napi_value` representing a JavaScript String.
+
+Returns `napi_ok` if the API succeeded.
+
+This API creates a JavaScript String object from a UTF8-encoded C string.
+
+The JavaScript String type is described in
+[Section 6.1.4](https://tc39.github.io/ecma262/#sec-ecmascript-language-types-string-type)
+of the ECMAScript Language Specification.
+
+### Functions to convert from N-API to C types
+#### *napi_get_array_length*
+
+```C
+napi_status napi_get_array_length(napi_env env,
+ napi_value value,
+ uint32_t* result)
+```
+
+- `[in] env`: The environment that the API is invoked under.
+- `[in] value`: `napi_value` representing the JavaScript Array whose length is
+being queried.
+- `[out] result`: `uint32` representing length of the array.
+
+Returns `napi_ok` if the API succeeded.
+
+This API returns the length of an array.
+
+Array length is described in
+[Section 22.1.4.1](https://tc39.github.io/ecma262/#sec-properties-of-array-instances-length)
+of the ECMAScript Language Specification.
+
+#### *napi_get_arraybuffer_info*
+
+```C
+napi_status napi_get_arraybuffer_info(napi_env env,
+ napi_value arraybuffer,
+ void** data,
+ size_t* byte_length)
+```
+
+- `[in] env`: The environment that the API is invoked under.
+- `[in] arraybuffer`: `napi_value` representing the ArrayBuffer being queried.
+- `[out] data`: The underlying data buffer of the ArrayBuffer.
+- `[out] byte_length`: Length in bytes of the underlying data buffer.
+
+Returns `napi_ok` if the API succeeded.
+
+This API is used to retrieve the underlying data buffer of an ArrayBuffer and
+its length.
+WARNING: Use caution while using this API. The lifetime of the underlying data
+buffer is managed by the ArrayBuffer even after it's returned. A
+possible safe way to use this API is in conjunction with [`napi_create_reference`][],
+which can be used to guarantee control over the lifetime of the
+ArrayBuffer. It's also safe to use the returned data buffer within the same
+callback as long as there are no calls to other APIs that might trigger a GC.
+
+#### *napi_get_buffer_info*
+
+```C
+napi_status napi_get_buffer_info(napi_env env,
+ napi_value value,
+ void** data,
+ size_t* length)
+```
+
+- `[in] env`: The environment that the API is invoked under.
+- `[in] value`: `napi_value` representing the `node::Buffer` being queried.
+- `[out] data`: The underlying data buffer of the `node::Buffer`.
+- `[out] length`: Length in bytes of the underlying data buffer.
+
+Returns `napi_ok` if the API succeeded.
+
+This API is used to retrieve the underlying data buffer of a `node::Buffer`
+and its length.
+Warning: Use caution while using this API since the underlying data buffer's
+lifetime is not guaranteed if it's managed by the VM.
+
+#### *napi_get_prototype*
+
+```C
+napi_status napi_get_prototype(napi_env env,
+ napi_value object,
+ napi_value* result)
+```
+
+- `[in] env`: The environment that the API is invoked under.
+- `[in] object`: `napi_value` representing JavaScript Object whose prototype
+to return. This returns the equivalent of `Object.getPrototypeOf` (which is
+not the same as the function's `prototype` property).
+- `[out] result`: `napi_value` representing prototype of the given object.
+
+Returns `napi_ok` if the API succeeded.
+
+#### *napi_get_typedarray_info*
+
+```C
+napi_status napi_get_typedarray_info(napi_env env,
+ napi_value typedarray,
+ napi_typedarray_type* type,
+ size_t* length,
+ void** data,
+ napi_value* arraybuffer,
+ size_t* byte_offset)
+```
+
+- `[in] env`: The environment that the API is invoked under.
+- `[in] typedarray`: `napi_value` representing the TypedArray whose
+properties to query.
+- `[out] type`: Scalar datatype of the elements within the TypedArray.
+- `[out] length`: Number of elements in the TypedArray.
+- `[out] data`: The data buffer underlying the typed array.
+- `[out] byte_offset`: The byte offset within the data buffer from which
+to start projecting the TypedArray.
+
+Returns `napi_ok` if the API succeeded.
+
+This API returns various properties of a typed array.
+Warning: Use caution while using this API since the underlying data buffer
+is managed by the VM.
+
+#### *napi_get_value_bool*
+
+```C
+napi_status napi_get_value_bool(napi_env env, napi_value value, bool* result)
+```
+
+- `[in] env`: The environment that the API is invoked under.
+- `[in] value`: `napi_value` representing JavaScript Boolean.
+- `[out] result`: C boolean primitive equivalent of the given JavaScript
+Boolean.
+
+Returns `napi_ok` if the API succeeded. If a non-boolean `napi_value` is
+passed in it returns `napi_boolean_expected`.
+
+This API returns the C boolean primitive equivalent of the given JavaScript
+Boolean.
+
+#### *napi_get_value_double*
+
+```C
+napi_status napi_get_value_double(napi_env env,
+ napi_value value,
+ double* result)
+```
+
+- `[in] env`: The environment that the API is invoked under.
+- `[in] value`: `napi_value` representing JavaScript Number.
+- `[out] result`: C double primitive equivalent of the given JavaScript
+Number.
+
+Returns `napi_ok` if the API succeeded. If a non-number `napi_value` is passed
+in it returns `napi_number_expected`.
+
+This API returns the C double primitive equivalent of the given JavaScript
+Number.
+
+
+#### *napi_get_value_external*
+
+```C
+napi_status napi_get_value_external(napi_env env,
+ napi_value value,
+ void** result)
+```
+
+- `[in] env`: The environment that the API is invoked under.
+- `[in] value`: `napi_value` representing JavaScript External value.
+- `[out] result`: Pointer to the data wrapped by the JavaScript External value.
+
+Returns `napi_ok` if the API succeeded. If a non-external `napi_value` is
+passed in it returns `napi_invalid_arg`.
+
+This API returns the pointer to the data wrapped by the JavaScript
+External value.
+
+#### *napi_get_value_int32*
+
+```C
+napi_status napi_get_value_int32(napi_env env,
+ napi_value value,
+ int32_t* result)
+```
+
+- `[in] env`: The environment that the API is invoked under.
+- `[in] value`: `napi_value` representing JavaScript Number.
+- `[out] result`: C int32 primitive equivalent of the given JavaScript Number.
+
+Returns `napi_ok` if the API succeeded. If a non-number `napi_value`
+is passed in it returns `napi_number_expected`.
+
+This API returns the C int32 primitive equivalent
+of the given JavaScript Number. If the number exceeds the range of the
+32 bit integer, then the result is truncated to the equivalent of the
+bottom 32 bits. This can result in a large positive number becoming
+a negative number if the value is > 2^31 - 1.
+
+#### *napi_get_value_int64*
+
+```C
+napi_status napi_get_value_int64(napi_env env,
+ napi_value value,
+ int64_t* result)
+```
+
+- `[in] env`: The environment that the API is invoked under.
+- `[in] value`: `napi_value` representing JavaScript Number.
+- `[out] result`: C int64 primitive equivalent of the given JavaScript Number.
+
+Returns `napi_ok` if the API succeeded. If a non-number `napi_value`
+is passed in it returns `napi_number_expected`.
+
+This API returns the C int64 primitive equivalent of the given
+JavaScript Number.
+
+#### *napi_get_value_string_length*
+
+```C
+napi_status napi_get_value_string_length(napi_env env,
+ napi_value value,
+ int* result)
+```
+
+- `[in] env`: The environment that the API is invoked under.
+- `[in] value`: `napi_value` representing JavaScript string.
+- `[out] result`: Number of characters in the given JavaScript string.
+
+Returns `napi_ok` if the API succeeded. If a non-String `napi_value`
+is passed in it returns `napi_string_expected`.
+
+This API returns the number of characters in the given JavaScript string.
+
+#### *napi_get_value_string_utf8*
+
+```C
+napi_status napi_get_value_string_utf8(napi_env env,
+ napi_value value,
+ char* buf,
+ size_t bufsize,
+ size_t* result)
+```
+
+- `[in] env`: The environment that the API is invoked under.
+- `[in] value`: `napi_value` representing JavaScript string.
+- `[in] buf`: Buffer to write the UTF8-encoded string into. If NULL is passed
+ in, the length of the string (in bytes) is returned.
+- `[in] bufsize`: Size of the destination buffer.
+- `[out] result`: Number of bytes copied into the buffer including the null
+terminator. If the buffer size is insufficient, the string will be truncated
+including a null terminator.
+
+Returns `napi_ok` if the API succeeded. If a non-String `napi_value`
+is passed in it returns `napi_string_expected`.
+
+This API returns the UTF8-encoded string corresponding to the value passed
+in.
+
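+A hedged sketch of the two-call pattern implied above (assuming the required
+length is reported via `result` when `buf` is NULL, `value` is the JavaScript
+string as a `napi_value`, and `<stdlib.h>` is available):
+
+```C
+// First call: pass NULL for buf to learn how many bytes are needed.
+size_t length;
+napi_status status = napi_get_value_string_utf8(env, value, NULL, 0, &length);
+if (status != napi_ok) return;
+
+// Second call: allocate length + 1 bytes so the null terminator fits, then
+// copy the string contents.
+char* buf = malloc(length + 1);
+if (buf == NULL) return;
+size_t copied;
+status = napi_get_value_string_utf8(env, value, buf, length + 1, &copied);
+if (status != napi_ok) {
+  free(buf);
+  return;
+}
+// ... use buf ...
+free(buf);
+```
+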
+#### *napi_get_value_string_utf16*
+
+```C
+napi_status napi_get_value_string_utf16(napi_env env,
+ napi_value value,
+ char16_t* buf,
+ size_t bufsize,
+ size_t* result)
+```
+
+- `[in] env`: The environment that the API is invoked under.
+- `[in] value`: `napi_value` representing JavaScript string.
+- `[in] buf`: Buffer to write the UTF16-LE-encoded string into. If NULL is
+passed in, the length of the string (in 2-byte code units) is returned.
+- `[in] bufsize`: Size of the destination buffer.
+- `[out] result`: Number of 2-byte code units copied into the buffer including
+the null terminator. If the buffer size is insufficient, the string will be
+truncated including a null terminator.
+
+Returns `napi_ok` if the API succeeded. If a non-String `napi_value`
+is passed in it returns `napi_string_expected`.
+
+This API returns the UTF16-encoded string corresponding to the value passed
+in.
+
+#### *napi_get_value_uint32*
+
+```C
+napi_status napi_get_value_uint32(napi_env env,
+ napi_value value,
+ uint32_t* result)
+```
+
+- `[in] env`: The environment that the API is invoked under.
+- `[in] value`: `napi_value` representing JavaScript Number.
+- `[out] result`: C primitive equivalent of the given `napi_value` as a
+`uint32_t`.
+
+Returns `napi_ok` if the API succeeded. If a non-number `napi_value`
+is passed in it returns `napi_number_expected`.
+
+This API returns the C primitive equivalent of the given `napi_value` as a
+`uint32_t`.
+
+### Functions to get global instances
+#### *napi_get_boolean*
+
+```C
+napi_status napi_get_boolean(napi_env env, bool value, napi_value* result)
+```
+
+- `[in] env`: The environment that the API is invoked under.
+- `[in] value`: The value of the boolean to retrieve.
+- `[out] result`: `napi_value` representing JavaScript Boolean singleton to
+retrieve.
+
+Returns `napi_ok` if the API succeeded.
+
+This API is used to return the JavaScript singleton object that is used to
+represent the given boolean value.
+
+#### *napi_get_global*
+
+```C
+napi_status napi_get_global(napi_env env, napi_value* result)
+```
+
+- `[in] env`: The environment that the API is invoked under.
+- `[out] result`: `napi_value` representing JavaScript Global Object.
+
+Returns `napi_ok` if the API succeeded.
+
+This API returns the global Object.
+
+#### *napi_get_null*
+
+```C
+napi_status napi_get_null(napi_env env, napi_value* result)
+```
+
+- `[in] env`: The environment that the API is invoked under.
+- `[out] result`: `napi_value` representing JavaScript Null Object.
+
+Returns `napi_ok` if the API succeeded.
+
+This API returns the null Object.
+
+#### *napi_get_undefined*
+
+```C
+napi_status napi_get_undefined(napi_env env, napi_value* result)
+```
+
+- `[in] env`: The environment that the API is invoked under.
+- `[out] result`: `napi_value` representing JavaScript Undefined value.
+
+Returns `napi_ok` if the API succeeded.
+
+This API returns the Undefined object.
+
+## Working with JavaScript Values - Abstract Operations
+
+N-API exposes a set of APIs to perform some abstract operations on JavaScript
+values. Some of these operations are documented under
+[Section 7](https://tc39.github.io/ecma262/#sec-abstract-operations)
+of the [ECMAScript Language Specification](https://tc39.github.io/ecma262/).
+
+These APIs support doing one of the following:
+1. Coerce JavaScript values to specific JavaScript types (such as Number or
+ String)
+2. Check the type of a JavaScript value
+3. Check for equality between two JavaScript values
+
+### *napi_coerce_to_bool*
+
+```C
+napi_status napi_coerce_to_bool(napi_env env,
+ napi_value value,
+ napi_value* result)
+```
+
+- `[in] env`: The environment that the API is invoked under.
+- `[in] value`: The JavaScript value to coerce.
+- `[out] result`: `napi_value` representing the coerced JavaScript Boolean.
+
+Returns `napi_ok` if the API succeeded.
+
+This API implements the abstract operation ToBoolean as defined in
+[Section 7.1.2](https://tc39.github.io/ecma262/#sec-toboolean)
+of the ECMAScript Language Specification.
+This API can be re-entrant if getters are defined on the passed-in Object.
+
+### *napi_coerce_to_number*
+
+```C
+napi_status napi_coerce_to_number(napi_env env,
+ napi_value value,
+ napi_value* result)
+```
+
+- `[in] env`: The environment that the API is invoked under.
+- `[in] value`: The JavaScript value to coerce.
+- `[out] result`: `napi_value` representing the coerced JavaScript Number.
+
+Returns `napi_ok` if the API succeeded.
+
+This API implements the abstract operation ToNumber as defined in
+[Section 7.1.3](https://tc39.github.io/ecma262/#sec-tonumber)
+of the ECMAScript Language Specification.
+This API can be re-entrant if getters are defined on the passed-in Object.
+
+### *napi_coerce_to_object*
+
+```C
+napi_status napi_coerce_to_object(napi_env env,
+ napi_value value,
+ napi_value* result)
+```
+
+- `[in] env`: The environment that the API is invoked under.
+- `[in] value`: The JavaScript value to coerce.
+- `[out] result`: `napi_value` representing the coerced JavaScript Object.
+
+Returns `napi_ok` if the API succeeded.
+
+This API implements the abstract operation ToObject as defined in
+[Section 7.1.13](https://tc39.github.io/ecma262/#sec-toobject)
+of the ECMAScript Language Specification.
+This API can be re-entrant if getters are defined on the passed-in Object.
+
+### *napi_coerce_to_string*
+
+```C
+napi_status napi_coerce_to_string(napi_env env,
+ napi_value value,
+ napi_value* result)
+```
+
+- `[in] env`: The environment that the API is invoked under.
+- `[in] value`: The JavaScript value to coerce.
+- `[out] result`: `napi_value` representing the coerced JavaScript String.
+
+Returns `napi_ok` if the API succeeded.
+
+This API implements the abstract operation ToString as defined in
+[Section 7.1.12](https://tc39.github.io/ecma262/#sec-tostring)
+of the ECMAScript Language Specification.
+This API can be re-entrant if getters are defined on the passed-in Object.
+
+### *napi_typeof*
+
+```C
+napi_status napi_typeof(napi_env env, napi_value value, napi_valuetype* result)
+```
+
+- `[in] env`: The environment that the API is invoked under.
+- `[in] value`: The JavaScript value whose type to query.
+- `[out] result`: The type of the JavaScript value.
+
+Returns `napi_ok` if the API succeeded.
+- `napi_invalid_arg` if the type of `value` is not a known ECMAScript type and
+ `value` is not an External value.
+
+This API represents behavior similar to invoking the `typeof` Operator on
+the object as defined in [Section 12.5.5][] of the ECMAScript Language
+Specification. However, it has support for detecting an External value.
+If `value` has a type that is invalid, an error is returned.
+
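+For example, a minimal sketch that branches on the reported type (`value` is
+assumed to be a `napi_value`; error handling follows the earlier examples):
+
+```C
+// Query the type of an incoming value and branch on it, much like using the
+// typeof operator in JavaScript.
+napi_valuetype type;
+napi_status status = napi_typeof(env, value, &type);
+if (status != napi_ok) return;
+
+switch (type) {
+  case napi_number:
+    // Handle a Number.
+    break;
+  case napi_string:
+    // Handle a String.
+    break;
+  case napi_external:
+    // Handle an External value.
+    break;
+  default:
+    // Other members of napi_valuetype.
+    break;
+}
+```
+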
+### *napi_instanceof*
+
+```C
+napi_status napi_instanceof(napi_env env,
+ napi_value object,
+ napi_value constructor,
+ bool* result)
+```
+
+- `[in] env`: The environment that the API is invoked under.
+- `[in] object`: The JavaScript value to check.
+- `[in] constructor`: The JavaScript function object of the constructor
+function to check against.
+- `[out] result`: Boolean that is set to true if `object instanceof constructor`
+is true.
+
+Returns `napi_ok` if the API succeeded.
+
+This API represents invoking the `instanceof` Operator on the object as
+defined in
+[Section 12.10.4](https://tc39.github.io/ecma262/#sec-instanceofoperator)
+of the ECMAScript Language Specification.
+
+### *napi_is_array*
+
+```C
+napi_status napi_is_array(napi_env env, napi_value value, bool* result)
+```
+
+- `[in] env`: The environment that the API is invoked under.
+- `[in] value`: The JavaScript value to check.
+- `[out] result`: Whether the given object is an array.
+
+Returns `napi_ok` if the API succeeded.
+
+This API represents invoking the `IsArray` operation on the object
+as defined in [Section 7.2.2](https://tc39.github.io/ecma262/#sec-isarray)
+of the ECMAScript Language Specification.
+
+### *napi_is_arraybuffer*
+
+```C
+napi_status napi_is_arraybuffer(napi_env env, napi_value value, bool* result)
+```
+
+- `[in] env`: The environment that the API is invoked under.
+- `[in] value`: The JavaScript value to check.
+- `[out] result`: Whether the given object is an ArrayBuffer.
+
+Returns `napi_ok` if the API succeeded.
+
+This API checks if the Object passed in is an array buffer.
+
+### *napi_is_buffer*
+
+```C
+napi_status napi_is_buffer(napi_env env, napi_value value, bool* result)
+```
+
+- `[in] env`: The environment that the API is invoked under.
+- `[in] value`: The JavaScript value to check.
+- `[out] result`: Whether the given `napi_value` represents a `node::Buffer`
+object.
+
+Returns `napi_ok` if the API succeeded.
+
+This API checks if the Object passed in is a buffer.
+
+### *napi_is_error*
+
+```C
+napi_status napi_is_error(napi_env env, napi_value value, bool* result)
+```
+
+- `[in] env`: The environment that the API is invoked under.
+- `[in] value`: The JavaScript value to check.
+- `[out] result`: Whether the given `napi_value` represents an Error object.
+
+Returns `napi_ok` if the API succeeded.
+
+This API checks if the Object passed in is an Error.
+
+### *napi_is_typedarray*
+
+```C
+napi_status napi_is_typedarray(napi_env env, napi_value value, bool* result)
+```
+
+- `[in] env`: The environment that the API is invoked under.
+- `[in] value`: The JavaScript value to check.
+- `[out] result`: Whether the given `napi_value` represents a TypedArray.
+
+Returns `napi_ok` if the API succeeded.
+
+This API checks if the Object passed in is a typed array.
+
+### *napi_strict_equals*
+
+```C
+napi_status napi_strict_equals(napi_env env,
+ napi_value lhs,
+ napi_value rhs,
+ bool* result)
+```
+
+- `[in] env`: The environment that the API is invoked under.
+- `[in] lhs`: The JavaScript value to check.
+- `[in] rhs`: The JavaScript value to check against.
+- `[out] result`: Whether the two `napi_value` objects are equal.
+
+Returns `napi_ok` if the API succeeded.
+
+This API represents the invocation of the Strict Equality algorithm as
+defined in
+[Section 7.2.14](https://tc39.github.io/ecma262/#sec-strict-equality-comparison)
+of the ECMAScript Language Specification.
+
+## Working with JavaScript Properties
+
+N-API exposes a set of APIs to get and set properties on JavaScript
+objects. Some of these types are documented under
+[Section 7](https://tc39.github.io/ecma262/#sec-operations-on-objects) of the
+[ECMAScript Language Specification](https://tc39.github.io/ecma262/).
+
+Properties in JavaScript are represented as a tuple of a key and a value.
+Fundamentally, all property keys in N-API can be represented in one of the
+following forms:
+- Named: a simple UTF8-encoded string
+- Integer-Indexed: an index value represented by `uint32_t`
+- JavaScript value: these are represented in N-API by `napi_value`. This can
+be a `napi_value` representing a String, Number or Symbol.
+
+N-API values are represented by the type `napi_value`.
+Any N-API call that requires a JavaScript value takes in a `napi_value`.
+However, it's the caller's responsibility to make sure that the
+`napi_value` in question is of the JavaScript type expected by the API.
+
+The APIs documented in this section provide a simple interface to
+get and set properties on arbitrary JavaScript objects represented by
+`napi_value`.
+
+For instance, consider the following JavaScript code snippet:
+```js
+const obj = {};
+obj.myProp = 123;
+```
+The equivalent can be done using N-API values with the following snippet:
+```C
+napi_status status = napi_generic_failure;
+
+// const obj = {}
+napi_value obj, value;
+status = napi_create_object(env, &obj);
+if (status != napi_ok) return status;
+
+// Create a napi_value for 123
+status = napi_create_number(env, 123, &value);
+if (status != napi_ok) return status;
+
+// obj.myProp = 123
+status = napi_set_named_property(env, obj, "myProp", value);
+if (status != napi_ok) return status;
+```
+
+Indexed properties can be set in a similar manner. Consider the following
+JavaScript snippet:
+```js
+const arr = [];
+arr[123] = 'hello';
+```
+The equivalent can be done using N-API values with the following snippet:
+```C
+napi_status status = napi_generic_failure;
+
+// const arr = [];
+napi_value arr, value;
+status = napi_create_array(env, &arr);
+if (status != napi_ok) return status;
+
+// Create a napi_value for 'hello'
+status = napi_create_string_utf8(env, "hello", -1, &value);
+if (status != napi_ok) return status;
+
+// arr[123] = 'hello';
+status = napi_set_element(env, arr, 123, value);
+if (status != napi_ok) return status;
+```
+
+Properties can be retrieved using the APIs described in this section.
+Consider the following JavaScript snippet:
+```js
+const arr = [];
+const value = arr[123];
+```
+
+The following is the approximate equivalent of the N-API counterpart:
+```C
+napi_status status = napi_generic_failure;
+
+// const arr = []
+napi_value arr, value;
+status = napi_create_array(env, &arr);
+if (status != napi_ok) return status;
+
+// const value = arr[123]
+status = napi_get_element(env, arr, 123, &value);
+if (status != napi_ok) return status;
+```
+
+Finally, multiple properties can also be defined on an object for performance
+reasons. Consider the following JavaScript:
+```js
+const obj = {};
+Object.defineProperties(obj, {
+ 'foo': { value: 123, writable: true, configurable: true, enumerable: true },
+ 'bar': { value: 456, writable: true, configurable: true, enumerable: true }
+});
+```
+
+The following is the approximate equivalent of the N-API counterpart:
+```C
+napi_status status = napi_generic_failure;
+
+// const obj = {};
+napi_value obj;
+status = napi_create_object(env, &obj);
+if (status != napi_ok) return status;
+
+// Create napi_values for 123 and 456
+napi_value fooValue, barValue;
+status = napi_create_number(env, 123, &fooValue);
+if (status != napi_ok) return status;
+status = napi_create_number(env, 456, &barValue);
+if (status != napi_ok) return status;
+
+// Set the properties
+napi_property_descriptor descriptors[] = {
+  { "foo", 0, 0, 0, fooValue, napi_default, 0 },
+  { "bar", 0, 0, 0, barValue, napi_default, 0 }
+};
+status = napi_define_properties(env,
+ obj,
+ sizeof(descriptors) / sizeof(descriptors[0]),
+ descriptors);
+if (status != napi_ok) return status;
+```
+
+### Structures
+#### *napi_property_attributes*
+```C
+typedef enum {
+ napi_default = 0,
+ napi_read_only = 1 << 0,
+ napi_dont_enum = 1 << 1,
+ napi_dont_delete = 1 << 2,
+ napi_static_property = 1 << 10,
+} napi_property_attributes;
+```
+
+`napi_property_attributes` are flags used to control the behavior of properties
+set on a JavaScript object. They roughly correspond to the attributes listed in
+[Section 6.1.7.1](https://tc39.github.io/ecma262/#table-2) of the
+[ECMAScript Language Specification](https://tc39.github.io/ecma262/). They can
+be one or more of the following bitflags:
+
+- `napi_default` - Used to indicate that no explicit attributes are set on the
+given property. By default, a property is Writable, Enumerable, and
+Configurable. This is a deviation from the ECMAScript specification, where
+generally the values for a property descriptor attribute default to false
+if they're not provided.
+- `napi_read_only` - Used to indicate that a given property is not Writable.
+- `napi_dont_enum` - Used to indicate that a given property is not Enumerable.
+- `napi_dont_delete` - Used to indicate that a given property is not
+Configurable, as defined in
+[Section 6.1.7.1](https://tc39.github.io/ecma262/#table-2) of the
+[ECMAScript Language Specification](https://tc39.github.io/ecma262/).
+- `napi_static_property` - Used to indicate that the property will be defined as
+a static property on a class as opposed to an instance property, which is the
+default. This is used only by [`napi_define_class`][]. It is ignored by
+`napi_define_properties`.
+
+#### *napi_property_descriptor*
+```C
+typedef struct {
+ const char* utf8name;
+
+ napi_callback method;
+ napi_callback getter;
+ napi_callback setter;
+ napi_value value;
+
+ napi_property_attributes attributes;
+ void* data;
+} napi_property_descriptor;
+```
+
+- `utf8name`: String describing the key for the property, encoded as UTF8.
+- `value`: The value that's retrieved by a get access of the property if the
+ property is a data property. If this is passed in, set `getter`, `setter`,
+ `method` and `data` to `NULL` (since these members won't be used).
+- `getter`: A function to call when a get access of the property is performed.
+If this is passed in, set `value` and `method` to `NULL` (since these members
+won't be used). The given function is called implicitly by the runtime when the
+property is accessed from JavaScript code (or if a get on the property is
+performed using a N-API call).
+- `setter`: A function to call when a set access of the property is performed.
+If this is passed in, set `value` and `method` to `NULL` (since these members
+won't be used). The given function is called implicitly by the runtime when the
+property is set from JavaScript code (or if a set on the property is
+performed using a N-API call).
+- `method`: Set this to make the property descriptor object's `value` property
+a JavaScript function represented by `method`. If this is
+passed in, set `value`, `getter` and `setter` to `NULL` (since these members
+won't be used).
+- `data`: The callback data passed into `method`, `getter` and `setter` if
+this function is invoked.
+- `attributes`: The attributes associated with the particular property.
+See [`napi_property_attributes`](#napi_property_attributes).
+
+### Functions
+#### *napi_get_property_names*
+
+```C
+napi_status napi_get_property_names(napi_env env,
+ napi_value object,
+ napi_value* result);
+```
+
+- `[in] env`: The environment that the N-API call is invoked under.
+- `[in] object`: The object from which to retrieve the properties.
+- `[out] result`: A `napi_value` representing an array of JavaScript values
+that represent the property names of the object. `result` can be iterated
+over using [`napi_get_array_length`][] and [`napi_get_element`][].
+
+Returns `napi_ok` if the API succeeded.
+
+This API returns the array of property names for the Object passed in.
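+
+As a sketch, the returned array can be walked with [`napi_get_array_length`][]
+and [`napi_get_element`][] (assuming `env` and `object` are available):
+```C
+napi_value names;
+napi_status status = napi_get_property_names(env, object, &names);
+if (status != napi_ok) return status;
+
+uint32_t length;
+status = napi_get_array_length(env, names, &length);
+if (status != napi_ok) return status;
+
+for (uint32_t i = 0; i < length; i++) {
+  napi_value name;
+  status = napi_get_element(env, names, i, &name);
+  if (status != napi_ok) return status;
+  // `name` is a napi_value holding one property name.
+}
+```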
+
+#### *napi_set_property*
+
+```C
+napi_status napi_set_property(napi_env env,
+ napi_value object,
+ napi_value key,
+ napi_value value);
+```
+
+- `[in] env`: The environment that the N-API call is invoked under.
+- `[in] object`: The object on which to set the property.
+- `[in] key`: The name of the property to set.
+- `[in] value`: The property value.
+
+Returns `napi_ok` if the API succeeded.
+
+This API sets a property on the Object passed in.
+
+#### *napi_get_property*
+
+```C
+napi_status napi_get_property(napi_env env,
+ napi_value object,
+ napi_value key,
+ napi_value* result);
+```
+
+- `[in] env`: The environment that the N-API call is invoked under.
+- `[in] object`: The object from which to retrieve the property.
+- `[in] key`: The name of the property to retrieve.
+- `[out] result`: The value of the property.
+
+Returns `napi_ok` if the API succeeded.
+
+This API gets the requested property from the Object passed in.
+
+
+#### *napi_has_property*
+
+```C
+napi_status napi_has_property(napi_env env,
+ napi_value object,
+ napi_value key,
+ bool* result);
+```
+
+- `[in] env`: The environment that the N-API call is invoked under.
+- `[in] object`: The object to query.
+- `[in] key`: The name of the property whose existence to check.
+- `[out] result`: Whether the property exists on the object or not.
+
+Returns `napi_ok` if the API succeeded.
+
+This API checks if the Object passed in has the named property.
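+
+A minimal sketch combining this API with [`napi_get_property`][], assuming
+`env`, `object` and `key` (a `napi_value` holding a String) are available:
+```C
+bool has_prop;
+napi_status status = napi_has_property(env, object, key, &has_prop);
+if (status != napi_ok) return status;
+
+if (has_prop) {
+  napi_value prop_value;
+  status = napi_get_property(env, object, key, &prop_value);
+  if (status != napi_ok) return status;
+}
+```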
+
+
+#### *napi_set_named_property*
+
+```C
+napi_status napi_set_named_property(napi_env env,
+ napi_value object,
+ const char* utf8Name,
+ napi_value value);
+```
+
+- `[in] env`: The environment that the N-API call is invoked under.
+- `[in] object`: The object on which to set the property.
+- `[in] utf8Name`: The name of the property to set.
+- `[in] value`: The property value.
+
+Returns `napi_ok` if the API succeeded.
+
+This method is equivalent to calling [`napi_set_property`][] with a `napi_value`
+created from the string passed in as `utf8Name`.
+
+#### *napi_get_named_property*
+
+```C
+napi_status napi_get_named_property(napi_env env,
+ napi_value object,
+ const char* utf8Name,
+ napi_value* result);
+```
+
+- `[in] env`: The environment that the N-API call is invoked under.
+- `[in] object`: The object from which to retrieve the property.
+- `[in] utf8Name`: The name of the property to get.
+- `[out] result`: The value of the property.
+
+Returns `napi_ok` if the API succeeded.
+
+This method is equivalent to calling [`napi_get_property`][] with a `napi_value`
+created from the string passed in as `utf8Name`.
+
+#### *napi_has_named_property*
+
+```C
+napi_status napi_has_named_property(napi_env env,
+ napi_value object,
+ const char* utf8Name,
+ bool* result);
+```
+
+- `[in] env`: The environment that the N-API call is invoked under.
+- `[in] object`: The object to query.
+- `[in] utf8Name`: The name of the property whose existence to check.
+- `[out] result`: Whether the property exists on the object or not.
+
+Returns `napi_ok` if the API succeeded.
+
+This method is equivalent to calling [`napi_has_property`][] with a `napi_value`
+created from the string passed in as `utf8Name`.
+
+#### *napi_set_element*
+
+```C
+napi_status napi_set_element(napi_env env,
+ napi_value object,
+ uint32_t index,
+ napi_value value);
+```
+
+- `[in] env`: The environment that the N-API call is invoked under.
+- `[in] object`: The object on which to set the property.
+- `[in] index`: The index of the property to set.
+- `[in] value`: The property value.
+
+Returns `napi_ok` if the API succeeded.
+
+This API sets an element on the Object passed in.
+
+#### *napi_get_element*
+
+```C
+napi_status napi_get_element(napi_env env,
+ napi_value object,
+ uint32_t index,
+ napi_value* result);
+```
+
+- `[in] env`: The environment that the N-API call is invoked under.
+- `[in] object`: The object from which to retrieve the property.
+- `[in] index`: The index of the property to get.
+- `[out] result`: The value of the property.
+
+Returns `napi_ok` if the API succeeded.
+
+This API gets the element at the requested index.
+
+#### *napi_has_element*
+
+```C
+napi_status napi_has_element(napi_env env,
+ napi_value object,
+ uint32_t index,
+ bool* result);
+```
+
+- `[in] env`: The environment that the N-API call is invoked under.
+- `[in] object`: The object to query.
+- `[in] index`: The index of the property whose existence to check.
+- `[out] result`: Whether the property exists on the object or not.
+
+Returns `napi_ok` if the API succeeded.
+
+This API returns whether the Object passed in has an element at the
+requested index.
+
+#### *napi_define_properties*
+
+```C
+napi_status napi_define_properties(napi_env env,
+ napi_value object,
+ size_t property_count,
+ const napi_property_descriptor* properties);
+```
+
+- `[in] env`: The environment that the N-API call is invoked under.
+- `[in] object`: The object from which to retrieve the properties.
+- `[in] property_count`: The number of elements in the `properties` array.
+- `[in] properties`: The array of property descriptors.
+
+Returns `napi_ok` if the API succeeded.
+
+This method allows the efficient definition of multiple properties on a given
+object. The properties are defined using property descriptors (see
+[`napi_property_descriptor`][]). Given an array of such property descriptors,
+this API will set the properties on the object one at a time, as defined by
+DefineOwnProperty (described in [Section 9.1.6][] of the ECMA262 specification).
+
+## Working with JavaScript Functions
+
+N-API provides a set of APIs that allow JavaScript code to
+call back into native code. N-API APIs that support calling back
+into native code take in callback functions represented by
+the `napi_callback` type. When the JavaScript VM calls back to
+native code, the `napi_callback` function provided is invoked. The APIs
+documented in this section allow the callback function to do the
+following:
+- Get information about the context in which the callback was invoked.
+- Get the arguments passed into the callback.
+- Return a `napi_value` back from the callback.
+
+Additionally, N-API provides a set of functions which allow calling
+JavaScript functions from native code. One can either call a function
+like a regular JavaScript function call, or as a constructor
+function.
+
+
+### *napi_call_function*
+
+```C
+napi_status napi_call_function(napi_env env,
+ napi_value recv,
+ napi_value func,
+                               size_t argc,
+ const napi_value* argv,
+ napi_value* result)
+```
+
+- `[in] env`: The environment that the API is invoked under.
+- `[in] recv`: The `this` object passed to the called function.
+- `[in] func`: `napi_value` representing the JavaScript function
+to be invoked.
+- `[in] argc`: The count of elements in the `argv` array.
+- `[in] argv`: Array of `napi_values` representing JavaScript values passed
+in as arguments to the function.
+- `[out] result`: `napi_value` representing the JavaScript object returned.
+
+Returns `napi_ok` if the API succeeded.
+
+This method allows a JavaScript function object to be called from a native
+add-on. This is the primary mechanism of calling back *from* the add-on's
+native code *into* JavaScript. For special cases like calling into JavaScript
+after an async operation, see [`napi_make_callback`][].
+
+A sample use case might look as follows. Consider the following JavaScript
+snippet:
+```js
+function AddTwo(num) {
+ return num + 2;
+}
+```
+
+Then, the above function can be invoked from a native add-on using the
+following code:
+```C
+// Get the function named "AddTwo" on the global object
+napi_value global, add_two, arg;
+napi_status status = napi_get_global(env, &global);
+if (status != napi_ok) return;
+
+status = napi_get_named_property(env, global, "AddTwo", &add_two);
+if (status != napi_ok) return;
+
+// const arg = 1337
+status = napi_create_number(env, 1337, &arg);
+if (status != napi_ok) return;
+
+napi_value* argv = &arg;
+size_t argc = 1;
+
+// AddTwo(arg);
+napi_value return_val;
+status = napi_call_function(env, global, add_two, argc, argv, &return_val);
+if (status != napi_ok) return;
+
+// Convert the result back to a native type
+int32_t result;
+status = napi_get_value_int32(env, return_val, &result);
+if (status != napi_ok) return;
+```
+
+### *napi_create_function*
+
+```C
+napi_status napi_create_function(napi_env env,
+ const char* utf8name,
+ napi_callback cb,
+ void* data,
+ napi_value* result);
+```
+
+- `[in] env`: The environment that the API is invoked under.
+- `[in] utf8name`: The name of the function encoded as UTF8. This is visible
+within JavaScript as the new function object's `name` property.
+- `[in] cb`: The native function which should be called when this function
+object is invoked.
+- `[in] data`: User-provided data context. This will be passed back into the
+function when invoked later.
+- `[out] result`: `napi_value` representing the JavaScript function object for
+the newly created function.
+
+Returns `napi_ok` if the API succeeded.
+
+This API allows an add-on author to create a function object in native code.
+This is the primary mechanism to allow calling *into* the add-on's native code
+*from* JavaScript.
+
+**Note:** The newly created function is not automatically visible from
+script after this call. Instead, a property must be explicitly set on any
+object that is visible to JavaScript, in order for the function to be accessible
+from script.
+
+In order to expose a function as part of the
+add-on's module exports, set the newly created function on the exports
+object. A sample module might look as follows:
+```C
+napi_value SayHello(napi_env env, napi_callback_info info) {
+  printf("Hello\n");
+  return NULL;
+}
+
+void Init(napi_env env, napi_value exports, napi_value module, void* priv) {
+ napi_status status;
+
+ napi_value fn;
+ status = napi_create_function(env, NULL, SayHello, NULL, &fn);
+ if (status != napi_ok) return;
+
+ status = napi_set_named_property(env, exports, "sayHello", fn);
+ if (status != napi_ok) return;
+}
+
+NAPI_MODULE(addon, Init)
+```
+
+Given the above code, the add-on can be used from JavaScript as follows:
+```js
+const myaddon = require('./addon');
+myaddon.sayHello();
+```
+
+**Note:** The string passed to require is not necessarily the name passed into
+`NAPI_MODULE` in the earlier snippet but the name of the target in `binding.gyp`
+responsible for creating the `.node` file.
+
+### *napi_get_cb_info*
+
+```C
+napi_status napi_get_cb_info(napi_env env,
+ napi_callback_info cbinfo,
+ size_t* argc,
+ napi_value* argv,
+ napi_value* thisArg,
+ void** data)
+```
+
+- `[in] env`: The environment that the API is invoked under.
+- `[in] cbinfo`: The callback info passed into the callback function.
+- `[in-out] argc`: Specifies the size of the provided `argv` array
+and receives the actual count of arguments.
+- `[out] argv`: Buffer to which the `napi_value` representing the
+arguments are copied. If there are more arguments than the provided
+count, only the requested number of arguments are copied. If there are fewer
+arguments provided than claimed, the rest of `argv` is filled with `napi_value`
+values that represent `undefined`.
+- `[out] thisArg`: Receives the JavaScript `this` argument for the call.
+- `[out] data`: Receives the data pointer for the callback.
+
+Returns `napi_ok` if the API succeeded.
+
+This method is used within a callback function to retrieve details about the
+call like the arguments and the `this` pointer from a given callback info.
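+
+A sketch of a `napi_callback` that reads two number arguments and returns
+their sum; the function name and argument handling are illustrative only:
+```C
+napi_value Add(napi_env env, napi_callback_info info) {
+  size_t argc = 2;
+  napi_value args[2];
+  napi_status status = napi_get_cb_info(env, info, &argc, args, NULL, NULL);
+  if (status != napi_ok) return NULL;
+
+  double a, b;
+  status = napi_get_value_double(env, args[0], &a);
+  if (status != napi_ok) return NULL;
+  status = napi_get_value_double(env, args[1], &b);
+  if (status != napi_ok) return NULL;
+
+  napi_value sum;
+  status = napi_create_number(env, a + b, &sum);
+  if (status != napi_ok) return NULL;
+  return sum;
+}
+```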
+
+### *napi_is_construct_call*
+
+```C
+napi_status napi_is_construct_call(napi_env env,
+ napi_callback_info cbinfo,
+ bool* result)
+```
+
+- `[in] env`: The environment that the API is invoked under.
+- `[in] cbinfo`: The callback info passed into the callback function.
+- `[out] result`: Whether the native function is being invoked as
+a constructor call.
+
+Returns `napi_ok` if the API succeeded.
+
+This API checks if the current callback was due to a
+constructor call.
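+
+For example, a constructor callback might reject plain function calls with a
+sketch like the following (assuming `env` and `info` are available inside the
+callback):
+```C
+bool is_construct_call;
+napi_status status = napi_is_construct_call(env, info, &is_construct_call);
+if (status != napi_ok) return NULL;
+
+if (!is_construct_call) {
+  napi_throw_type_error(env, "Use 'new' to invoke this constructor");
+  return NULL;
+}
+```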
+
+### *napi_new_instance*
+
+```C
+napi_status napi_new_instance(napi_env env,
+ napi_value cons,
+ size_t argc,
+ napi_value* argv,
+ napi_value* result)
+```
+
+- `[in] env`: The environment that the API is invoked under.
+- `[in] cons`: `napi_value` representing the JavaScript function
+to be invoked as a constructor.
+- `[in] argc`: The count of elements in the `argv` array.
+- `[in] argv`: Array of JavaScript values as `napi_value`
+representing the arguments to the constructor.
+- `[out] result`: `napi_value` representing the JavaScript object returned,
+which in this case is the constructed object.
+
+This method is used to instantiate a new JavaScript value using a given
+`napi_value` that represents the constructor for the object. For example,
+consider the following snippet:
+```js
+function MyObject(param) {
+ this.param = param;
+}
+
+const arg = 'hello';
+const value = new MyObject(arg);
+```
+
+This can be approximated in N-API using the following snippet:
+```C
+// Get the constructor function MyObject
+napi_value global, constructor, arg, value;
+napi_status status = napi_get_global(env, &global);
+if (status != napi_ok) return;
+
+status = napi_get_named_property(env, global, "MyObject", &constructor);
+if (status != napi_ok) return;
+
+// const arg = "hello"
+status = napi_create_string_utf8(env, "hello", -1, &arg);
+if (status != napi_ok) return;
+
+napi_value* argv = &arg;
+size_t argc = 1;
+
+// const value = new MyObject(arg)
+status = napi_new_instance(env, constructor, argc, argv, &value);
+```
+
+Returns `napi_ok` if the API succeeded.
+
+### *napi_make_callback*
+
+```C
+napi_status napi_make_callback(napi_env env,
+ napi_value recv,
+ napi_value func,
+                               size_t argc,
+ const napi_value* argv,
+ napi_value* result)
+```
+
+- `[in] env`: The environment that the API is invoked under.
+- `[in] recv`: The `this` object passed to the called function.
+- `[in] func`: `napi_value` representing the JavaScript function
+to be invoked.
+- `[in] argc`: The count of elements in the `argv` array.
+- `[in] argv`: Array of JavaScript values as `napi_value`
+representing the arguments to the function.
+- `[out] result`: `napi_value` representing the JavaScript object returned.
+
+Returns `napi_ok` if the API succeeded.
+
+This method allows a JavaScript function object to be called from a native
+add-on. This API is similar to `napi_call_function`. However, it is used to call
+*from* native code back *into* JavaScript *after* returning from an async
+operation (when there is no other script on the stack). It is a fairly simple
+wrapper around `node::MakeCallback`.
+
+For an example on how to use `napi_make_callback`, see the section on
+[Asynchronous Operations][].
+
+## Object Wrap
+
+N-API offers a way to "wrap" C++ classes and instances so that the class
+constructor and methods can be called from JavaScript.
+
+ 1. The [`napi_define_class`][] API defines a JavaScript class with constructor,
+ static properties and methods, and instance properties and methods that
+    correspond to the C++ class.
+ 2. When JavaScript code invokes the constructor, the constructor callback
+ uses [`napi_wrap`][] to wrap a new C++ instance in a JavaScript object,
+ then returns the wrapper object.
+ 3. When JavaScript code invokes a method or property accessor on the class,
+ the corresponding `napi_callback` C++ function is invoked. For an instance
+ callback, [`napi_unwrap`][] obtains the C++ instance that is the target of
+ the call.
+
+### *napi_define_class*
+
+```C
+napi_status napi_define_class(napi_env env,
+ const char* utf8name,
+ napi_callback constructor,
+ void* data,
+ size_t property_count,
+ const napi_property_descriptor* properties,
+ napi_value* result);
+```
+
+ - `[in] env`: The environment that the API is invoked under.
+ - `[in] utf8name`: Name of the JavaScript constructor function; this is
+ not required to be the same as the C++ class name, though it is recommended
+ for clarity.
+ - `[in] constructor`: Callback function that handles constructing instances
+ of the class. (This should be a static method on the class, not an actual
+ C++ constructor function.)
+ - `[in] data`: Optional data to be passed to the constructor callback as
+ the `data` property of the callback info.
+ - `[in] property_count`: Number of items in the `properties` array argument.
+ - `[in] properties`: Array of property descriptors describing static and
+   instance data properties, accessors, and methods on the class. See
+   `napi_property_descriptor`.
+ - `[out] result`: A `napi_value` representing the constructor function for
+ the class.
+
+Returns `napi_ok` if the API succeeded.
+
+Defines a JavaScript class that corresponds to a C++ class, including:
+ - A JavaScript constructor function that has the class name and invokes the
+ provided C++ constructor callback.
+ - Properties on the constructor function corresponding to _static_ data
+ properties, accessors, and methods of the C++ class (defined by
+ property descriptors with the `napi_static` attribute).
+ - Properties on the constructor function's `prototype` object corresponding to
+ _non-static_ data properties, accessors, and methods of the C++ class
+ (defined by property descriptors without the `napi_static` attribute).
+
+The C++ constructor callback should be a static method on the class that calls
+the actual class constructor, then wraps the new C++ instance in a JavaScript
+object, and returns the wrapper object. See `napi_wrap()` for details.
+
+The JavaScript constructor function returned from [`napi_define_class`][] is
+often saved and used later, to construct new instances of the class from native
+code, and/or check whether provided values are instances of the class. In that
+case, to prevent the function value from being garbage-collected, create a
+persistent reference to it using [`napi_create_reference`][] and ensure the
+reference count is kept >= 1.
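+
+A sketch of keeping the constructor alive this way, assuming `constructor`
+was produced by [`napi_define_class`][]:
+```C
+napi_ref constructor_ref;
+napi_status status =
+    napi_create_reference(env, constructor, 1, &constructor_ref);
+if (status != napi_ok) return status;
+
+// Later, retrieve the constructor to create instances or check types.
+napi_value constructor_value;
+status = napi_get_reference_value(env, constructor_ref, &constructor_value);
+if (status != napi_ok) return status;
+```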
+
+### *napi_wrap*
+
+```C
+napi_status napi_wrap(napi_env env,
+ napi_value js_object,
+ void* native_object,
+ napi_finalize finalize_cb,
+ void* finalize_hint,
+ napi_ref* result);
+```
+
+ - `[in] env`: The environment that the API is invoked under.
+ - `[in] js_object`: The JavaScript object that will be the wrapper for the
+ native object. This object _must_ have been created from the `prototype` of
+ a constructor that was created using `napi_define_class()`.
+ - `[in] native_object`: The native instance that will be wrapped in the
+ JavaScript object.
+ - `[in] finalize_cb`: Optional native callback that can be used to free the
+ native instance when the JavaScript object is ready for garbage-collection.
+ - `[in] finalize_hint`: Optional contextual hint that is passed to the
+ finalize callback.
+ - `[out] result`: Optional reference to the wrapped object.
+
+Returns `napi_ok` if the API succeeded.
+
+Wraps a native instance in a JavaScript object of the corresponding type.
+
+When JavaScript code invokes a constructor for a class that was defined using
+`napi_define_class()`, the `napi_callback` for the constructor is invoked.
+After constructing an instance of the native class, the callback must then call
+`napi_wrap()` to wrap the newly constructed instance in the already-created
+JavaScript object that is the `this` argument to the constructor callback.
+(That `this` object was created from the constructor function's `prototype`,
+so it already has definitions of all the instance properties and methods.)
+
+Typically when wrapping a class instance, a finalize callback should be
+provided that simply deletes the native instance that is received as the `data`
+argument to the finalize callback.
+
+The optional returned reference is initially a weak reference, meaning it
+has a reference count of 0. Typically this reference count would be incremented
+temporarily during async operations that require the instance to remain valid.
+
+Caution: The optional returned reference (if obtained) should be deleted via
+[`napi_delete_reference`][] ONLY in response to the finalize callback invocation.
+(If it is deleted before then, then the finalize callback may never be
+invoked.) Therefore, when obtaining a reference, a finalize callback is also
+required in order to enable correct disposal of the reference.
+
+### *napi_unwrap*
+
+```C
+napi_status napi_unwrap(napi_env env,
+ napi_value js_object,
+ void** result);
+```
+
+ - `[in] env`: The environment that the API is invoked under.
+ - `[in] js_object`: The object associated with the C++ class instance.
+ - `[out] result`: Pointer to the wrapped C++ class instance.
+
+Returns `napi_ok` if the API succeeded.
+
+When JavaScript code invokes a method or property accessor on the class, the
+corresponding `napi_callback` is invoked. If the callback is for an instance
+method or accessor, then the `this` argument to the callback is the wrapper
+object; the wrapped C++ instance that is the target of the call can be obtained
+then by calling `napi_unwrap()` on the wrapper object.
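+
+A condensed sketch of the wrap/unwrap pattern follows. The `MyObject` struct,
+the callback names and the use of `malloc`/`free` (requiring `<stdlib.h>`) are
+illustrative assumptions, not part of N-API itself:
+```C
+typedef struct { double value; } MyObject;
+
+void FinalizeMyObject(napi_env env, void* data, void* hint) {
+  // Free the native instance when the JavaScript wrapper is collected.
+  free(data);
+}
+
+napi_value MyObjectConstructor(napi_env env, napi_callback_info info) {
+  napi_value js_this;
+  napi_status status = napi_get_cb_info(env, info, NULL, NULL, &js_this, NULL);
+  if (status != napi_ok) return NULL;
+
+  MyObject* obj = malloc(sizeof(MyObject));
+  obj->value = 0;
+
+  // Associate the native instance with the JavaScript `this` object.
+  status = napi_wrap(env, js_this, obj, FinalizeMyObject, NULL, NULL);
+  if (status != napi_ok) {
+    free(obj);
+    return NULL;
+  }
+  return js_this;
+}
+
+napi_value GetValue(napi_env env, napi_callback_info info) {
+  napi_value js_this;
+  napi_status status = napi_get_cb_info(env, info, NULL, NULL, &js_this, NULL);
+  if (status != napi_ok) return NULL;
+
+  MyObject* obj;
+  status = napi_unwrap(env, js_this, (void**)&obj);
+  if (status != napi_ok) return NULL;
+
+  napi_value result;
+  status = napi_create_number(env, obj->value, &result);
+  if (status != napi_ok) return NULL;
+  return result;
+}
+```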
+
+## Asynchronous Operations
+
+Addon modules often need to leverage async helpers from libuv as part of their
+implementation. This allows them to schedule work to be executed asynchronously
+so that their methods can return in advance of the work being completed. This
+is important in order to allow them to avoid blocking overall execution
+of the Node.js application.
+
+N-API provides an ABI-stable interface for these
+supporting functions which covers the most common asynchronous use cases.
+
+N-API defines the `napi_async_work` structure which is used to manage
+asynchronous workers. Instances are created/deleted with
+[`napi_create_async_work`][] and [`napi_delete_async_work`][].
+
+The `execute` and `complete` callbacks are functions that will be
+invoked when the executor is ready to execute and when it completes its
+task respectively. These functions implement the following interfaces:
+
+```C
+typedef void (*napi_async_execute_callback)(napi_env env,
+ void* data);
+typedef void (*napi_async_complete_callback)(napi_env env,
+ napi_status status,
+ void* data);
+```
+
+
+When these methods are invoked, the `data` parameter passed will be the
+addon-provided `void*` data that was passed into the
+`napi_create_async_work` call.
+
+Once created, the async worker can be queued
+for execution using the [`napi_queue_async_work`][] function:
+
+```C
+NAPI_EXTERN napi_status napi_queue_async_work(napi_env env,
+ napi_async_work work);
+```
+
+[`napi_cancel_async_work`][] can be used if the work needs
+to be cancelled before the work has started execution.
+
+After calling [`napi_cancel_async_work`][], the `complete` callback
+will be invoked with a status value of `napi_cancelled`.
+The work should not be deleted before the `complete`
+callback invocation, even when it was cancelled.
+
+**Note:** As mentioned in the section on memory management, if
+the code to be run in the callbacks will create N-API values, then
+N-API handle scope functions must be used to create/destroy a
+`napi_handle_scope` such that the scope is active when
+objects can be created.
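+
+A condensed sketch of the overall flow follows. The `AddonData` struct, the
+doubling "work", and the use of `malloc`/`free` (requiring `<stdlib.h>`) are
+illustrative assumptions:
+```C
+typedef struct {
+  int32_t input;
+  int32_t output;
+  napi_async_work work;
+} AddonData;
+
+void Execute(napi_env env, void* data) {
+  // Runs on a worker thread; must not call N-API functions that operate on
+  // JavaScript values.
+  AddonData* addon_data = (AddonData*)data;
+  addon_data->output = addon_data->input * 2;
+}
+
+void Complete(napi_env env, napi_status status, void* data) {
+  // Runs on the main thread after Execute finishes (or the work is
+  // cancelled); JavaScript values may be created here.
+  AddonData* addon_data = (AddonData*)data;
+  napi_delete_async_work(env, addon_data->work);
+  free(addon_data);
+}
+
+// Inside a napi_callback, assuming `env` is available:
+//   AddonData* addon_data = malloc(sizeof(AddonData));
+//   addon_data->input = 21;
+//   napi_create_async_work(env, Execute, Complete, addon_data,
+//                          &addon_data->work);
+//   napi_queue_async_work(env, addon_data->work);
+```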
+
+
+### napi_create_async_work
+
+```C
+NAPI_EXTERN
+napi_status napi_create_async_work(napi_env env,
+ napi_async_execute_callback execute,
+ napi_async_complete_callback complete,
+ void* data,
+ napi_async_work* result);
+```
+
+- `[in] env`: The environment that the API is invoked under.
+- `[in] execute`: The native function which should be called to execute
+the logic asynchronously.
+- `[in] complete`: The native function which will be called when the
+asynchronous logic is complete or is cancelled.
+- `[in] data`: User-provided data context. This will be passed back into the
+execute and complete functions.
+- `[out] result`: `napi_async_work*` which is the handle to the newly created
+async work.
+
+Returns `napi_ok` if the API succeeded.
+
+This API allocates a work object that is used to execute logic asynchronously.
+It should be freed using [`napi_delete_async_work`][] once the work is no longer
+required.
+
+### napi_delete_async_work
+
+```C
+NAPI_EXTERN napi_status napi_delete_async_work(napi_env env,
+ napi_async_work work);
+```
+
+- `[in] env`: The environment that the API is invoked under.
+- `[in] work`: The handle returned by the call to `napi_create_async_work`.
+
+Returns `napi_ok` if the API succeeded.
+
+This API frees a previously allocated work object.
+
+### napi_queue_async_work
+
+```C
+NAPI_EXTERN napi_status napi_queue_async_work(napi_env env,
+ napi_async_work work);
+```
+
+- `[in] env`: The environment that the API is invoked under.
+- `[in] work`: The handle returned by the call to `napi_create_async_work`.
+
+Returns `napi_ok` if the API succeeded.
+
+This API requests that the previously allocated work be scheduled
+for execution.
+
+### napi_cancel_async_work
+
+```C
+NAPI_EXTERN napi_status napi_cancel_async_work(napi_env env,
+ napi_async_work work);
+```
+
+- `[in] env`: The environment that the API is invoked under.
+- `[in] work`: The handle returned by the call to `napi_create_async_work`.
+
+Returns `napi_ok` if the API succeeded.
+
+This API cancels queued work, provided
+it has not yet started execution. After this function is called
+the `complete` callback will be invoked with a status value of
+`napi_cancelled`. The work should not be deleted before the `complete`
+callback invocation, even when it was cancelled.
+
+
+[Asynchronous Operations]: #n_api_asynchronous_operations
+[Basic N-API Data Types]: #n_api_basic_n_api_data_types
+[ECMAScript Language Specification]: https://tc39.github.io/ecma262/
+[Error Handling]: #n_api_error_handling
+[Module Registration]: #n_api_module_registration
+[Native Abstractions for Node.js]: https://github.com/nodejs/nan
+[Object Lifetime Management]: #n_api_object_lifetime_management
+[Object Wrap]: #n_api_object_wrap
+[Section 9.1.6]: https://tc39.github.io/ecma262/#sec-ordinary-object-internal-methods-and-internal-slots-defineownproperty-p-desc
+[Section 12.5.5]: https://tc39.github.io/ecma262/#sec-typeof-operator
+[Working with JavaScript Functions]: #n_api_working_with_javascript_functions
+[Working with JavaScript Properties]: #n_api_working_with_javascript_properties
+[Working with JavaScript Values]: #n_api_working_with_javascript_values
+[Working with JavaScript Values - Abstract Operations]: #n_api_working_with_javascript_values_abstract_operations
+
+[`napi_cancel_async_work`]: #n_api_napi_cancel_async_work
+[`napi_close_escapable_handle_scope`]: #n_api_napi_close_escapable_handle_scope
+[`napi_close_handle_scope`]: #n_api_napi_close_handle_scope
+[`napi_create_async_work`]: #n_api_napi_create_async_work
+[`napi_create_error`]: #n_api_napi_create_error
+[`napi_create_external_arraybuffer`]: #n_api_napi_create_external_arraybuffer
+[`napi_create_range_error`]: #n_api_napi_create_range_error
+[`napi_create_reference`]: #n_api_napi_create_reference
+[`napi_create_type_error`]: #n_api_napi_create_type_error
+[`napi_define_class`]: #n_api_napi_define_class
+[`napi_delete_async_work`]: #n_api_napi_delete_async_work
+[`napi_delete_reference`]: #n_api_napi_delete_reference
+[`napi_escape_handle`]: #n_api_napi_escape_handle
+[`napi_get_array_length`]: #n_api_napi_get_array_length
+[`napi_get_element`]: #n_api_napi_get_element
+[`napi_get_property`]: #n_api_napi_get_property
+[`napi_has_property`]: #n_api_napi_has_property
+[`napi_set_property`]: #n_api_napi_set_property
+[`napi_get_reference_value`]: #n_api_napi_get_reference_value
+[`napi_is_error`]: #n_api_napi_is_error
+[`napi_is_exception_pending`]: #n_api_napi_is_exception_pending
+[`napi_get_last_error_info`]: #n_api_napi_get_last_error_info
+[`napi_get_and_clear_last_exception`]: #n_api_napi_get_and_clear_last_exception
+[`napi_make_callback`]: #n_api_napi_make_callback
+[`napi_open_escapable_handle_scope`]: #n_api_napi_open_escapable_handle_scope
+[`napi_open_handle_scope`]: #n_api_napi_open_handle_scope
+[`napi_property_descriptor`]: #n_api_napi_property_descriptor
+[`napi_queue_async_work`]: #n_api_napi_queue_async_work
+[`napi_reference_ref`]: #n_api_napi_reference_ref
+[`napi_reference_unref`]: #n_api_napi_reference_unref
+[`napi_throw_error`]: #n_api_napi_throw_error
+[`napi_throw_range_error`]: #n_api_napi_throw_range_error
+[`napi_throw_type_error`]: #n_api_napi_throw_type_error
+[`napi_unwrap`]: #n_api_napi_unwrap
+[`napi_wrap`]: #n_api_napi_wrap
From 5d2afb2174ef15bade62de882cb78cc4f449a59f Mon Sep 17 00:00:00 2001
From: gwer
Date: Sun, 23 Apr 2017 02:01:54 +0300
Subject: [PATCH 069/227] test: replace indexOf with includes
Start the transition to Array.prototype.includes() and
String.prototype.includes(). This commit refactors most of the
comparisons of Array.prototype.indexOf() and String.prototype.indexOf()
return values with -1 to the former methods in tests.
Backport-PR-URL: https://github.com/nodejs/node/pull/19447
PR-URL: https://github.com/nodejs/node/pull/12604
Refs: https://github.com/nodejs/node/issues/12586
Reviewed-By: Alexey Orlenko
Reviewed-By: James M Snell
Reviewed-By: Gibson Fahnestock
---
test/addons-napi/test_constructor/test.js | 16 ++++++++--------
test/addons-napi/test_properties/test.js | 16 ++++++++--------
2 files changed, 16 insertions(+), 16 deletions(-)
diff --git a/test/addons-napi/test_constructor/test.js b/test/addons-napi/test_constructor/test.js
index 92440bf49e74c1..26083db7a28a21 100644
--- a/test/addons-napi/test_constructor/test.js
+++ b/test/addons-napi/test_constructor/test.js
@@ -22,14 +22,14 @@ const propertyNames = [];
for (const name in test_object) {
propertyNames.push(name);
}
-assert.ok(propertyNames.indexOf('echo') >= 0);
-assert.ok(propertyNames.indexOf('readwriteValue') >= 0);
-assert.ok(propertyNames.indexOf('readonlyValue') >= 0);
-assert.ok(propertyNames.indexOf('hiddenValue') < 0);
-assert.ok(propertyNames.indexOf('readwriteAccessor1') < 0);
-assert.ok(propertyNames.indexOf('readwriteAccessor2') < 0);
-assert.ok(propertyNames.indexOf('readonlyAccessor1') < 0);
-assert.ok(propertyNames.indexOf('readonlyAccessor2') < 0);
+assert.ok(propertyNames.includes('echo'));
+assert.ok(propertyNames.includes('readwriteValue'));
+assert.ok(propertyNames.includes('readonlyValue'));
+assert.ok(!propertyNames.includes('hiddenValue'));
+assert.ok(!propertyNames.includes('readwriteAccessor1'));
+assert.ok(!propertyNames.includes('readwriteAccessor2'));
+assert.ok(!propertyNames.includes('readonlyAccessor1'));
+assert.ok(!propertyNames.includes('readonlyAccessor2'));
// The napi_writable attribute should be ignored for accessors.
test_object.readwriteAccessor1 = 1;
diff --git a/test/addons-napi/test_properties/test.js b/test/addons-napi/test_properties/test.js
index 868c603879f1a2..a8127a27860eb3 100644
--- a/test/addons-napi/test_properties/test.js
+++ b/test/addons-napi/test_properties/test.js
@@ -21,14 +21,14 @@ const propertyNames = [];
for (const name in test_object) {
propertyNames.push(name);
}
-assert.ok(propertyNames.indexOf('echo') >= 0);
-assert.ok(propertyNames.indexOf('readwriteValue') >= 0);
-assert.ok(propertyNames.indexOf('readonlyValue') >= 0);
-assert.ok(propertyNames.indexOf('hiddenValue') < 0);
-assert.ok(propertyNames.indexOf('readwriteAccessor1') < 0);
-assert.ok(propertyNames.indexOf('readwriteAccessor2') < 0);
-assert.ok(propertyNames.indexOf('readonlyAccessor1') < 0);
-assert.ok(propertyNames.indexOf('readonlyAccessor2') < 0);
+assert.ok(propertyNames.includes('echo'));
+assert.ok(propertyNames.includes('readwriteValue'));
+assert.ok(propertyNames.includes('readonlyValue'));
+assert.ok(!propertyNames.includes('hiddenValue'));
+assert.ok(!propertyNames.includes('readwriteAccessor1'));
+assert.ok(!propertyNames.includes('readwriteAccessor2'));
+assert.ok(!propertyNames.includes('readonlyAccessor1'));
+assert.ok(!propertyNames.includes('readonlyAccessor2'));
// The napi_writable attribute should be ignored for accessors.
test_object.readwriteAccessor1 = 1;
From b2bf6c873f4c29fdce6c5b22314327df4e93791e Mon Sep 17 00:00:00 2001
From: Rich Trott
Date: Thu, 27 Apr 2017 17:27:05 -0700
Subject: [PATCH 070/227] test,lib,doc: use function declarations
Replace function expressions with function declarations in preparation
for a lint rule requiring function declarations.
Backport-PR-URL: https://github.com/nodejs/node/pull/19447
PR-URL: https://github.com/nodejs/node/pull/12711
Reviewed-By: Vse Mozhet Byt
Reviewed-By: Gibson Fahnestock
---
test/addons-napi/test_async/test.js | 5 ++---
test/addons-napi/test_exception/test.js | 6 +++---
test/addons-napi/test_instanceof/test.js | 4 ++--
3 files changed, 7 insertions(+), 8 deletions(-)
diff --git a/test/addons-napi/test_async/test.js b/test/addons-napi/test_async/test.js
index 7c140d79fc054f..2b4577624a371e 100644
--- a/test/addons-napi/test_async/test.js
+++ b/test/addons-napi/test_async/test.js
@@ -6,8 +6,7 @@ const test_async = require(`./build/${common.buildType}/test_async`);
test_async.Test(5, common.mustCall(function(err, val) {
assert.strictEqual(err, null);
assert.strictEqual(val, 10);
- process.nextTick(common.mustCall(function() {}));
+ process.nextTick(common.mustCall());
}));
-const cancelSuceeded = function() {};
-test_async.TestCancel(common.mustCall(cancelSuceeded));
+test_async.TestCancel(common.mustCall());
diff --git a/test/addons-napi/test_exception/test.js b/test/addons-napi/test_exception/test.js
index 83d2b5000ec54a..94f9566a4b341f 100644
--- a/test/addons-napi/test_exception/test.js
+++ b/test/addons-napi/test_exception/test.js
@@ -4,12 +4,12 @@ const common = require('../../common');
const test_exception = require(`./build/${common.buildType}/test_exception`);
const assert = require('assert');
const theError = new Error('Some error');
-const throwTheError = function() {
+function throwTheError() {
throw theError;
-};
+}
let caughtError;
-const throwNoError = function() {};
+const throwNoError = common.noop;
// Test that the native side successfully captures the exception
let returnedError = test_exception.returnException(throwTheError);
diff --git a/test/addons-napi/test_instanceof/test.js b/test/addons-napi/test_instanceof/test.js
index 38d17031e9a6c9..418149d1909e6f 100644
--- a/test/addons-napi/test_instanceof/test.js
+++ b/test/addons-napi/test_instanceof/test.js
@@ -57,14 +57,14 @@ if (typeof Symbol !== 'undefined' && 'hasInstance' in Symbol &&
(theObject instanceof theConstructor));
}
- const MyClass = function MyClass() {};
+ function MyClass() {}
Object.defineProperty(MyClass, Symbol.hasInstance, {
value: function(candidate) {
return 'mark' in candidate;
}
});
- const MySubClass = function MySubClass() {};
+ function MySubClass() {}
MySubClass.prototype = new MyClass();
let x = new MySubClass();
From 7507d1e0e66dc69852f0a230d01080fab4ac4c08 Mon Sep 17 00:00:00 2001
From: Gabriel Schulhof
Date: Thu, 27 Apr 2017 22:15:36 +0300
Subject: [PATCH 071/227] n-api: remove unnecessary try-catch bracket from
certain APIs
These APIs do not need a try-catch around their body, because no
exceptions are thrown in their implementation:
- `napi_is_array()`
- `napi_get_value_string_latin1()`
- `napi_get_value_string_utf8()`
- `napi_get_value_string_utf16()`
- `napi_get_value_external()`
- `napi_is_buffer()`
- `napi_is_arraybuffer()`
- `napi_get_arraybuffer_info()`
- `napi_is_typedarray()`
- `napi_get_typedarray_info()`
Fixes: https://github.com/nodejs/abi-stable-node/issues/238
Backport-PR-URL: https://github.com/nodejs/node/pull/19447
PR-URL: https://github.com/nodejs/node/pull/12705
Reviewed-By: Michael Dawson
Reviewed-By: Colin Ihrig
Reviewed-By: Jason Ginchereau
Reviewed-By: Anna Henningsen
---
src/node_api.cc | 42 +++++++++++++++++++++---------------------
1 file changed, 21 insertions(+), 21 deletions(-)
diff --git a/src/node_api.cc b/src/node_api.cc
index d1ca7bec27c269..4113124b093419 100644
--- a/src/node_api.cc
+++ b/src/node_api.cc
@@ -1209,14 +1209,14 @@ napi_status napi_define_properties(napi_env env,
}
napi_status napi_is_array(napi_env env, napi_value value, bool* result) {
- NAPI_PREAMBLE(env);
+ CHECK_ENV(env);
CHECK_ARG(env, value);
CHECK_ARG(env, result);
   v8::Local<v8::Value> val = v8impl::V8LocalValueFromJsValue(value);
*result = val->IsArray();
- return GET_RETURN_STATUS(env);
+ return napi_clear_last_error(env);
}
napi_status napi_get_array_length(napi_env env,
@@ -1777,7 +1777,7 @@ napi_status napi_get_value_string_latin1(napi_env env,
char* buf,
size_t bufsize,
size_t* result) {
- NAPI_PREAMBLE(env);
+ CHECK_ENV(env);
CHECK_ARG(env, value);
   v8::Local<v8::Value> val = v8impl::V8LocalValueFromJsValue(value);
@@ -1797,7 +1797,7 @@ napi_status napi_get_value_string_latin1(napi_env env,
}
}
- return GET_RETURN_STATUS(env);
+ return napi_clear_last_error(env);
}
// Copies a JavaScript string into a UTF-8 string buffer. The result is the
@@ -1813,7 +1813,7 @@ napi_status napi_get_value_string_utf8(napi_env env,
char* buf,
size_t bufsize,
size_t* result) {
- NAPI_PREAMBLE(env);
+ CHECK_ENV(env);
CHECK_ARG(env, value);
   v8::Local<v8::Value> val = v8impl::V8LocalValueFromJsValue(value);
@@ -1833,7 +1833,7 @@ napi_status napi_get_value_string_utf8(napi_env env,
}
}
- return GET_RETURN_STATUS(env);
+ return napi_clear_last_error(env);
}
// Copies a JavaScript string into a UTF-16 string buffer. The result is the
@@ -1849,7 +1849,7 @@ napi_status napi_get_value_string_utf16(napi_env env,
char16_t* buf,
size_t bufsize,
size_t* result) {
- NAPI_PREAMBLE(env);
+ CHECK_ENV(env);
CHECK_ARG(env, value);
   v8::Local<v8::Value> val = v8impl::V8LocalValueFromJsValue(value);
@@ -1870,7 +1870,7 @@ napi_status napi_get_value_string_utf16(napi_env env,
}
}
- return GET_RETURN_STATUS(env);
+ return napi_clear_last_error(env);
}
napi_status napi_coerce_to_object(napi_env env,
@@ -2024,13 +2024,13 @@ napi_status napi_create_external(napi_env env,
*result = v8impl::JsValueFromV8LocalValue(external_value);
- return GET_RETURN_STATUS(env);
+ return napi_clear_last_error(env);
}
napi_status napi_get_value_external(napi_env env,
napi_value value,
void** result) {
- NAPI_PREAMBLE(env);
+ CHECK_ENV(env);
CHECK_ARG(env, value);
CHECK_ARG(env, result);
@@ -2040,7 +2040,7 @@ napi_status napi_get_value_external(napi_env env,
   v8::Local<v8::External> external_value = val.As<v8::External>();
*result = external_value->Value();
- return GET_RETURN_STATUS(env);
+ return napi_clear_last_error(env);
}
// Set initial_refcount to 0 for a weak reference, >0 for a strong reference.
@@ -2481,12 +2481,12 @@ napi_status napi_create_buffer_copy(napi_env env,
}
napi_status napi_is_buffer(napi_env env, napi_value value, bool* result) {
- NAPI_PREAMBLE(env);
+ CHECK_ENV(env);
CHECK_ARG(env, value);
CHECK_ARG(env, result);
*result = node::Buffer::HasInstance(v8impl::V8LocalValueFromJsValue(value));
- return GET_RETURN_STATUS(env);
+ return napi_clear_last_error(env);
}
napi_status napi_get_buffer_info(napi_env env,
@@ -2510,14 +2510,14 @@ napi_status napi_get_buffer_info(napi_env env,
}
napi_status napi_is_arraybuffer(napi_env env, napi_value value, bool* result) {
- NAPI_PREAMBLE(env);
+ CHECK_ENV(env);
CHECK_ARG(env, value);
CHECK_ARG(env, result);
   v8::Local<v8::Value> val = v8impl::V8LocalValueFromJsValue(value);
*result = val->IsArrayBuffer();
- return GET_RETURN_STATUS(env);
+ return napi_clear_last_error(env);
}
napi_status napi_create_arraybuffer(napi_env env,
@@ -2574,7 +2574,7 @@ napi_status napi_get_arraybuffer_info(napi_env env,
napi_value arraybuffer,
void** data,
size_t* byte_length) {
- NAPI_PREAMBLE(env);
+ CHECK_ENV(env);
CHECK_ARG(env, arraybuffer);
   v8::Local<v8::Value> value = v8impl::V8LocalValueFromJsValue(arraybuffer);
@@ -2591,18 +2591,18 @@ napi_status napi_get_arraybuffer_info(napi_env env,
*byte_length = contents.ByteLength();
}
- return GET_RETURN_STATUS(env);
+ return napi_clear_last_error(env);
}
napi_status napi_is_typedarray(napi_env env, napi_value value, bool* result) {
- NAPI_PREAMBLE(env);
+ CHECK_ENV(env);
CHECK_ARG(env, value);
CHECK_ARG(env, result);
   v8::Local<v8::Value> val = v8impl::V8LocalValueFromJsValue(value);
*result = val->IsTypedArray();
- return GET_RETURN_STATUS(env);
+ return napi_clear_last_error(env);
}
napi_status napi_create_typedarray(napi_env env,
@@ -2664,7 +2664,7 @@ napi_status napi_get_typedarray_info(napi_env env,
void** data,
napi_value* arraybuffer,
size_t* byte_offset) {
- NAPI_PREAMBLE(env);
+ CHECK_ENV(env);
CHECK_ARG(env, typedarray);
   v8::Local<v8::Value> value = v8impl::V8LocalValueFromJsValue(typedarray);
@@ -2712,7 +2712,7 @@ napi_status napi_get_typedarray_info(napi_env env,
*byte_offset = array->ByteOffset();
}
- return GET_RETURN_STATUS(env);
+ return napi_clear_last_error(env);
}
namespace uvimpl {
From 1785f3cf447d9c0db1f71a5a5e5c5007f3f3d7d1 Mon Sep 17 00:00:00 2001
From: Michael Dawson
Date: Fri, 28 Apr 2017 15:56:21 -0400
Subject: [PATCH 072/227] test: fix warning in n-api reference test
Add cast to avoid warning during build of addon.
Backport-PR-URL: https://github.com/nodejs/node/pull/19447
PR-URL: https://github.com/nodejs/node/pull/12730
Reviewed-By: James M Snell
Reviewed-By: Colin Ihrig
Reviewed-By: Anna Henningsen
---
test/addons-napi/test_reference/test_reference.c | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/test/addons-napi/test_reference/test_reference.c b/test/addons-napi/test_reference/test_reference.c
index 1a238a560aac53..9be9fd135fb0e9 100644
--- a/test/addons-napi/test_reference/test_reference.c
+++ b/test/addons-napi/test_reference/test_reference.c
@@ -60,10 +60,10 @@ napi_value CheckExternal(napi_env env, napi_callback_info info) {
NAPI_ASSERT(env, argtype == napi_external, "Expected an external value.")
- int* data;
+ void* data;
NAPI_CALL(env, napi_get_value_external(env, arg, &data));
- NAPI_ASSERT(env, data != NULL && *data == test_value,
+ NAPI_ASSERT(env, data != NULL && *(int*)data == test_value,
"An external data value of 1 was expected.")
return NULL;
From f09677fdba9e40545205cfffd0a4232e84145e99 Mon Sep 17 00:00:00 2001
From: Michael Dawson
Date: Fri, 28 Apr 2017 15:28:34 -0400
Subject: [PATCH 073/227] test: add coverage for error apis
Add coverage for N-API functions related to
throwing and creating errors. A number of these
are currently showing as not having any
coverage in the nightly code coverage reports.
Backport-PR-URL: https://github.com/nodejs/node/pull/19447
PR-URL: https://github.com/nodejs/node/pull/12729
Reviewed-By: Colin Ihrig
Reviewed-By: James M Snell
---
test/addons-napi/test_error/test.js | 34 ++++++++++++++
test/addons-napi/test_error/test_error.cc | 55 +++++++++++++++++++++++
2 files changed, 89 insertions(+)
diff --git a/test/addons-napi/test_error/test.js b/test/addons-napi/test_error/test.js
index 521c29250d5ffa..f7479f2c9a64d8 100644
--- a/test/addons-napi/test_error/test.js
+++ b/test/addons-napi/test_error/test.js
@@ -55,3 +55,37 @@ assert.strictEqual(test_error.checkError({}), false,
// Test that non-error primitive is correctly classed
assert.strictEqual(test_error.checkError('non-object'), false,
'Non-error primitive correctly classed by napi_is_error');
+
+assert.throws(() => {
+ test_error.throwExistingError();
+}, /^Error: existing error$/);
+
+assert.throws(() => {
+ test_error.throwError();
+}, /^Error: error$/);
+
+assert.throws(() => {
+ test_error.throwRangeError();
+}, /^RangeError: range error$/);
+
+assert.throws(() => {
+ test_error.throwTypeError();
+}, /^TypeError: type error$/);
+
+let error = test_error.createError();
+assert.ok(error instanceof Error, 'expected error to be an instance of Error');
+assert.strictEqual(error.message, 'error', 'expected message to be "error"');
+
+error = test_error.createRangeError();
+assert.ok(error instanceof RangeError,
+ 'expected error to be an instance of RangeError');
+assert.strictEqual(error.message,
+ 'range error',
+ 'expected message to be "range error"');
+
+error = test_error.createTypeError();
+assert.ok(error instanceof TypeError,
+ 'expected error to be an instance of TypeError');
+assert.strictEqual(error.message,
+ 'type error',
+ 'expected message to be "type error"');
diff --git a/test/addons-napi/test_error/test_error.cc b/test/addons-napi/test_error/test_error.cc
index eb616cac371b0c..ddba2059f23be6 100644
--- a/test/addons-napi/test_error/test_error.cc
+++ b/test/addons-napi/test_error/test_error.cc
@@ -15,9 +15,64 @@ napi_value checkError(napi_env env, napi_callback_info info) {
return result;
}
+napi_value throwExistingError(napi_env env, napi_callback_info info) {
+ napi_value message;
+ napi_value error;
+ NAPI_CALL(env, napi_create_string_utf8(env, "existing error", -1, &message));
+ NAPI_CALL(env, napi_create_error(env, message, &error));
+ NAPI_CALL(env, napi_throw(env, error));
+ return nullptr;
+}
+
+napi_value throwError(napi_env env, napi_callback_info info) {
+ NAPI_CALL(env, napi_throw_error(env, "error"));
+ return nullptr;
+}
+
+napi_value throwRangeError(napi_env env, napi_callback_info info) {
+ NAPI_CALL(env, napi_throw_range_error(env, "range error"));
+ return nullptr;
+}
+
+napi_value throwTypeError(napi_env env, napi_callback_info info) {
+ NAPI_CALL(env, napi_throw_type_error(env, "type error"));
+ return nullptr;
+}
+
+napi_value createError(napi_env env, napi_callback_info info) {
+ napi_value result;
+ napi_value message;
+ NAPI_CALL(env, napi_create_string_utf8(env, "error", -1, &message));
+ NAPI_CALL(env, napi_create_error(env, message, &result));
+ return result;
+}
+
+napi_value createRangeError(napi_env env, napi_callback_info info) {
+ napi_value result;
+ napi_value message;
+ NAPI_CALL(env, napi_create_string_utf8(env, "range error", -1, &message));
+ NAPI_CALL(env, napi_create_range_error(env, message, &result));
+ return result;
+}
+
+napi_value createTypeError(napi_env env, napi_callback_info info) {
+ napi_value result;
+ napi_value message;
+ NAPI_CALL(env, napi_create_string_utf8(env, "type error", -1, &message));
+ NAPI_CALL(env, napi_create_type_error(env, message, &result));
+ return result;
+}
+
void Init(napi_env env, napi_value exports, napi_value module, void* priv) {
napi_property_descriptor descriptors[] = {
DECLARE_NAPI_PROPERTY("checkError", checkError),
+ DECLARE_NAPI_PROPERTY("throwExistingError", throwExistingError),
+ DECLARE_NAPI_PROPERTY("throwError", throwError),
+ DECLARE_NAPI_PROPERTY("throwRangeError", throwRangeError),
+ DECLARE_NAPI_PROPERTY("throwTypeError", throwTypeError),
+ DECLARE_NAPI_PROPERTY("createError", createError),
+ DECLARE_NAPI_PROPERTY("createRangeError", createRangeError),
+ DECLARE_NAPI_PROPERTY("createTypeError", createTypeError),
};
NAPI_CALL_RETURN_VOID(env, napi_define_properties(
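Editor's note: the NAPI_CALL, NAPI_CALL_RETURN_VOID, NAPI_ASSERT and DECLARE_NAPI_PROPERTY helpers used throughout these tests come from test/addons-napi/common.h, which is not reproduced in this series. The sketch below is an approximation of their control flow under that assumption; the real macros also report which call or assertion failed, and the two-argument form of napi_throw_error matches the N-API of this series.

```c
/* Approximate sketch of the helpers in test/addons-napi/common.h. */
#define NAPI_ASSERT(env, assertion, message)                               \
  do {                                                                     \
    if (!(assertion)) {                                                    \
      napi_throw_error((env), (message));                                  \
      return NULL;                                                         \
    }                                                                      \
  } while (0)

#define NAPI_CALL(env, the_call)                                           \
  do {                                                                     \
    if ((the_call) != napi_ok) {                                           \
      napi_throw_error((env), "N-API call failed");                        \
      return NULL;                                                         \
    }                                                                      \
  } while (0)

#define NAPI_CALL_RETURN_VOID(env, the_call)                               \
  do {                                                                     \
    if ((the_call) != napi_ok) {                                           \
      napi_throw_error((env), "N-API call failed");                        \
      return;                                                              \
    }                                                                      \
  } while (0)

/* Builds a napi_property_descriptor for a plain method property. */
#define DECLARE_NAPI_PROPERTY(name, func)                                  \
  { (name), 0, (func), 0, 0, 0, napi_default, 0 }
```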
From dd7665a68eabf4d10e8fb33e8a1cca1db27dbfd5 Mon Sep 17 00:00:00 2001
From: Hitesh Kanwathirtha
Date: Thu, 13 Apr 2017 20:21:52 -0700
Subject: [PATCH 074/227] test: port test for make_callback to n-api
Improved test coverage for napi_make_callback by porting the
existing addons/make_callback test to n-api
Backport-PR-URL: https://github.com/nodejs/node/pull/19447
PR-URL: https://github.com/nodejs/node/pull/12409
Reviewed-By: Anna Henningsen
Reviewed-By: Benjamin Gruenbaum
Reviewed-By: James M Snell
Reviewed-By: Refael Ackermann
---
.../addons-napi/test_make_callback/binding.cc | 48 ++++++
.../test_make_callback/binding.gyp | 9 ++
test/addons-napi/test_make_callback/test.js | 83 ++++++++++
.../test_make_callback_recurse/binding.cc | 32 ++++
.../test_make_callback_recurse/binding.gyp | 9 ++
.../test_make_callback_recurse/test.js | 151 ++++++++++++++++++
6 files changed, 332 insertions(+)
create mode 100644 test/addons-napi/test_make_callback/binding.cc
create mode 100644 test/addons-napi/test_make_callback/binding.gyp
create mode 100644 test/addons-napi/test_make_callback/test.js
create mode 100644 test/addons-napi/test_make_callback_recurse/binding.cc
create mode 100644 test/addons-napi/test_make_callback_recurse/binding.gyp
create mode 100644 test/addons-napi/test_make_callback_recurse/test.js
diff --git a/test/addons-napi/test_make_callback/binding.cc b/test/addons-napi/test_make_callback/binding.cc
new file mode 100644
index 00000000000000..987b024098598c
--- /dev/null
+++ b/test/addons-napi/test_make_callback/binding.cc
@@ -0,0 +1,48 @@
+#include <node_api.h>
+#include "../common.h"
+#include <vector>
+
+namespace {
+
+napi_value MakeCallback(napi_env env, napi_callback_info info) {
+ const int kMaxArgs = 10;
+ size_t argc = kMaxArgs;
+ napi_value args[kMaxArgs];
+ NAPI_CALL(env, napi_get_cb_info(env, info, &argc, args, NULL, NULL));
+
+ NAPI_ASSERT(env, argc > 0, "Wrong number of arguments");
+
+ napi_value recv = args[0];
+ napi_value func = args[1];
+
+  std::vector<napi_value> argv;
+ for (size_t n = 2; n < argc; n += 1) {
+ argv.push_back(args[n]);
+ }
+
+ napi_valuetype func_type;
+
+ NAPI_CALL(env, napi_typeof(env, func, &func_type));
+
+ napi_value result;
+ if (func_type == napi_function) {
+ NAPI_CALL(env,
+ napi_make_callback(env, recv, func, argv.size(), argv.data(), &result));
+ } else {
+ NAPI_ASSERT(env, false, "Unexpected argument type");
+ }
+
+ return result;
+}
+
+void Init(napi_env env, napi_value exports, napi_value module, void* priv) {
+ napi_value fn;
+ NAPI_CALL_RETURN_VOID(env,
+ napi_create_function(env, NULL, MakeCallback, NULL, &fn));
+ NAPI_CALL_RETURN_VOID(env,
+ napi_set_named_property(env, exports, "makeCallback", fn));
+}
+
+} // namespace
+
+NAPI_MODULE(binding, Init)
diff --git a/test/addons-napi/test_make_callback/binding.gyp b/test/addons-napi/test_make_callback/binding.gyp
new file mode 100644
index 00000000000000..7ede63d94a0d77
--- /dev/null
+++ b/test/addons-napi/test_make_callback/binding.gyp
@@ -0,0 +1,9 @@
+{
+ 'targets': [
+ {
+ 'target_name': 'binding',
+ 'defines': [ 'V8_DEPRECATION_WARNINGS=1' ],
+ 'sources': [ 'binding.cc' ]
+ }
+ ]
+}
diff --git a/test/addons-napi/test_make_callback/test.js b/test/addons-napi/test_make_callback/test.js
new file mode 100644
index 00000000000000..c4f24872bdb78e
--- /dev/null
+++ b/test/addons-napi/test_make_callback/test.js
@@ -0,0 +1,83 @@
+'use strict';
+
+const common = require('../../common');
+const assert = require('assert');
+const vm = require('vm');
+const binding = require(`./build/${common.buildType}/binding`);
+const makeCallback = binding.makeCallback;
+
+function myMultiArgFunc(arg1, arg2, arg3) {
+ console.log(`MyFunc was called with ${arguments.length} arguments`);
+ assert.strictEqual(arg1, 1);
+ assert.strictEqual(arg2, 2);
+ assert.strictEqual(arg3, 3);
+ return 42;
+}
+
+assert.strictEqual(42, makeCallback(process, common.mustCall(function() {
+ assert.strictEqual(0, arguments.length);
+ assert.strictEqual(this, process);
+ return 42;
+})));
+
+assert.strictEqual(42, makeCallback(process, common.mustCall(function(x) {
+ assert.strictEqual(1, arguments.length);
+ assert.strictEqual(this, process);
+ assert.strictEqual(x, 1337);
+ return 42;
+}), 1337));
+
+assert.strictEqual(42,
+ makeCallback(this,
+ common.mustCall(myMultiArgFunc), 1, 2, 3));
+
+// TODO(node-api): napi_make_callback needs to support
+// strings passed for the func argument
+/*
+const recv = {
+ one: common.mustCall(function() {
+ assert.strictEqual(0, arguments.length);
+ assert.strictEqual(this, recv);
+ return 42;
+ }),
+ two: common.mustCall(function(x) {
+ assert.strictEqual(1, arguments.length);
+ assert.strictEqual(this, recv);
+ assert.strictEqual(x, 1337);
+ return 42;
+ }),
+};
+
+assert.strictEqual(42, makeCallback(recv, 'one'));
+assert.strictEqual(42, makeCallback(recv, 'two', 1337));
+
+// Check that callbacks on a receiver from a different context work.
+const foreignObject = vm.runInNewContext('({ fortytwo() { return 42; } })');
+assert.strictEqual(42, makeCallback(foreignObject, 'fortytwo'));
+*/
+
+// Check that the callback is made in the context of the receiver.
+const target = vm.runInNewContext(`
+ (function($Object) {
+ if (Object === $Object)
+ throw new Error('bad');
+ return Object;
+ })
+`);
+assert.notStrictEqual(Object, makeCallback(process, target, Object));
+
+// Runs in inner context.
+const forward = vm.runInNewContext(`
+ (function(forward) {
+ return forward(Object);
+ })
+`);
+
+// Runs in outer context.
+function endpoint($Object) {
+ if (Object === $Object)
+ throw new Error('bad');
+ return Object;
+}
+
+assert.strictEqual(Object, makeCallback(process, forward, endpoint));
diff --git a/test/addons-napi/test_make_callback_recurse/binding.cc b/test/addons-napi/test_make_callback_recurse/binding.cc
new file mode 100644
index 00000000000000..3f5a4c28b43524
--- /dev/null
+++ b/test/addons-napi/test_make_callback_recurse/binding.cc
@@ -0,0 +1,32 @@
+#include <node_api.h>
+#include "../common.h"
+#include <vector>
+
+namespace {
+
+napi_value MakeCallback(napi_env env, napi_callback_info info) {
+ size_t argc = 2;
+ napi_value args[2];
+ NAPI_CALL(env, napi_get_cb_info(env, info, &argc, args, NULL, NULL));
+
+ napi_value recv = args[0];
+ napi_value func = args[1];
+
+ napi_make_callback(env,
+ recv, func, 0 /* argc */, nullptr /* argv */, nullptr /* result */);
+
+ return recv;
+}
+
+void Init(napi_env env, napi_value exports, napi_value module, void* priv) {
+ napi_value fn;
+ NAPI_CALL_RETURN_VOID(env,
+ napi_create_function(env, NULL, MakeCallback, NULL, &fn));
+ NAPI_CALL_RETURN_VOID(env,
+ napi_set_named_property(env, exports, "makeCallback", fn));
+}
+
+
+} // namespace
+
+NAPI_MODULE(binding, Init)
diff --git a/test/addons-napi/test_make_callback_recurse/binding.gyp b/test/addons-napi/test_make_callback_recurse/binding.gyp
new file mode 100644
index 00000000000000..7ede63d94a0d77
--- /dev/null
+++ b/test/addons-napi/test_make_callback_recurse/binding.gyp
@@ -0,0 +1,9 @@
+{
+ 'targets': [
+ {
+ 'target_name': 'binding',
+ 'defines': [ 'V8_DEPRECATION_WARNINGS=1' ],
+ 'sources': [ 'binding.cc' ]
+ }
+ ]
+}
diff --git a/test/addons-napi/test_make_callback_recurse/test.js b/test/addons-napi/test_make_callback_recurse/test.js
new file mode 100644
index 00000000000000..895769bc33029a
--- /dev/null
+++ b/test/addons-napi/test_make_callback_recurse/test.js
@@ -0,0 +1,151 @@
+'use strict';
+
+const common = require('../../common');
+const assert = require('assert');
+const domain = require('domain');
+const binding = require(`./build/${common.buildType}/binding`);
+const makeCallback = binding.makeCallback;
+
+// Make sure this is run in the future.
+const mustCallCheckDomains = common.mustCall(checkDomains);
+
+
+// Make sure that using MakeCallback allows the error to propagate.
+assert.throws(function() {
+ makeCallback({}, function() {
+ throw new Error('hi from domain error');
+ });
+}, /^Error: hi from domain error$/);
+
+
+// Check the execution order of the nextTickQueue and MicrotaskQueue in
+// relation to running multiple MakeCallback's from bootstrap,
+// node::MakeCallback() and node::AsyncWrap::MakeCallback().
+// TODO(trevnorris): Is there a way to verify this is being run during
+// bootstrap?
+(function verifyExecutionOrder(arg) {
+ const results = [];
+
+ // Processing of the MicrotaskQueue is manually handled by node. They are not
+ // processed until after the nextTickQueue has been processed.
+ Promise.resolve(1).then(common.mustCall(function() {
+ results.push(7);
+ }));
+
+ // The nextTick should run after all immediately invoked calls.
+ process.nextTick(common.mustCall(function() {
+ results.push(3);
+
+ // Run same test again but while processing the nextTickQueue to make sure
+ // the following MakeCallback call breaks in the middle of processing the
+ // queue and allows the script to run normally.
+ process.nextTick(common.mustCall(function() {
+ results.push(6);
+ }));
+
+ makeCallback({}, common.mustCall(function() {
+ results.push(4);
+ }));
+
+ results.push(5);
+ }));
+
+ results.push(0);
+
+ // MakeCallback is calling the function immediately, but should then detect
+ // that a script is already in the middle of execution and return before
+ // either the nextTickQueue or MicrotaskQueue are processed.
+ makeCallback({}, common.mustCall(function() {
+ results.push(1);
+ }));
+
+ // This should run before either the nextTickQueue or MicrotaskQueue are
+ // processed. Previously MakeCallback would not detect this circumstance
+ // and process them immediately.
+ results.push(2);
+
+ setImmediate(common.mustCall(function() {
+ for (let i = 0; i < results.length; i++) {
+ assert.strictEqual(results[i], i,
+ `verifyExecutionOrder(${arg}) results: ${results}`);
+ }
+ if (arg === 1) {
+ // The tests are first run on bootstrap during LoadEnvironment() in
+ // src/node.cc. Now run the tests through node::MakeCallback().
+ setImmediate(function() {
+ makeCallback({}, common.mustCall(function() {
+ verifyExecutionOrder(2);
+ }));
+ });
+ } else if (arg === 2) {
+ // setTimeout runs via the TimerWrap, which runs through
+ // AsyncWrap::MakeCallback(). Make sure there are no conflicts using
+ // node::MakeCallback() within it.
+ setTimeout(common.mustCall(function() {
+ verifyExecutionOrder(3);
+ }), 10);
+ } else if (arg === 3) {
+ mustCallCheckDomains();
+ } else {
+ throw new Error('UNREACHABLE');
+ }
+ }));
+}(1));
+
+
+function checkDomains() {
+ // Check that domains are properly entered/exited when called in multiple
+ // levels from both node::MakeCallback() and AsyncWrap::MakeCallback
+ setImmediate(common.mustCall(function() {
+ const d1 = domain.create();
+ const d2 = domain.create();
+ const d3 = domain.create();
+
+ makeCallback({domain: d1}, common.mustCall(function() {
+ assert.strictEqual(d1, process.domain);
+ makeCallback({domain: d2}, common.mustCall(function() {
+ assert.strictEqual(d2, process.domain);
+ makeCallback({domain: d3}, common.mustCall(function() {
+ assert.strictEqual(d3, process.domain);
+ }));
+ assert.strictEqual(d2, process.domain);
+ }));
+ assert.strictEqual(d1, process.domain);
+ }));
+ }));
+
+ setTimeout(common.mustCall(function() {
+ const d1 = domain.create();
+ const d2 = domain.create();
+ const d3 = domain.create();
+
+ makeCallback({domain: d1}, common.mustCall(function() {
+ assert.strictEqual(d1, process.domain);
+ makeCallback({domain: d2}, common.mustCall(function() {
+ assert.strictEqual(d2, process.domain);
+ makeCallback({domain: d3}, common.mustCall(function() {
+ assert.strictEqual(d3, process.domain);
+ }));
+ assert.strictEqual(d2, process.domain);
+ }));
+ assert.strictEqual(d1, process.domain);
+ }));
+ }), 1);
+
+ function testTimer(id) {
+ // Make sure nextTick, setImmediate and setTimeout can all recover properly
+ // after a thrown makeCallback call.
+ const d = domain.create();
+ d.on('error', common.mustCall(function(e) {
+ assert.strictEqual(e.message, `throw from domain ${id}`);
+ }));
+ makeCallback({domain: d}, function() {
+ throw new Error(`throw from domain ${id}`);
+ });
+ throw new Error('UNREACHABLE');
+ }
+
+ process.nextTick(common.mustCall(testTimer), 3);
+ setImmediate(common.mustCall(testTimer), 2);
+ setTimeout(common.mustCall(testTimer), 1, 1);
+}
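Editor's note: as background for the ordering assertions in test_make_callback_recurse/test.js, napi_make_callback differs from napi_call_function in that it behaves like node::MakeCallback(): once the outermost callback returns, the nextTickQueue and then the MicrotaskQueue are drained. The side-by-side below is a hypothetical sketch, not part of the patch; the function name is made up and the signatures match the N-API of this series (no async_context parameter yet).

```c
// Hypothetical comparison of the two ways to call into JS from an addon.
napi_value CallTwoWays(napi_env env, napi_value recv, napi_value fn) {
  napi_value result;

  // Plain call: process.nextTick()/Promise jobs queued by `fn` are NOT
  // drained when this returns.
  NAPI_CALL(env, napi_call_function(env, recv, fn, 0, NULL, &result));

  // Callback-style call: behaves like node::MakeCallback(); when the
  // outermost callback returns, the nextTickQueue and then the
  // MicrotaskQueue are processed, which is what the test above asserts.
  NAPI_CALL(env, napi_make_callback(env, recv, fn, 0, NULL, &result));

  return result;
}
```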
From ce03977f30b8959817cd4d160e63572d5ea32a58 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Micha=C3=ABl=20Zasso?=
Date: Fri, 5 May 2017 14:43:54 +0200
Subject: [PATCH 075/227] test: fix napi test_reference for recent V8
Backport-PR-URL: https://github.com/nodejs/node/pull/19447
PR-URL: https://github.com/nodejs/node/pull/12864
Ref: https://github.com/nodejs/node/pull/12551#issuecomment-297949361
Reviewed-By: James M Snell
Reviewed-By: Michael Dawson
---
test/addons-napi/test_reference/test.js | 29 ++++++++++++++-----------
1 file changed, 16 insertions(+), 13 deletions(-)
diff --git a/test/addons-napi/test_reference/test.js b/test/addons-napi/test_reference/test.js
index ddfec58f1f9d7d..30effe7eec0922 100644
--- a/test/addons-napi/test_reference/test.js
+++ b/test/addons-napi/test_reference/test.js
@@ -33,19 +33,6 @@ assert.strictEqual(test_reference.finalizeCount, 0);
assert.strictEqual(test_reference.finalizeCount, 1);
}
-{
- // Weak reference
- let value = test_reference.createExternalWithFinalize();
- assert.strictEqual(test_reference.finalizeCount, 0);
- test_reference.createReference(value, 0);
- assert.strictEqual(test_reference.referenceValue, value);
- value = null;
- global.gc(); // Value should be GC'd because there is only a weak ref
- assert.strictEqual(test_reference.referenceValue, undefined);
- assert.strictEqual(test_reference.finalizeCount, 1);
- test_reference.deleteReference();
-}
-
{
// Strong reference
let value = test_reference.createExternalWithFinalize();
@@ -85,3 +72,19 @@ assert.strictEqual(test_reference.finalizeCount, 0);
global.gc(); // Value was already GC'd
assert.strictEqual(test_reference.finalizeCount, 1);
}
+
+{
+ // Weak reference
+ let value = test_reference.createExternalWithFinalize();
+ assert.strictEqual(test_reference.finalizeCount, 0);
+ test_reference.createReference(value, 0);
+ assert.strictEqual(test_reference.referenceValue, value);
+ value = null;
+ setImmediate(common.mustCall(() => {
+ // This test only works if gc() is called from an immediate callback.
+ global.gc(); // Value should be GC'd because there is only a weak ref
+ assert.strictEqual(test_reference.referenceValue, undefined);
+ assert.strictEqual(test_reference.finalizeCount, 1);
+ test_reference.deleteReference();
+ }));
+}
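Editor's note: the motivation for moving this block is that, with recent V8, the local `value` may still be treated as reachable while the frame that created it is on the stack, so the weak-reference check only passes reliably when gc() runs from a later tick. A minimal form of the adopted pattern, using the exports already shown in this test (requires --expose-gc):

```js
'use strict';
const common = require('../../common');
const assert = require('assert');
const test_reference = require(`./build/${common.buildType}/test_reference`);

// Drop the last strong reference, then check collection from a later tick
// so the just-exited stack frame can no longer root the value.
let value = test_reference.createExternalWithFinalize();
test_reference.createReference(value, 0);  // initial refcount 0 => weak
value = null;
setImmediate(common.mustCall(() => {
  global.gc();
  assert.strictEqual(test_reference.finalizeCount, 1);
  test_reference.deleteReference();
}));
```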
From 2e36365d569b15f2859c88a488305577fe0fc907 Mon Sep 17 00:00:00 2001
From: Jason Ginchereau
Date: Fri, 5 May 2017 11:38:21 -0700
Subject: [PATCH 076/227] n-api: napi_get_cb_info should fill array
When the number of args requested is greater than the actual number of
args supplied to the function call, the remainder of the args array
should be filled in with `undefined` values. Because of this bug, the
remainder of the array was left uninitialized, which could cause a
crash.
Refer to the documentation for the `argv` parameter at
https://github.com/nodejs/node/blob/master/doc/api/n-api.md#napi_get_cb_info
Backport-PR-URL: https://github.com/nodejs/node/pull/19447
PR-URL: https://github.com/nodejs/node/pull/12863
Reviewed-By: Anna Henningsen
Reviewed-By: James M Snell
Reviewed-By: Michael Dawson
---
src/node_api.cc | 2 +-
test/addons-napi/3_callbacks/binding.c | 17 +++++++++++++++--
test/addons-napi/test_function/test_function.c | 2 +-
3 files changed, 17 insertions(+), 4 deletions(-)
diff --git a/src/node_api.cc b/src/node_api.cc
index 4113124b093419..2f8409fc81627c 100644
--- a/src/node_api.cc
+++ b/src/node_api.cc
@@ -1526,7 +1526,7 @@ napi_status napi_get_cb_info(
if (argv != nullptr) {
CHECK_ARG(env, argc);
- info->Args(argv, std::min(*argc, info->ArgsLength()));
+ info->Args(argv, *argc);
}
if (argc != nullptr) {
*argc = info->ArgsLength();
diff --git a/test/addons-napi/3_callbacks/binding.c b/test/addons-napi/3_callbacks/binding.c
index 47360bd979ffc4..8640a936107d47 100644
--- a/test/addons-napi/3_callbacks/binding.c
+++ b/test/addons-napi/3_callbacks/binding.c
@@ -3,10 +3,23 @@
#include <string.h>
napi_value RunCallback(napi_env env, napi_callback_info info) {
- size_t argc = 1;
- napi_value args[1];
+ size_t argc = 2;
+ napi_value args[2];
NAPI_CALL(env, napi_get_cb_info(env, info, &argc, args, NULL, NULL));
+ NAPI_ASSERT(env, argc == 1,
+ "Wrong number of arguments. Expects a single argument.");
+
+ napi_valuetype valuetype0;
+ NAPI_CALL(env, napi_typeof(env, args[0], &valuetype0));
+ NAPI_ASSERT(env, valuetype0 == napi_function,
+ "Wrong type of arguments. Expects a function as first argument.");
+
+ napi_valuetype valuetype1;
+ NAPI_CALL(env, napi_typeof(env, args[1], &valuetype1));
+ NAPI_ASSERT(env, valuetype1 == napi_undefined,
+ "Additional arguments should be undefined.");
+
napi_value argv[1];
const char* str = "hello world";
size_t str_len = strlen(str);
diff --git a/test/addons-napi/test_function/test_function.c b/test/addons-napi/test_function/test_function.c
index 928f99c184cb57..4ce0203e7232dd 100644
--- a/test/addons-napi/test_function/test_function.c
+++ b/test/addons-napi/test_function/test_function.c
@@ -12,7 +12,7 @@ napi_value Test(napi_env env, napi_callback_info info) {
NAPI_CALL(env, napi_typeof(env, args[0], &valuetype0));
NAPI_ASSERT(env, valuetype0 == napi_function,
- "Wrong type of arguments. Expects a number as first argument.");
+ "Wrong type of arguments. Expects a function as first argument.");
napi_value* argv = args + 1;
argc = argc - 1;
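Editor's note: a hypothetical illustration of the contract this patch fixes (the helper name is made up): request more arguments than the JS caller passed, and the excess slots now come back as `undefined` rather than uninitialized memory.

```c
// Sketch only; mirrors the padding check added to 3_callbacks/binding.c.
napi_value CheckPadding(napi_env env, napi_callback_info info) {
  size_t argc = 4;      // ask for up to four arguments
  napi_value args[4];
  NAPI_CALL(env, napi_get_cb_info(env, info, &argc, args, NULL, NULL));

  // argc now holds the number actually supplied; slots argc..3 must be
  // filled in with undefined by napi_get_cb_info.
  for (size_t i = argc; i < 4; i++) {
    napi_valuetype t;
    NAPI_CALL(env, napi_typeof(env, args[i], &t));
    NAPI_ASSERT(env, t == napi_undefined, "Padding should be undefined");
  }
  return NULL;
}
```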
From a128219a484f74e4e4a5d3db23824fb9b5efb5cd Mon Sep 17 00:00:00 2001
From: Jason Ginchereau
Date: Thu, 4 May 2017 15:24:44 -0700
Subject: [PATCH 077/227] n-api: Handle fatal exception in async callback
- Create a handle scope before invoking the async completion
callback, because it is basically always needed, easy for user
code to forget, and this makes it more consistent with ordinary
N-API function callbacks.
- Check for an unhandled JS exception after invoking an async
completion callback, and report it via `node::FatalException()`.
- Add a corresponding test case for an exception in async callback.
Previously, any unhandled JS exception thrown from a
`napi_async_complete_callback` would be silently ignored. Among other
things this meant assertions in some test cases could be undetected.
Backport-PR-URL: https://github.com/nodejs/node/pull/19447
PR-URL: https://github.com/nodejs/node/pull/12838
Reviewed-By: Anna Henningsen
Reviewed-By: Benjamin Gruenbaum
Reviewed-By: Michael Dawson
Reviewed-By: James M Snell
---
doc/api/n-api.md | 7 -------
src/node_api.cc | 21 ++++++++++++++++++++-
test/addons-napi/test_async/test.js | 18 ++++++++++++++++++
test/addons-napi/test_async/test_async.cc | 19 -------------------
4 files changed, 38 insertions(+), 27 deletions(-)
diff --git a/doc/api/n-api.md b/doc/api/n-api.md
index f9e47a7b8d32a0..bc0499e4d09447 100644
--- a/doc/api/n-api.md
+++ b/doc/api/n-api.md
@@ -2855,13 +2855,6 @@ will be invoked with a status value of `napi_cancelled`.
The work should not be deleted before the `complete`
callback invocation, even when it was cancelled.
-**Note:** As mentioned in the section on memory management, if
-the code to be run in the callbacks will create N-API values, then
-N-API handle scope functions must be used to create/destroy a
-`napi_handle_scope` such that the scope is active when
-objects can be created.
-
-
### napi_create_async_work
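Editor's note: the test case this patch adds is not reproduced here. The sketch below shows the kind of behaviour it checks; `binding.doAsyncWork(cb)` is a hypothetical export that queues async work and invokes `cb` from its `napi_async_complete_callback`. After this change, a throw from that callback reaches the `uncaughtException` handler instead of being silently ignored.

```js
'use strict';
// Hypothetical sketch; doAsyncWork() is assumed, not part of the patch.
const common = require('../../common');
const assert = require('assert');
const binding = require(`./build/${common.buildType}/binding`);

process.on('uncaughtException', common.mustCall((err) => {
  assert.strictEqual(err.message, 'boom from complete callback');
}));

binding.doAsyncWork(common.mustCall(() => {
  throw new Error('boom from complete callback');
}));
```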
-Node.js Addons are dynamically-linked shared objects, written in C or C++, that
+Node.js Addons are dynamically-linked shared objects, written in C++, that
can be loaded into Node.js using the [`require()`][require] function, and used
just as if they were an ordinary Node.js module. They are used primarily to
provide an interface between JavaScript running in Node.js and C/C++ libraries.
@@ -28,7 +28,7 @@ involving knowledge of several components and APIs :
off-loading work via libuv to non-blocking system operations, worker threads
or a custom use of libuv's threads.
- - Internal Node.js libraries. Node.js itself exports a number of C/C++ APIs
+ - Internal Node.js libraries. Node.js itself exports a number of C++ APIs
that Addons can use — the most important of which is the
`node::ObjectWrap` class.
diff --git a/doc/api/n-api.md b/doc/api/n-api.md
index 7530c0b1b4f4d2..348f2ad04466bd 100644
--- a/doc/api/n-api.md
+++ b/doc/api/n-api.md
@@ -12,7 +12,7 @@ compiled for one version to run on later versions of Node.js without
recompilation.
Addons are built/packaged with the same approach/tools
-outlined in the section titled [C/C++ Addons](addons.html).
+outlined in the section titled [C++ Addons](addons.html).
The only difference is the set of APIs that are used by the native code.
Instead of using the V8 or [Native Abstractions for Node.js][] APIs,
the functions available in the N-API are used.
From 263a633d5eef0dbf9724ece9861de8123000a56b Mon Sep 17 00:00:00 2001
From: Rich Trott
Date: Wed, 10 May 2017 19:37:25 -0700
Subject: [PATCH 081/227] test: add common.mustCall() to NAPI exception test
Use `common.mustCall()` to confirm that function is invoked.
Backport-PR-URL: https://github.com/nodejs/node/pull/19447
PR-URL: https://github.com/nodejs/node/pull/12959
Reviewed-By: Colin Ihrig
Reviewed-By: Sakthipriyan Vairamani
Reviewed-By: Gibson Fahnestock
Reviewed-By: Anna Henningsen
Reviewed-By: James M Snell
---
test/addons-napi/test_exception/test.js | 6 ++----
1 file changed, 2 insertions(+), 4 deletions(-)
diff --git a/test/addons-napi/test_exception/test.js b/test/addons-napi/test_exception/test.js
index 94f9566a4b341f..ddd1195fa7234a 100644
--- a/test/addons-napi/test_exception/test.js
+++ b/test/addons-napi/test_exception/test.js
@@ -9,8 +9,6 @@ function throwTheError() {
}
let caughtError;
-const throwNoError = common.noop;
-
// Test that the native side successfully captures the exception
let returnedError = test_exception.returnException(throwTheError);
assert.strictEqual(theError, returnedError,
@@ -34,13 +32,13 @@ assert.strictEqual(test_exception.wasPending(), true,
' when it was allowed through');
// Test that the native side does not capture a non-existing exception
-returnedError = test_exception.returnException(throwNoError);
+returnedError = test_exception.returnException(common.mustCall());
assert.strictEqual(undefined, returnedError,
'Returned error is undefined when no exception is thrown');
// Test that no exception appears that was not thrown by us
try {
- test_exception.allowException(throwNoError);
+ test_exception.allowException(common.mustCall());
} catch (anError) {
caughtError = anError;
}
From 0f74ee5cbf1faffcb986da37791fbbece28a9b7b Mon Sep 17 00:00:00 2001
From: Michael Dawson
Date: Thu, 11 May 2017 09:53:35 -0400
Subject: [PATCH 082/227] doc: clarify operation of napi_cancel_async_work
Backport-PR-URL: https://github.com/nodejs/node/pull/19447
PR-URL: https://github.com/nodejs/node/pull/12974
Reviewed-By: Gibson Fahnestock
Reviewed-By: Sakthipriyan Vairamani
Reviewed-By: Colin Ihrig
Reviewed-By: James M Snell
Reviewed-By: Jason Ginchereau
---
doc/api/n-api.md | 7 ++++---
1 file changed, 4 insertions(+), 3 deletions(-)
diff --git a/doc/api/n-api.md b/doc/api/n-api.md
index 348f2ad04466bd..423e71859dea94 100644
--- a/doc/api/n-api.md
+++ b/doc/api/n-api.md
@@ -2931,11 +2931,12 @@ NAPI_EXTERN napi_status napi_cancel_async_work(napi_env env,
Returns `napi_ok` if the API succeeded.
-This API cancels a previously allocated work, provided
-it has not yet been queued for execution. After this function is called
+This API cancels queued work if it has not yet
+been started. If it has already started executing, it cannot be
+cancelled and `napi_generic_failure` will be returned. If successful,
the `complete` callback will be invoked with a status value of
`napi_cancelled`. The work should not be deleted before the `complete`
-callback invocation, even when it was cancelled.
+callback invocation, even if it has been successfully cancelled.
[Aynchronous Operations]: #n_api_asynchronous_operations
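Editor's note: restating the clarified contract as code. This is a sketch of the documented semantics only, not taken from the patch; `work` is assumed to have been created with napi_create_async_work and queued with napi_queue_async_work.

```c
// Sketch of the cancellation semantics described above.
void MaybeCancel(napi_env env, napi_async_work work) {
  napi_status status = napi_cancel_async_work(env, work);
  if (status == napi_ok) {
    // Work was still queued: it will not run, but the complete callback
    // still fires, with a status of napi_cancelled.
  } else if (status == napi_generic_failure) {
    // Work already started executing: it cannot be cancelled and will
    // complete normally.
  }
  // Either way, do not call napi_delete_async_work() before the complete
  // callback has been invoked.
}
```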
From 314f22dcf4e5dc864cddd98f08274080601f0a88 Mon Sep 17 00:00:00 2001
From: Michael Dawson
Date: Fri, 12 May 2017 17:26:38 -0400
Subject: [PATCH 083/227] test: improve N-API test coverage
Add tests to cover functions that return globals
Backport-PR-URL: https://github.com/nodejs/node/pull/19447
PR-URL: https://github.com/nodejs/node/pull/13006
Reviewed-By: Colin Ihrig
Reviewed-By: Anna Henningsen
Reviewed-By: Jason Ginchereau
---
test/addons-napi/test_globals/binding.gyp | 8 ++++++
test/addons-napi/test_globals/test.js | 8 ++++++
test/addons-napi/test_globals/test_globals.c | 26 ++++++++++++++++++++
3 files changed, 42 insertions(+)
create mode 100644 test/addons-napi/test_globals/binding.gyp
create mode 100644 test/addons-napi/test_globals/test.js
create mode 100644 test/addons-napi/test_globals/test_globals.c
diff --git a/test/addons-napi/test_globals/binding.gyp b/test/addons-napi/test_globals/binding.gyp
new file mode 100644
index 00000000000000..0160dc72e18017
--- /dev/null
+++ b/test/addons-napi/test_globals/binding.gyp
@@ -0,0 +1,8 @@
+{
+ "targets": [
+ {
+ "target_name": "test_globals",
+ "sources": [ "test_globals.c" ]
+ }
+ ]
+}
diff --git a/test/addons-napi/test_globals/test.js b/test/addons-napi/test_globals/test.js
new file mode 100644
index 00000000000000..a6e5f722cb9379
--- /dev/null
+++ b/test/addons-napi/test_globals/test.js
@@ -0,0 +1,8 @@
+'use strict';
+const common = require('../../common');
+const assert = require('assert');
+
+const test_globals = require(`./build/${common.buildType}/test_globals`);
+
+assert.strictEqual(test_globals.getUndefined(), undefined);
+assert.strictEqual(test_globals.getNull(), null);
diff --git a/test/addons-napi/test_globals/test_globals.c b/test/addons-napi/test_globals/test_globals.c
new file mode 100644
index 00000000000000..709e42a2e0e6c8
--- /dev/null
+++ b/test/addons-napi/test_globals/test_globals.c
@@ -0,0 +1,26 @@
+#include <node_api.h>
+#include "../common.h"
+
+napi_value getNull(napi_env env, napi_callback_info info) {
+ napi_value result;
+ NAPI_CALL(env, napi_get_null(env, &result));
+ return result;
+}
+
+napi_value getUndefined(napi_env env, napi_callback_info info) {
+ napi_value result;
+ NAPI_CALL(env, napi_get_undefined(env, &result));
+ return result;
+}
+
+void Init(napi_env env, napi_value exports, napi_value module, void* priv) {
+ napi_property_descriptor descriptors[] = {
+ DECLARE_NAPI_PROPERTY("getUndefined", getUndefined),
+ DECLARE_NAPI_PROPERTY("getNull", getNull),
+ };
+
+ NAPI_CALL_RETURN_VOID(env, napi_define_properties(
+ env, exports, sizeof(descriptors) / sizeof(*descriptors), descriptors));
+}
+
+NAPI_MODULE(addon, Init)
From 8d3162d9e68b6ae30addde5d0c5e1566e21c7ce6 Mon Sep 17 00:00:00 2001
From: Anna Henningsen
Date: Sat, 13 May 2017 17:36:23 +0200
Subject: [PATCH 084/227] n-api: remove compiler warning
`TryCatch` without an `Isolate*` argument is deprecated, so add one.
Backport-PR-URL: https://github.com/nodejs/node/pull/19447
PR-URL: https://github.com/nodejs/node/pull/13014
Reviewed-By: Timothy Gu
Reviewed-By: Gibson Fahnestock
Reviewed-By: Colin Ihrig
Reviewed-By: Michael Dawson
Reviewed-By: Jason Ginchereau
---
src/node_api.cc | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/src/node_api.cc b/src/node_api.cc
index 8600783523c7a5..b0900d1acd12bb 100644
--- a/src/node_api.cc
+++ b/src/node_api.cc
@@ -2783,7 +2783,7 @@ class Work {
// report it as a fatal exception. (There is no JavaScript on the
// callstack that can possibly handle it.)
if (!env->last_exception.IsEmpty()) {
- v8::TryCatch try_catch;
+ v8::TryCatch try_catch(env->isolate);
env->isolate->ThrowException(
v8::Local<v8::Value>::New(env->isolate, env->last_exception));
node::FatalException(env->isolate, try_catch);
From 71aa251671dfb78d82315b9fc11b32a12b66dea8 Mon Sep 17 00:00:00 2001
From: Michael Dawson
Date: Mon, 15 May 2017 20:18:50 -0400
Subject: [PATCH 085/227] test: Improve N-API test coverage
- add coverage for napi_get_prototype
- add coverage for napi_strict_equals
Backport-PR-URL: https://github.com/nodejs/node/pull/19447
PR-URL: https://github.com/nodejs/node/pull/13044
Reviewed-By: Colin Ihrig
Reviewed-By: Jason Ginchereau
---
test/addons-napi/test_general/binding.gyp | 8 +++++
test/addons-napi/test_general/test.js | 32 +++++++++++++++++
test/addons-napi/test_general/test_general.c | 38 ++++++++++++++++++++
3 files changed, 78 insertions(+)
create mode 100644 test/addons-napi/test_general/binding.gyp
create mode 100644 test/addons-napi/test_general/test.js
create mode 100644 test/addons-napi/test_general/test_general.c
diff --git a/test/addons-napi/test_general/binding.gyp b/test/addons-napi/test_general/binding.gyp
new file mode 100644
index 00000000000000..f8ef9f59613355
--- /dev/null
+++ b/test/addons-napi/test_general/binding.gyp
@@ -0,0 +1,8 @@
+{
+ "targets": [
+ {
+ "target_name": "test_general",
+ "sources": [ "test_general.c" ]
+ }
+ ]
+}
diff --git a/test/addons-napi/test_general/test.js b/test/addons-napi/test_general/test.js
new file mode 100644
index 00000000000000..a2e57126ce8c22
--- /dev/null
+++ b/test/addons-napi/test_general/test.js
@@ -0,0 +1,32 @@
+'use strict';
+
+const common = require('../../common');
+const test_general = require(`./build/${common.buildType}/test_general`);
+const assert = require('assert');
+
+const val1 = '1';
+const val2 = 1;
+const val3 = 1;
+
+class BaseClass {
+}
+
+class ExtendedClass extends BaseClass {
+}
+
+const baseObject = new BaseClass();
+const extendedObject = new ExtendedClass();
+
+// test napi_strict_equals
+assert.ok(test_general.testStrictEquals(val1, val1));
+assert.strictEqual(test_general.testStrictEquals(val1, val2), false);
+assert.ok(test_general.testStrictEquals(val2, val3));
+
+// test napi_get_prototype
+assert.strictEqual(test_general.testGetPrototype(baseObject),
+ Object.getPrototypeOf(baseObject));
+assert.strictEqual(test_general.testGetPrototype(extendedObject),
+ Object.getPrototypeOf(extendedObject));
+assert.ok(test_general.testGetPrototype(baseObject) !==
+ test_general.testGetPrototype(extendedObject),
+ 'Prototypes for base and extended should be different');
diff --git a/test/addons-napi/test_general/test_general.c b/test/addons-napi/test_general/test_general.c
new file mode 100644
index 00000000000000..17911b36253df1
--- /dev/null
+++ b/test/addons-napi/test_general/test_general.c
@@ -0,0 +1,38 @@
+#include <node_api.h>
+#include "../common.h"
+
+napi_value testStrictEquals(napi_env env, napi_callback_info info) {
+ size_t argc = 2;
+ napi_value args[2];
+ NAPI_CALL(env, napi_get_cb_info(env, info, &argc, args, NULL, NULL));
+
+ bool bool_result;
+ napi_value result;
+ NAPI_CALL(env, napi_strict_equals(env, args[0], args[1], &bool_result));
+ NAPI_CALL(env, napi_get_boolean(env, bool_result, &result));
+
+ return result;
+}
+
+napi_value testGetPrototype(napi_env env, napi_callback_info info) {
+ size_t argc = 1;
+ napi_value args[1];
+ NAPI_CALL(env, napi_get_cb_info(env, info, &argc, args, NULL, NULL));
+
+ napi_value result;
+ NAPI_CALL(env, napi_get_prototype(env, args[0], &result));
+
+ return result;
+}
+
+void Init(napi_env env, napi_value exports, napi_value module, void* priv) {
+ napi_property_descriptor descriptors[] = {
+ DECLARE_NAPI_PROPERTY("testStrictEquals", testStrictEquals),
+ DECLARE_NAPI_PROPERTY("testGetPrototype", testGetPrototype),
+ };
+
+ NAPI_CALL_RETURN_VOID(env, napi_define_properties(
+ env, exports, sizeof(descriptors) / sizeof(*descriptors), descriptors));
+}
+
+NAPI_MODULE(addon, Init)
From 70281ba1be8d4bc067ac9ec6b5a50f0936eb06fc Mon Sep 17 00:00:00 2001
From: Jason Ginchereau
Date: Wed, 17 May 2017 16:56:37 -0700
Subject: [PATCH 086/227] n-api: Retain last code when getting error info
Unlike most N-API functions, `napi_get_last_error_info()` should not
clear the last error code when successful, because a pointer to (not
a copy of) the error info structure is returned via an out parameter.
Backport-PR-URL: https://github.com/nodejs/node/pull/19447
PR-URL: https://github.com/nodejs/node/pull/13087
Reviewed-By: Anna Henningsen
Reviewed-By: Colin Ihrig
Reviewed-By: Michael Dawson
---
src/node_api.cc | 2 +-
test/addons-napi/test_napi_status/test_napi_status.cc | 8 ++++++++
2 files changed, 9 insertions(+), 1 deletion(-)
diff --git a/src/node_api.cc b/src/node_api.cc
index b0900d1acd12bb..976bf15f3ca423 100644
--- a/src/node_api.cc
+++ b/src/node_api.cc
@@ -757,7 +757,7 @@ napi_status napi_get_last_error_info(napi_env env,
error_messages[env->last_error.error_code];
*result = &(env->last_error);
- return napi_clear_last_error(env);
+ return napi_ok;
}
napi_status napi_create_function(napi_env env,
diff --git a/test/addons-napi/test_napi_status/test_napi_status.cc b/test/addons-napi/test_napi_status/test_napi_status.cc
index 9046feffd4a15d..9e340aa46e106c 100644
--- a/test/addons-napi/test_napi_status/test_napi_status.cc
+++ b/test/addons-napi/test_napi_status/test_napi_status.cc
@@ -10,6 +10,14 @@ napi_value createNapiError(napi_env env, napi_callback_info info) {
NAPI_ASSERT(env, status != napi_ok, "Failed to produce error condition");
+ const napi_extended_error_info *error_info = 0;
+ NAPI_CALL(env, napi_get_last_error_info(env, &error_info));
+
+ NAPI_ASSERT(env, error_info->error_code == status,
+ "Last error info code should match last status");
+ NAPI_ASSERT(env, error_info->error_message,
+ "Last error info message should not be null");
+
return nullptr;
}
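Editor's note: the pattern this fix protects, as a sketch (the function name is made up): the pointer returned through the out parameter aliases storage owned by `env`, so fetching the info must not reset the stored code.

```c
// Hypothetical sketch; mirrors how the test above consumes the error info
// after a deliberately failed call.
napi_value ReportLastError(napi_env env, napi_callback_info info) {
  size_t argc = 1;
  napi_value arg;
  NAPI_CALL(env, napi_get_cb_info(env, info, &argc, &arg, NULL, NULL));

  int32_t number;
  napi_status status = napi_get_value_int32(env, arg, &number);
  if (status != napi_ok) {
    const napi_extended_error_info* error_info = NULL;
    // Must not clear the stored code: error_info points into env itself.
    NAPI_CALL(env, napi_get_last_error_info(env, &error_info));
    NAPI_ASSERT(env, error_info->error_code == status,
                "error_code should still match the failed call");
    napi_throw_error(env, error_info->error_message);
    return NULL;
  }
  return arg;
}
```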
From 75e91fe5c8c386ced5addab9cb55326a4e743ba9 Mon Sep 17 00:00:00 2001
From: Michael Dawson
Date: Wed, 17 May 2017 17:16:37 -0400
Subject: [PATCH 087/227] doc: add reference to node_api.h in docs
Realized that we don't actually point people to the file to
include in order to access N-API functions. Add that.
Backport-PR-URL: https://github.com/nodejs/node/pull/19447
PR-URL: https://github.com/nodejs/node/pull/13084
Reviewed-By: Colin Ihrig
Reviewed-By: Gibson Fahnestock
Reviewed-By: Luigi Pinca
Reviewed-By: Anna Henningsen
---
doc/api/n-api.md | 8 ++++++++
1 file changed, 8 insertions(+)
diff --git a/doc/api/n-api.md b/doc/api/n-api.md
index 423e71859dea94..de841331aba0ad 100644
--- a/doc/api/n-api.md
+++ b/doc/api/n-api.md
@@ -52,6 +52,14 @@ for the N-API C based functions exported by Node.js. These wrappers are not
part of N-API, nor will they be maintained as part of Node.js. One such
example is: [node-api](https://github.com/nodejs/node-api).
+In order to use the N-API functions, include the file
+[node_api.h](https://github.com/nodejs/node/blob/master/src/node_api.h)
+which is located in the src directory in the node development tree.
+For example:
+```C
+#include <node_api.h>
+```
+
## Basic N-API Data Types
N-API exposes the following fundamental datatypes as abstractions that are
From 2e2905266e5f6c5a63c521f8cad473b9d82f6b1c Mon Sep 17 00:00:00 2001
From: Michael Dawson
Date: Fri, 19 May 2017 17:34:11 -0400
Subject: [PATCH 088/227] doc: fix title/function name mismatch
Fix mismatch in title for napi_get_value_string_utf16
Fixes: https://github.com/nodejs/abi-stable-node/issues/243
Backport-PR-URL: https://github.com/nodejs/node/pull/19447
PR-URL: https://github.com/nodejs/node/pull/13123
Reviewed-By: Anna Henningsen
Reviewed-By: Refael Ackermann
Reviewed-By: James M Snell
Reviewed-By: Luigi Pinca
Reviewed-By: Gibson Fahnestock
Reviewed-By: Colin Ihrig
---
doc/api/n-api.md | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/doc/api/n-api.md b/doc/api/n-api.md
index de841331aba0ad..d3efd09ab15ec0 100644
--- a/doc/api/n-api.md
+++ b/doc/api/n-api.md
@@ -1590,7 +1590,7 @@ x is passed in it returns `napi_string_expected`.
This API returns the UTF8-encoded string corresponding to the value passed in.
-#### *napi_get_value_string_utf16_length*
+#### *napi_get_value_string_utf16*
From d89afe868593ffe75c4e7f8974ecd779bb28af06 Mon Sep 17 00:00:00 2001
From: Michael Dawson
Date: Fri, 19 May 2017 18:18:54 -0400
Subject: [PATCH 089/227] test: increase n-api constructor coverage
Add tests to validate that properties marked as static
are available through the class as opposed to instances
Backport-PR-URL: https://github.com/nodejs/node/pull/19447
PR-URL: https://github.com/nodejs/node/pull/13124
Reviewed-By: Jason Ginchereau