From a2c0ebd3b54ef036759f6b230a4bc55848c094f9 Mon Sep 17 00:00:00 2001
From: Friedel Ziegelmayer
Date: Sat, 10 Sep 2016 13:09:46 +0200
Subject: [PATCH] fix: lock source to ensure no double reads happen

When the source is read very fast while the file descriptor is still
being opened it can come to double reads of chunks of the file. This
introduces a lock to avoid this issue.
---
 index.js          | 17 +++++++++++++----
 package.json      |  3 ++-
 test/largefile.js | 35 +++++++++++++++++++++++++++++++++++
 3 files changed, 50 insertions(+), 5 deletions(-)

diff --git a/index.js b/index.js
index 62fbcc5..113053e 100644
--- a/index.js
+++ b/index.js
@@ -4,6 +4,7 @@
 var fs = require('fs');
 var Decoder = require('pull-utf8-decoder')
+var Lock = require('lock')
 
 /**
   # pull-file
 
@@ -25,6 +26,7 @@ module.exports = function(filename, opts) {
   var ended, closeNext, busy;
   var _buffer = new Buffer(bufferSize)
+  var lock = Lock()
   var live = opts && !!opts.live
   var liveCb, closeCb
   var watcher
 
@@ -78,10 +80,12 @@ module.exports = function(filename, opts) {
        return cb(err);
      }
-      if(count === buffer.length) {
+      if(count === toRead && count === buffer.length) {
        cb(null, buffer);
      } else if(count === 0 && live) {
        liveCb = cb; closeNext = true
+      } else if (count === 0) {
+        cb(ended = true)
      } else {
        closeNext = true;
        cb(null, buffer.slice(0, count));
      }
@@ -161,11 +165,16 @@ module.exports = function(filename, opts) {
 
     readNext(cb);
   };
 
+  var lockedSource = function (end, cb) {
+    lock('source', function (release) {
+      source(end, release(cb))
+    })
+  }
+
   //read directly to text
   if(opts && opts.encoding)
-    return Decoder(opts.encoding)(source)
+    return Decoder(opts.encoding)(lockedSource)
 
-  return source
+  return lockedSource
 };
-
diff --git a/package.json b/package.json
index 7fd0a2d..361a8c9 100644
--- a/package.json
+++ b/package.json
@@ -34,6 +34,7 @@
   },
   "homepage": "https://github.com/DamonOehlman/pull-file",
   "dependencies": {
+    "lock": "^0.1.3",
     "pull-utf8-decoder": "^1.0.2"
   }
-}
\ No newline at end of file
+}
diff --git a/test/largefile.js b/test/largefile.js
index 1ac116e..a10988a 100644
--- a/test/largefile.js
+++ b/test/largefile.js
@@ -43,8 +43,43 @@ test('large file as ascii strings', function(t) {
   );
 });
 
+test('large file fast reads', function(t) {
+  var big = crypto.pseudoRandomBytes(10*1024*1024)
+  fs.writeFileSync(tmpfile, big)
+  var read = file(tmpfile, {bufferSize: 1024 * 1024})
+  var items = []
+  read(null, checkRead)
+  read(null, checkRead)
+  read(null, checkRead)
+  read(null, checkRead)
+  read(null, checkRead)
+  setTimeout(function () {
+    read(null, checkRead)
+    read(null, checkRead)
+    read(null, checkRead)
+    read(null, checkRead)
+    read(null, checkRead)
+    read(null, function (err, data) {
+      t.equal(err, true)
+      t.equal(data, undefined)
+      next()
+    })
+  }, 10)
+  function checkRead (err, data) {
+    t.equal(data.length, 1024 * 1024)
+    items.push(data)
+    next()
+  }
+  var i = 0
+  function next () {
+    if (++i === 11) {
+      t.equal(hash(big), hash(Buffer.concat(items)))
+      t.end()
+    }
+  }
+})