Support passing ReadEntry objects to Pack

isaacs committed May 12, 2017
1 parent 6f33e01 commit 0d9c2fb
Showing 2 changed files with 197 additions and 30 deletions.
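
A minimal usage sketch of what the change below enables (modeled on the added tests; the file name, contents, and require paths from the repository root are illustrative assumptions):

// Pack#add()/end() now accept a ReadEntry object as well as a filesystem path.
const Pack = require('./lib/pack.js')
const ReadEntry = require('./lib/read-entry.js')
const Header = require('./lib/header.js')

const pack = new Pack()
const out = []
pack.on('data', c => out.push(c))
pack.on('end', _ => console.log('packed %d bytes', Buffer.concat(out).length))

// A ReadEntry built by hand; in practice these usually come from a tar parser.
const entry = new ReadEntry(new Header({ path: 'hello.txt', type: 'File', size: 1 }))
pack.end(entry)

// Entry bodies are fed in 512-byte blocks, as in the tests below.
const buf = Buffer.alloc(512)
buf.write('x')
entry.end(buf)

Filesystem paths still work exactly as before; the new dispatch in write() below picks the appropriate path.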
100 changes: 70 additions & 30 deletions lib/pack.js
@@ -13,6 +13,7 @@ class PackJob {
constructor (path, absolute) {
this.path = path || './'
this.absolute = absolute
this.entry = null
this.stat = null
this.readdir = null
this.pending = false
@@ -23,8 +24,10 @@ class PackJob {

const MiniPass = require('minipass')
const zlib = require('minizlib')
const ReadEntry = require('./read-entry.js')
const WriteEntry = require('./write-entry.js')
const WriteEntrySync = WriteEntry.Sync
const WriteEntryTar = WriteEntry.Tar
const Yallist = require('yallist')
const EOF = Buffer.alloc(1024)
const ONSTAT = Symbol('onStat')
@@ -36,12 +39,14 @@ const PROCESSING = Symbol('processing')
const PROCESSJOB = Symbol('processJob')
const JOBS = Symbol('jobs')
const JOBDONE = Symbol('jobDone')
- const ADDENTRY = Symbol('addEntry')
+ const ADDFSENTRY = Symbol('addFSEntry')
+ const ADDTARENTRY = Symbol('addTarEntry')
const STAT = Symbol('stat')
const READDIR = Symbol('readdir')
const ONREADDIR = Symbol('onreaddir')
const PIPE = Symbol('pipe')
const ENTRY = Symbol('entry')
const ENTRYOPT = Symbol('entryOpt')
const WRITEENTRYCLASS = Symbol('writeEntryClass')
const WRITE = Symbol('write')
const ONDRAIN = Symbol('ondrain')
@@ -114,11 +119,33 @@ const Pack = warner(class Pack extends MiniPass {
if (this[ENDED])
throw new Error('write after end')

- this[ADDENTRY](path)
+ if (path instanceof ReadEntry)
+ this[ADDTARENTRY](path)
+ else
+ this[ADDFSENTRY](path)
return this.flowing
}

- [ADDENTRY] (p) {
+ [ADDTARENTRY] (p) {
const absolute = path.resolve(this.cwd, p.path)
if (this.prefix)
p.path = this.prefix + '/' + p.path

// in this case, we don't have to wait for the stat
if (!this.filter(p.path, p))
p.resume()
else {
const job = new PackJob(p.path, absolute, false)
job.entry = new WriteEntryTar(p, this[ENTRYOPT](job))
job.entry.on('end', _ => this[JOBDONE](job))
this[JOBS] += 1
this[QUEUE].push(job)
}

this[PROCESS]()
}

[ADDFSENTRY] (p) {
const absolute = path.resolve(this.cwd, p)
if (this.prefix)
p = this.prefix + '/' + p
@@ -134,8 +161,9 @@ const Pack = warner(class Pack extends MiniPass {
job.pending = false
this[JOBS] -= 1
if (er)
- return this.emit('error', er)
- this[ONSTAT](job, stat)
+ this.emit('error', er)
+ else
+ this[ONSTAT](job, stat)
})
}

@@ -177,7 +205,13 @@ const Pack = warner(class Pack extends MiniPass {
w !== null && this[JOBS] < this.jobs;
w = w.next) {
this[PROCESSJOB](w.value)
if (w.value.ignore) {
const p = w.next
this[QUEUE].removeNode(w)
w.next = p
}
}

this[PROCESSING] = false

if (this[ENDED] && !this[QUEUE].length && this[JOBS] === 0) {
@@ -204,6 +238,12 @@ const Pack = warner(class Pack extends MiniPass {
if (job.pending)
return

if (job.entry) {
if (job === this[CURRENT] && !job.piped)
this[PIPE](job)
return
}

if (!job.stat) {
if (this.statCache.has(job.absolute))
this[ONSTAT](job, this.statCache.get(job.absolute))
@@ -214,11 +254,8 @@ const Pack = warner(class Pack extends MiniPass {
return

// filtered out!
- if (job.ignore) {
- if (job === this[CURRENT])
- this[QUEUE].shift()
+ if (job.ignore)
return
- }

if (!this.noDirRecurse && job.stat.isDirectory() && !job.readdir) {
if (this.readdirCache.has(job.absolute))
@@ -229,35 +266,38 @@ const Pack = warner(class Pack extends MiniPass {
return
}

+ // we know it doesn't have an entry, because that got checked above
+ job.entry = this[ENTRY](job)
if (!job.entry) {
- job.entry = this[ENTRY](job)
- if (!job.entry) {
- job.ignore = true
- return
- }
+ job.ignore = true
+ return
}

if (job === this[CURRENT] && !job.piped)
this[PIPE](job)
}

[ENTRYOPT] (job) {
return {
onwarn: (msg, data) => {
this.warn(msg, data)
},
noPax: this.noPax,
cwd: this.cwd,
absolute: job.absolute,
preservePaths: this.preservePaths,
maxReadSize: this.maxReadSize,
strict: this.strict,
portable: this.portable,
linkCache: this.linkCache,
statCache: this.statCache
}
}

[ENTRY] (job) {
this[JOBS] += 1
try {
- return new this[WRITEENTRYCLASS](job.path, {
- onwarn: (msg, data) => {
- this.warn(msg, data)
- },
- noPax: this.noPax,
- cwd: this.cwd,
- absolute: job.absolute,
- preservePaths: this.preservePaths,
- maxReadSize: this.maxReadSize,
- strict: this.strict,
- portable: this.portable,
- linkCache: this.linkCache,
- statCache: this.statCache
- }).on('end', _ => {
+ return new this[WRITEENTRYCLASS](job.path, this[ENTRYOPT](job)).on('end', _ => {
this[JOBDONE](job)
})
} catch (er) {
@@ -277,7 +317,7 @@ const Pack = warner(class Pack extends MiniPass {
if (job.readdir)
job.readdir.forEach(entry => {
const base = job.path === './' ? '' : job.path.replace(/\/*$/, '/')
- this[ADDENTRY](base + entry)
+ this[ADDFSENTRY](base + entry)
})

const source = job.entry
@@ -328,7 +368,7 @@ class PackSync extends Pack {

if (job.readdir)
job.readdir.forEach(entry => {
- this[ADDENTRY](job.path + '/' + entry)
+ this[ADDFSENTRY](job.path + '/' + entry)
})

if (zip)
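For context, one way this plumbing can be used (a hypothetical sketch, not part of this commit): re-packing an existing archive under a prefix. It assumes the companion Parse class in lib/parse.js, which emits 'entry' events carrying ReadEntry objects and an 'end' event when the input is exhausted; the file names are made up.

const fs = require('fs')
const Parse = require('./lib/parse.js')
const Pack = require('./lib/pack.js')

const pack = new Pack({ prefix: 'repacked' })
pack.pipe(fs.createWriteStream('repacked.tar'))

const parser = new Parse()
parser.on('entry', entry => pack.add(entry)) // ReadEntry goes straight into the pack
parser.on('end', _ => pack.end())            // finish the output archive

fs.createReadStream('original.tar').pipe(parser)

Entries rejected by a Pack filter are resume()d (see the [ADDTARENTRY] branch above), so a pipeline like this keeps flowing even when entries are dropped.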
127 changes: 127 additions & 0 deletions test/pack.js
@@ -18,6 +18,7 @@ process.env.USER = 'isaacs'
const EE = require('events').EventEmitter
const rimraf = require('rimraf')
const mkdirp = require('mkdirp')
const ReadEntry = require('../lib/read-entry.js')

const ctime = new Date('2017-05-10T01:03:12.000Z')
const atime = new Date('2017-04-17T00:00:00.000Z')
@@ -823,3 +824,129 @@ t.test('follow', t => {

t.end()
})

t.test('pack ReadEntries', t => {
t.test('basic', t => {
const readEntry = new ReadEntry(new Header({
path: 'x',
type: 'File',
size: 1
}))
const p = new Pack()
p.end(readEntry)
const out = []
p.on('data', c => out.push(c))
p.on('end', _ => {
const data = Buffer.concat(out)
t.equal(data.length, 2048)
t.match(data.slice(1024).toString(), /^\0+$/)
t.equal(data.slice(0, 100).toString().replace(/\0.*$/, ''), 'x')
t.equal(data.slice(512, 514).toString(), 'x\0')
t.end()
})
const buf = Buffer.alloc(512)
buf.write('x')
readEntry.end(buf)
})

t.test('prefix', t => {
const readEntry = new ReadEntry(new Header({
path: 'x',
type: 'File',
size: 1
}))
const p = new Pack({ prefix: 'y' })
p.end(readEntry)
const out = []
p.on('data', c => out.push(c))
p.on('end', _ => {
const data = Buffer.concat(out)
t.equal(data.length, 2048)
t.match(data.slice(1024).toString(), /^\0+$/)
t.equal(data.slice(0, 100).toString().replace(/\0.*$/, ''), 'y/x')
t.equal(data.slice(512, 514).toString(), 'x\0')
t.end()
})
const buf = Buffer.alloc(512)
buf.write('x')
readEntry.end(buf)
})

t.test('filter out', t => {
const re1 = new ReadEntry(new Header({
path: 'a',
type: 'File',
size: 1
}))
const re2 = new ReadEntry(new Header({
path: 'x',
type: 'File',
size: 1
}))
const re3 = new ReadEntry(new Header({
path: 'y',
type: 'File',
size: 1
}))
const p = new Pack({ filter: p => p === 'x' })
p.add(re1)
p.add(re2)
p.end(re3)
const out = []
p.on('data', c => out.push(c))
p.on('end', _ => {
const data = Buffer.concat(out)
t.equal(data.length, 2048)
t.match(data.slice(1024).toString(), /^\0+$/)
t.equal(data.slice(0, 100).toString().replace(/\0.*$/, ''), 'x')
t.equal(data.slice(512, 514).toString(), 'x\0')
t.end()
})
{
const buf = Buffer.alloc(512)
buf.write('x')
re1.end(buf)
}
{
const buf = Buffer.alloc(512)
buf.write('x')
re2.end(buf)
}
{
const buf = Buffer.alloc(512)
buf.write('x')
re3.end(buf)
}
})

t.end()
})

t.test('filter out everything', t => {
const filter = _ => false

const check = (out, t) => {
const data = Buffer.concat(out)
t.equal(data.length, 1024)
t.match(data.toString(), /^\0+$/)
t.end()
}

t.test('sync', t => {
const out = []
const p = new Pack.Sync({ cwd: files, filter: filter })
p.on('data', c => out.push(c))
p.end('./')
check(out, t)
})

t.test('async', t => {
const out = []
const p = new Pack({ cwd: files, filter: filter })
p.on('data', c => out.push(c))
p.on('end', _ => check(out, t))
p.end('./')
})

t.end()
})
