2019-02-17 18:07:28 +01:00
parent c69ffb9752
commit 95cff6f702
2301 changed files with 307810 additions and 5 deletions

5
Client/node_modules/tar/.npmignore generated vendored Executable file

@@ -0,0 +1,5 @@
.*.swp
node_modules
examples/extract/
test/tmp/
test/fixtures/

4
Client/node_modules/tar/.travis.yml generated vendored Executable file

@@ -0,0 +1,4 @@
language: node_js
node_js:
- 0.10
- 0.11

12
Client/node_modules/tar/LICENSE generated vendored Executable file

@@ -0,0 +1,12 @@
The ISC License
Copyright (c) Isaac Z. Schlueter and Contributors
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

50
Client/node_modules/tar/README.md generated vendored Executable file

@@ -0,0 +1,50 @@
# node-tar
Tar for Node.js.
[![NPM](https://nodei.co/npm/tar.png)](https://nodei.co/npm/tar/)
## API
See `examples/` for usage examples.
### var tar = require('tar')
Returns an object with `.Pack`, `.Extract` and `.Parse` methods.
### tar.Pack([properties])
Returns a through stream. Use
[fstream](https://npmjs.org/package/fstream) to write files into the
pack stream and you will receive tar archive data from the pack
stream.
This only works with directories; it does not work with individual files.
The optional `properties` object is used to set properties in the tar
'Global Extended Header'. If the `fromBase` property is set to true,
the tarball will contain files relative to the path passed, rather
than including the path itself.
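As a minimal packing sketch in the spirit of the bundled
`examples/packer.js` (the directory and output paths here are
hypothetical):
```js
var tar = require('tar')
var fstream = require('fstream')
var fs = require('fs')

// Read a directory with fstream, pipe it through Pack, and write the
// resulting archive to a file. fromBase is optional, as described above.
fstream.Reader({ path: './some-dir', type: 'Directory' })
  .on('error', function (er) { console.error(er) })
  .pipe(tar.Pack({ fromBase: true }))
  .pipe(fs.createWriteStream('some-dir.tar'))
```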
### tar.Extract([options])
Returns a through stream. Write tar data to the stream and the files
in the tarball will be extracted onto the filesystem.
`options` can be:
```js
{
path: '/path/to/extract/tar/into',
strip: 0, // how many path segments to strip from the root when extracting
}
```
`options` also get passed to the `fstream.Writer` instance that `tar`
uses internally.
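For example, a minimal extraction sketch (the archive and output paths
here are hypothetical):
```js
var tar = require('tar')
var fs = require('fs')

// Unpack ./archive.tar into ./output, dropping the first path segment
// of each entry (strip: 1).
fs.createReadStream('./archive.tar')
  .on('error', function (er) { console.error(er) })
  .pipe(tar.Extract({ path: './output', strip: 1 }))
  .on('error', function (er) { console.error(er) })
  .on('end', function () { console.log('done') })
```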
### tar.Parse()
Returns a writable stream. Write tar data to it and it will emit
`entry` events for each entry parsed from the tarball. This is used by
`tar.Extract`.
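A minimal listing sketch (the archive path is hypothetical; `path`,
`type`, and `size` are set on each entry from its parsed header):
```js
var tar = require('tar')
var fs = require('fs')

// Print one line per entry without writing anything to disk.
fs.createReadStream('./archive.tar')
  .pipe(tar.Parse())
  .on('entry', function (entry) {
    console.log(entry.type, entry.path, entry.size)
  })
```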

19
Client/node_modules/tar/examples/extracter.js generated vendored Executable file

@@ -0,0 +1,19 @@
var tar = require("../tar.js")
, fs = require("fs")
function onError(err) {
console.error('An error occurred:', err)
}
function onEnd() {
console.log('Extracted!')
}
var extractor = tar.Extract({path: __dirname + "/extract"})
.on('error', onError)
.on('end', onEnd);
fs.createReadStream(__dirname + "/../test/fixtures/c.tar")
.on('error', onError)
.pipe(extractor);

24
Client/node_modules/tar/examples/packer.js generated vendored Executable file

@@ -0,0 +1,24 @@
var tar = require("../tar.js")
, fstream = require("fstream")
, fs = require("fs")
var dirDest = fs.createWriteStream('dir.tar')
function onError(err) {
console.error('An error occurred:', err)
}
function onEnd() {
console.log('Packed!')
}
var packer = tar.Pack({ noProprietary: true })
.on('error', onError)
.on('end', onEnd);
// This must be a "directory"
fstream.Reader({ path: __dirname, type: "Directory" })
.on('error', onError)
.pipe(packer)
.pipe(dirDest)

36
Client/node_modules/tar/examples/reader.js generated vendored Executable file

@@ -0,0 +1,36 @@
var tar = require("../tar.js")
, fs = require("fs")
fs.createReadStream(__dirname + "/../test/fixtures/c.tar")
.pipe(tar.Parse())
.on("extendedHeader", function (e) {
console.error("extended pax header", e.props)
e.on("end", function () {
console.error("extended pax fields:", e.fields)
})
})
.on("ignoredEntry", function (e) {
console.error("ignoredEntry?!?", e.props)
})
.on("longLinkpath", function (e) {
console.error("longLinkpath entry", e.props)
e.on("end", function () {
console.error("value=%j", e.body.toString())
})
})
.on("longPath", function (e) {
console.error("longPath entry", e.props)
e.on("end", function () {
console.error("value=%j", e.body.toString())
})
})
.on("entry", function (e) {
console.error("entry", e.props)
e.on("data", function (c) {
console.error(" >>>" + c.toString().replace(/\n/g, "\\n"))
})
e.on("end", function () {
console.error(" <<<EOF")
})
})

30
Client/node_modules/tar/lib/buffer-entry.js generated vendored Executable file

@@ -0,0 +1,30 @@
// just like the Entry class, but it buffers the contents
//
// XXX It would be good to set a maximum BufferEntry filesize,
// since it eats up memory. In normal operation,
// these are only for long filenames or link names, which are
// rarely very big.
module.exports = BufferEntry
var inherits = require("inherits")
, Entry = require("./entry.js")
function BufferEntry () {
Entry.apply(this, arguments)
this._buffer = new Buffer(this.props.size)
this._offset = 0
this.body = ""
this.on("end", function () {
this.body = this._buffer.toString().slice(0, -1)
})
}
inherits(BufferEntry, Entry)
// collect the bytes as they come in.
BufferEntry.prototype.write = function (c) {
c.copy(this._buffer, this._offset)
this._offset += c.length
Entry.prototype.write.call(this, c)
}

169
Client/node_modules/tar/lib/entry-writer.js generated vendored Executable file

@@ -0,0 +1,169 @@
module.exports = EntryWriter
var tar = require("../tar.js")
, TarHeader = require("./header.js")
, Entry = require("./entry.js")
, inherits = require("inherits")
, BlockStream = require("block-stream")
, ExtendedHeaderWriter
, Stream = require("stream").Stream
, EOF = {}
inherits(EntryWriter, Stream)
function EntryWriter (props) {
var me = this
if (!(me instanceof EntryWriter)) {
return new EntryWriter(props)
}
Stream.apply(this)
me.writable = true
me.readable = true
me._stream = new BlockStream(512)
me._stream.on("data", function (c) {
me.emit("data", c)
})
me._stream.on("drain", function () {
me.emit("drain")
})
me._stream.on("end", function () {
me.emit("end")
me.emit("close")
})
me.props = props
if (props.type === "Directory") {
props.size = 0
}
props.ustar = "ustar\0"
props.ustarver = "00"
me.path = props.path
me._buffer = []
me._didHeader = false
me._meta = false
me.on("pipe", function () {
me._process()
})
}
EntryWriter.prototype.write = function (c) {
// console.error(".. ew write")
if (this._ended) return this.emit("error", new Error("write after end"))
this._buffer.push(c)
this._process()
this._needDrain = this._buffer.length > 0
return !this._needDrain
}
EntryWriter.prototype.end = function (c) {
// console.error(".. ew end")
if (c) this._buffer.push(c)
this._buffer.push(EOF)
this._ended = true
this._process()
this._needDrain = this._buffer.length > 0
}
EntryWriter.prototype.pause = function () {
// console.error(".. ew pause")
this._paused = true
this.emit("pause")
}
EntryWriter.prototype.resume = function () {
// console.error(".. ew resume")
this._paused = false
this.emit("resume")
this._process()
}
EntryWriter.prototype.add = function (entry) {
// console.error(".. ew add")
if (!this.parent) return this.emit("error", new Error("no parent"))
// make sure that the _header and such is emitted, and clear out
// the _currentEntry link on the parent.
if (!this._ended) this.end()
return this.parent.add(entry)
}
EntryWriter.prototype._header = function () {
// console.error(".. ew header")
if (this._didHeader) return
this._didHeader = true
var headerBlock = TarHeader.encode(this.props)
if (this.props.needExtended && !this._meta) {
var me = this
ExtendedHeaderWriter = ExtendedHeaderWriter ||
require("./extended-header-writer.js")
ExtendedHeaderWriter(this.props)
.on("data", function (c) {
me.emit("data", c)
})
.on("error", function (er) {
me.emit("error", er)
})
.end()
}
// console.error(".. .. ew headerBlock emitting")
this.emit("data", headerBlock)
this.emit("header")
}
EntryWriter.prototype._process = function () {
// console.error(".. .. ew process")
if (!this._didHeader && !this._meta) {
this._header()
}
if (this._paused || this._processing) {
// console.error(".. .. .. paused=%j, processing=%j", this._paused, this._processing)
return
}
this._processing = true
var buf = this._buffer
for (var i = 0; i < buf.length; i ++) {
// console.error(".. .. .. i=%d", i)
var c = buf[i]
if (c === EOF) this._stream.end()
else this._stream.write(c)
if (this._paused) {
// console.error(".. .. .. paused mid-emission")
this._processing = false
if (i < buf.length) {
this._needDrain = true
this._buffer = buf.slice(i + 1)
}
return
}
}
// console.error(".. .. .. emitted")
this._buffer.length = 0
this._processing = false
// console.error(".. .. .. emitting drain")
this.emit("drain")
}
EntryWriter.prototype.destroy = function () {}

220
Client/node_modules/tar/lib/entry.js generated vendored Executable file

@@ -0,0 +1,220 @@
// A passthrough read/write stream that sets its properties
// based on a header, extendedHeader, and globalHeader
//
// Can be either a file system object of some sort, or
// a pax/ustar metadata entry.
module.exports = Entry
var TarHeader = require("./header.js")
, tar = require("../tar")
, assert = require("assert").ok
, Stream = require("stream").Stream
, inherits = require("inherits")
, fstream = require("fstream").Abstract
function Entry (header, extended, global) {
Stream.call(this)
this.readable = true
this.writable = true
this._needDrain = false
this._paused = false
this._reading = false
this._ending = false
this._ended = false
this._remaining = 0
this._abort = false
this._queue = []
this._index = 0
this._queueLen = 0
this._read = this._read.bind(this)
this.props = {}
this._header = header
this._extended = extended || {}
// globals can change throughout the course of
// a file parse operation. Freeze it at its current state.
this._global = {}
var me = this
Object.keys(global || {}).forEach(function (g) {
me._global[g] = global[g]
})
this._setProps()
}
inherits(Entry, Stream)
Entry.prototype.write = function (c) {
if (this._ending) this.error("write() after end()", null, true)
if (this._remaining === 0) {
this.error("invalid bytes past eof")
}
// often we'll get a bunch of \0 at the end of the last write,
// since chunks will always be 512 bytes when reading a tarball.
if (c.length > this._remaining) {
c = c.slice(0, this._remaining)
}
this._remaining -= c.length
// put it on the stack.
var ql = this._queueLen
this._queue.push(c)
this._queueLen ++
this._read()
// either paused, or buffered
if (this._paused || ql > 0) {
this._needDrain = true
return false
}
return true
}
Entry.prototype.end = function (c) {
if (c) this.write(c)
this._ending = true
this._read()
}
Entry.prototype.pause = function () {
this._paused = true
this.emit("pause")
}
Entry.prototype.resume = function () {
// console.error(" Tar Entry resume", this.path)
this.emit("resume")
this._paused = false
this._read()
return this._queueLen - this._index > 1
}
// This is bound to the instance
Entry.prototype._read = function () {
// console.error(" Tar Entry _read", this.path)
if (this._paused || this._reading || this._ended) return
// set this flag so that event handlers don't inadvertently
// get multiple _read() calls running.
this._reading = true
// have any data to emit?
while (this._index < this._queueLen && !this._paused) {
var chunk = this._queue[this._index ++]
this.emit("data", chunk)
}
// check if we're drained
if (this._index >= this._queueLen) {
this._queue.length = this._queueLen = this._index = 0
if (this._needDrain) {
this._needDrain = false
this.emit("drain")
}
if (this._ending) {
this._ended = true
this.emit("end")
}
}
// if the queue gets too big, then pluck off whatever we can.
// this should be fairly rare.
var mql = this._maxQueueLen
if (this._queueLen > mql && this._index > 0) {
mql = Math.min(this._index, mql)
this._index -= mql
this._queueLen -= mql
this._queue = this._queue.slice(mql)
}
this._reading = false
}
Entry.prototype._setProps = function () {
// props = extended->global->header->{}
var header = this._header
, extended = this._extended
, global = this._global
, props = this.props
// first get the values from the normal header.
var fields = tar.fields
for (var f = 0; fields[f] !== null; f ++) {
var field = fields[f]
, val = header[field]
if (typeof val !== "undefined") props[field] = val
}
// next, the global header for this file.
// numeric values, etc, will have already been parsed.
;[global, extended].forEach(function (p) {
Object.keys(p).forEach(function (f) {
if (typeof p[f] !== "undefined") props[f] = p[f]
})
})
// no nulls allowed in path or linkpath
;["path", "linkpath"].forEach(function (p) {
if (props.hasOwnProperty(p)) {
props[p] = props[p].split("\0")[0]
}
})
// set date fields to be a proper date
;["mtime", "ctime", "atime"].forEach(function (p) {
if (props.hasOwnProperty(p)) {
props[p] = new Date(props[p] * 1000)
}
})
// set the type so that we know what kind of file to create
var type
switch (tar.types[props.type]) {
case "OldFile":
case "ContiguousFile":
type = "File"
break
case "GNUDumpDir":
type = "Directory"
break
case undefined:
type = "Unknown"
break
case "Link":
case "SymbolicLink":
case "CharacterDevice":
case "BlockDevice":
case "Directory":
case "FIFO":
default:
type = tar.types[props.type]
}
this.type = type
this.path = props.path
this.size = props.size
// size is special, since it signals when the file needs to end.
this._remaining = props.size
}
// the parser may not call write if _abort is true.
// useful for skipping data from some files quickly.
Entry.prototype.abort = function(){
this._abort = true
}
Entry.prototype.warn = fstream.warn
Entry.prototype.error = fstream.error

191
Client/node_modules/tar/lib/extended-header-writer.js generated vendored Executable file

@@ -0,0 +1,191 @@
module.exports = ExtendedHeaderWriter
var inherits = require("inherits")
, EntryWriter = require("./entry-writer.js")
inherits(ExtendedHeaderWriter, EntryWriter)
var tar = require("../tar.js")
, path = require("path")
, TarHeader = require("./header.js")
// props is the props of the thing we need to write an
// extended header for.
// Don't be shy with it. Just encode everything.
function ExtendedHeaderWriter (props) {
// console.error(">> ehw ctor")
var me = this
if (!(me instanceof ExtendedHeaderWriter)) {
return new ExtendedHeaderWriter(props)
}
me.fields = props
var p =
{ path : ("PaxHeader" + path.join("/", props.path || ""))
.replace(/\\/g, "/").substr(0, 100)
, mode : props.mode || 0666
, uid : props.uid || 0
, gid : props.gid || 0
, size : 0 // will be set later
, mtime : props.mtime || Date.now() / 1000
, type : "x"
, linkpath : ""
, ustar : "ustar\0"
, ustarver : "00"
, uname : props.uname || ""
, gname : props.gname || ""
, devmaj : props.devmaj || 0
, devmin : props.devmin || 0
}
EntryWriter.call(me, p)
// console.error(">> ehw props", me.props)
me.props = p
me._meta = true
}
ExtendedHeaderWriter.prototype.end = function () {
// console.error(">> ehw end")
var me = this
if (me._ended) return
me._ended = true
me._encodeFields()
if (me.props.size === 0) {
// nothing to write!
me._ready = true
me._stream.end()
return
}
me._stream.write(TarHeader.encode(me.props))
me.body.forEach(function (l) {
me._stream.write(l)
})
me._ready = true
// console.error(">> ehw _process calling end()", me.props)
this._stream.end()
}
ExtendedHeaderWriter.prototype._encodeFields = function () {
// console.error(">> ehw _encodeFields")
this.body = []
if (this.fields.prefix) {
this.fields.path = this.fields.prefix + "/" + this.fields.path
this.fields.prefix = ""
}
encodeFields(this.fields, "", this.body, this.fields.noProprietary)
var me = this
this.body.forEach(function (l) {
me.props.size += l.length
})
}
function encodeFields (fields, prefix, body, nop) {
// console.error(">> >> ehw encodeFields")
// "%d %s=%s\n", <length>, <keyword>, <value>
// The length is a decimal number, and includes itself and the \n
// Numeric values are decimal strings.
Object.keys(fields).forEach(function (k) {
var val = fields[k]
, numeric = tar.numeric[k]
if (prefix) k = prefix + "." + k
// already including NODETAR.type, don't need File=true also
if (k === fields.type && val === true) return
switch (k) {
// don't include anything that's always handled just fine
// in the normal header, or only meaningful in the context
// of nodetar
case "mode":
case "cksum":
case "ustar":
case "ustarver":
case "prefix":
case "basename":
case "dirname":
case "needExtended":
case "block":
case "filter":
return
case "rdev":
if (val === 0) return
break
case "nlink":
case "dev": // Truly a hero among men, Creator of Star!
case "ino": // Speak his name with reverent awe! It is:
k = "SCHILY." + k
break
default: break
}
if (val && typeof val === "object" &&
!Buffer.isBuffer(val)) encodeFields(val, k, body, nop)
else if (val === null || val === undefined) return
else body.push.apply(body, encodeField(k, val, nop))
})
return body
}
function encodeField (k, v, nop) {
// lowercase keys must be valid, otherwise prefix with
// "NODETAR."
if (k.charAt(0) === k.charAt(0).toLowerCase()) {
var m = k.split(".")[0]
if (!tar.knownExtended[m]) k = "NODETAR." + k
}
// no proprietary
if (nop && k.charAt(0) !== k.charAt(0).toLowerCase()) {
return []
}
if (typeof v === "number") v = v.toString(10)
var s = new Buffer(" " + k + "=" + v + "\n")
, digits = Math.floor(Math.log(s.length) / Math.log(10)) + 1
// console.error("1 s=%j digits=%j s.length=%d", s.toString(), digits, s.length)
// if adding that many digits will make it go over that length,
// then add one to it. For example, if the string is:
// " foo=bar\n"
// then that's 9 characters. With the "9", that bumps the length
// up to 10. However, this is invalid:
// "10 foo=bar\n"
// but, since that's actually 11 characters, since 10 adds another
// character to the length, and the length includes the number
// itself. In that case, just bump it up again.
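// Worked example: s = " foo=bar\n" is 9 bytes and digits = 1; since
// 9 + 1 >= 10, digits is bumped to 2, so len = 11 and the record is
// "11 foo=bar\n" (11 bytes, counting the "11" itself).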
if (s.length + digits >= Math.pow(10, digits)) digits += 1
// console.error("2 s=%j digits=%j s.length=%d", s.toString(), digits, s.length)
var len = digits + s.length
// console.error("3 s=%j digits=%j s.length=%d len=%d", s.toString(), digits, s.length, len)
var lenBuf = new Buffer("" + len)
if (lenBuf.length + s.length !== len) {
throw new Error("Bad length calculation\n"+
"len="+len+"\n"+
"lenBuf="+JSON.stringify(lenBuf.toString())+"\n"+
"lenBuf.length="+lenBuf.length+"\n"+
"digits="+digits+"\n"+
"s="+JSON.stringify(s.toString())+"\n"+
"s.length="+s.length)
}
return [lenBuf, s]
}

140
Client/node_modules/tar/lib/extended-header.js generated vendored Executable file

@@ -0,0 +1,140 @@
// An Entry consisting of:
//
// "%d %s=%s\n", <length>, <keyword>, <value>
//
// The length is a decimal number, and includes itself and the \n
// \0 does not terminate anything. Only the length terminates the string.
// Numeric values are decimal strings.
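//
// For example, a well-formed record is "20 mtime=1350164751\n": the
// length 20 counts its own two digits, the space, and the trailing \n.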
module.exports = ExtendedHeader
var Entry = require("./entry.js")
, inherits = require("inherits")
, tar = require("../tar.js")
, numeric = tar.numeric
, keyTrans = { "SCHILY.dev": "dev"
, "SCHILY.ino": "ino"
, "SCHILY.nlink": "nlink" }
function ExtendedHeader () {
Entry.apply(this, arguments)
this.on("data", this._parse)
this.fields = {}
this._position = 0
this._fieldPos = 0
this._state = SIZE
this._sizeBuf = []
this._keyBuf = []
this._valBuf = []
this._size = -1
this._key = ""
}
inherits(ExtendedHeader, Entry)
ExtendedHeader.prototype._parse = parse
var s = 0
, states = ExtendedHeader.states = {}
, SIZE = states.SIZE = s++
, KEY = states.KEY = s++
, VAL = states.VAL = s++
, ERR = states.ERR = s++
Object.keys(states).forEach(function (s) {
states[states[s]] = states[s]
})
states[s] = null
// char code values for comparison
var _0 = "0".charCodeAt(0)
, _9 = "9".charCodeAt(0)
, point = ".".charCodeAt(0)
, A = "A".charCodeAt(0)
, Z = "Z".charCodeAt(0)
, a = "a".charCodeAt(0)
, z = "z".charCodeAt(0)
, space = " ".charCodeAt(0)
, eq = "=".charCodeAt(0)
, cr = "\n".charCodeAt(0)
function parse (c) {
if (this._state === ERR) return
for ( var i = 0, l = c.length
; i < l
; this._position++, this._fieldPos++, i++) {
// console.error("top of loop, size="+this._size)
var b = c[i]
if (this._size >= 0 && this._fieldPos > this._size) {
error(this, "field exceeds length="+this._size)
return
}
switch (this._state) {
case ERR: return
case SIZE:
// console.error("parsing size, b=%d, rest=%j", b, c.slice(i).toString())
if (b === space) {
this._state = KEY
// this._fieldPos = this._sizeBuf.length
this._size = parseInt(new Buffer(this._sizeBuf).toString(), 10)
this._sizeBuf.length = 0
continue
}
if (b < _0 || b > _9) {
error(this, "expected [" + _0 + ".." + _9 + "], got " + b)
return
}
this._sizeBuf.push(b)
continue
case KEY:
// can be any char except =, not > size.
if (b === eq) {
this._state = VAL
this._key = new Buffer(this._keyBuf).toString()
if (keyTrans[this._key]) this._key = keyTrans[this._key]
this._keyBuf.length = 0
continue
}
this._keyBuf.push(b)
continue
case VAL:
// field must end with cr
if (this._fieldPos === this._size - 1) {
// console.error("finished with "+this._key)
if (b !== cr) {
error(this, "expected \\n at end of field")
return
}
var val = new Buffer(this._valBuf).toString()
if (numeric[this._key]) {
val = parseFloat(val)
}
this.fields[this._key] = val
this._valBuf.length = 0
this._state = SIZE
this._size = -1
this._fieldPos = -1
continue
}
this._valBuf.push(b)
continue
}
}
}
function error (me, msg) {
msg = "invalid header: " + msg
+ "\nposition=" + me._position
+ "\nfield position=" + me._fieldPos
me.error(msg)
me._state = ERR
}

94
Client/node_modules/tar/lib/extract.js generated vendored Executable file

@@ -0,0 +1,94 @@
// give it a tarball and a path, and it'll dump the contents
module.exports = Extract
var tar = require("../tar.js")
, fstream = require("fstream")
, inherits = require("inherits")
, path = require("path")
function Extract (opts) {
if (!(this instanceof Extract)) return new Extract(opts)
tar.Parse.apply(this)
if (typeof opts !== "object") {
opts = { path: opts }
}
// better to drop in cwd? seems more standard.
opts.path = opts.path || path.resolve("node-tar-extract")
opts.type = "Directory"
opts.Directory = true
// similar to --strip or --strip-components
opts.strip = +opts.strip
if (!opts.strip || opts.strip <= 0) opts.strip = 0
this._fst = fstream.Writer(opts)
this.pause()
var me = this
// Hardlinks in tarballs are relative to the root
// of the tarball. So, they need to be resolved against
// the target directory in order to be created properly.
me.on("entry", function (entry) {
// if there's a "strip" argument, then strip off that many
// path components.
if (opts.strip) {
var p = entry.path.split("/").slice(opts.strip).join("/")
entry.path = entry.props.path = p
if (entry.linkpath) {
var lp = entry.linkpath.split("/").slice(opts.strip).join("/")
entry.linkpath = entry.props.linkpath = lp
}
}
if (entry.type === "Link") {
entry.linkpath = entry.props.linkpath =
path.join(opts.path, path.join("/", entry.props.linkpath))
}
if (entry.type === "SymbolicLink") {
var dn = path.dirname(entry.path) || ""
var linkpath = entry.props.linkpath
var target = path.resolve(opts.path, dn, linkpath)
if (target.indexOf(opts.path) !== 0) {
linkpath = path.join(opts.path, path.join("/", linkpath))
}
entry.linkpath = entry.props.linkpath = linkpath
}
})
this._fst.on("ready", function () {
me.pipe(me._fst, { end: false })
me.resume()
})
this._fst.on('error', function(err) {
me.emit('error', err)
})
this._fst.on('drain', function() {
me.emit('drain')
})
// this._fst.on("end", function () {
// console.error("\nEEEE Extract End", me._fst.path)
// })
this._fst.on("close", function () {
// console.error("\nEEEE Extract End", me._fst.path)
me.emit("finish")
me.emit("end")
me.emit("close")
})
}
inherits(Extract, tar.Parse)
Extract.prototype._streamEnd = function () {
var me = this
if (!me._ended || me._entry) me.error("unexpected eof")
me._fst.end()
// my .end() is coming later.
}

14
Client/node_modules/tar/lib/global-header-writer.js generated vendored Executable file

@@ -0,0 +1,14 @@
module.exports = GlobalHeaderWriter
var ExtendedHeaderWriter = require("./extended-header-writer.js")
, inherits = require("inherits")
inherits(GlobalHeaderWriter, ExtendedHeaderWriter)
function GlobalHeaderWriter (props) {
if (!(this instanceof GlobalHeaderWriter)) {
return new GlobalHeaderWriter(props)
}
ExtendedHeaderWriter.call(this, props)
this.props.type = "g"
}

385
Client/node_modules/tar/lib/header.js generated vendored Executable file

@@ -0,0 +1,385 @@
// parse a 512-byte header block to a data object, or vice-versa
// If the data won't fit nicely in a simple header, then generate
// the appropriate extended header file, and return that.
module.exports = TarHeader
var tar = require("../tar.js")
, fields = tar.fields
, fieldOffs = tar.fieldOffs
, fieldEnds = tar.fieldEnds
, fieldSize = tar.fieldSize
, numeric = tar.numeric
, assert = require("assert").ok
, space = " ".charCodeAt(0)
, slash = "/".charCodeAt(0)
, bslash = process.platform === "win32" ? "\\".charCodeAt(0) : null
function TarHeader (block) {
if (!(this instanceof TarHeader)) return new TarHeader(block)
if (block) this.decode(block)
}
TarHeader.prototype =
{ decode : decode
, encode: encode
, calcSum: calcSum
, checkSum: checkSum
}
TarHeader.parseNumeric = parseNumeric
TarHeader.encode = encode
TarHeader.decode = decode
// note that this will only do the normal ustar header, not any kind
// of extended posix header file. If something doesn't fit comfortably,
// then it will set obj.needExtended = true, and set the block to
// the closest approximation.
function encode (obj) {
if (!obj && !(this instanceof TarHeader)) throw new Error(
"encode must be called on a TarHeader, or supplied an object")
obj = obj || this
var block = obj.block = new Buffer(512)
// if the object has a "prefix", then that's actually an extension of
// the path field.
if (obj.prefix) {
// console.error("%% header encoding, got a prefix", obj.prefix)
obj.path = obj.prefix + "/" + obj.path
// console.error("%% header encoding, prefixed path", obj.path)
obj.prefix = ""
}
obj.needExtended = false
if (obj.mode) {
if (typeof obj.mode === "string") obj.mode = parseInt(obj.mode, 8)
obj.mode = obj.mode & 0777
}
for (var f = 0; fields[f] !== null; f ++) {
var field = fields[f]
, off = fieldOffs[f]
, end = fieldEnds[f]
, ret
switch (field) {
case "cksum":
// special, done below, after all the others
break
case "prefix":
// special, this is an extension of the "path" field.
// console.error("%% header encoding, skip prefix later")
break
case "type":
// convert from long name to a single char.
var type = obj.type || "0"
if (type.length > 1) {
type = tar.types[obj.type]
if (!type) type = "0"
}
writeText(block, off, end, type)
break
case "path":
// uses the "prefix" field if > 100 bytes, but <= 255
var pathLen = Buffer.byteLength(obj.path)
, pathFSize = fieldSize[fields.path]
, prefFSize = fieldSize[fields.prefix]
// paths between 100 and 255 should use the prefix field.
// longer than 255
if (pathLen > pathFSize &&
pathLen <= pathFSize + prefFSize) {
// need to find a slash somewhere in the middle so that
// path and prefix both fit in their respective fields
var searchStart = pathLen - 1 - pathFSize
, searchEnd = prefFSize
, found = false
, pathBuf = new Buffer(obj.path)
for ( var s = searchStart
; (s <= searchEnd)
; s ++ ) {
if (pathBuf[s] === slash || pathBuf[s] === bslash) {
found = s
break
}
}
if (found !== false) {
var prefix = pathBuf.slice(0, found).toString("utf8")
, path = pathBuf.slice(found + 1).toString("utf8")
ret = writeText(block, off, end, path)
off = fieldOffs[fields.prefix]
end = fieldEnds[fields.prefix]
// console.error("%% header writing prefix", off, end, prefix)
ret = writeText(block, off, end, prefix) || ret
break
}
}
// paths less than 100 chars don't need a prefix
// and paths longer than 255 need an extended header and will fail
// on old implementations no matter what we do here.
// Null out the prefix, and fallthrough to default.
// console.error("%% header writing no prefix")
var poff = fieldOffs[fields.prefix]
, pend = fieldEnds[fields.prefix]
writeText(block, poff, pend, "")
// fallthrough
// all other fields are numeric or text
default:
ret = numeric[field]
? writeNumeric(block, off, end, obj[field])
: writeText(block, off, end, obj[field] || "")
break
}
obj.needExtended = obj.needExtended || ret
}
var off = fieldOffs[fields.cksum]
, end = fieldEnds[fields.cksum]
writeNumeric(block, off, end, calcSum.call(this, block))
return block
}
// if it's a negative number, or greater than will fit,
// then use write256.
var MAXNUM = { 12: 077777777777
, 11: 07777777777
, 8 : 07777777
, 7 : 0777777 }
function writeNumeric (block, off, end, num) {
var writeLen = end - off
, maxNum = MAXNUM[writeLen] || 0
num = num || 0
// console.error(" numeric", num)
if (num instanceof Date ||
Object.prototype.toString.call(num) === "[object Date]") {
num = num.getTime() / 1000
}
if (num > maxNum || num < 0) {
write256(block, off, end, num)
// need an extended header if negative or too big.
return true
}
// god, tar is so annoying
// if the string is small enough, you should put a space
// between the octal string and the \0, but if it doesn't
// fit, then don't.
var numStr = Math.floor(num).toString(8)
if (num < MAXNUM[writeLen - 1]) numStr += " "
// pad with "0" chars
if (numStr.length < writeLen) {
numStr = (new Array(writeLen - numStr.length).join("0")) + numStr
}
if (numStr.length !== writeLen - 1) {
throw new Error("invalid length: " + JSON.stringify(numStr) + "\n" +
"expected: "+writeLen)
}
block.write(numStr, off, writeLen, "utf8")
block[end - 1] = 0
}
function write256 (block, off, end, num) {
var buf = block.slice(off, end)
var positive = num >= 0
buf[0] = positive ? 0x80 : 0xFF
// get the number as a base-256 tuple
if (!positive) num *= -1
var tuple = []
do {
var n = num % 256
tuple.push(n)
num = (num - n) / 256
} while (num)
var bytes = tuple.length
var fill = buf.length - bytes
for (var i = 1; i < fill; i ++) {
buf[i] = positive ? 0 : 0xFF
}
// tuple is a base256 number, with [0] as the *least* significant byte
// if it's negative, then we need to flip all the bits once we hit the
// first non-zero bit. The 2's-complement is (0x100 - n), and the 1's-
// complement is (0xFF - n).
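// Worked example: -1 in a 12-byte field encodes as twelve 0xFF bytes:
// the leading 0xFF marker, ten bytes of 0xFF fill, and 0x100 - 1 = 0xFF
// for the single tuple byte.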
var zero = true
for (i = bytes; i > 0; i --) {
var byte = tuple[bytes - i]
// note: fill + i - 1 keeps the least significant byte at the end of
// the field; fill + i would land one byte past the end of the slice.
if (positive) buf[fill + i - 1] = byte
else if (zero && byte === 0) buf[fill + i - 1] = 0
else if (zero) {
zero = false
buf[fill + i - 1] = 0x100 - byte
} else buf[fill + i - 1] = 0xFF - byte
}
}
function writeText (block, off, end, str) {
// strings are written as utf8, then padded with \0
var strLen = Buffer.byteLength(str)
, writeLen = Math.min(strLen, end - off)
// non-ascii fields need extended headers
// long fields get truncated
, needExtended = strLen !== str.length || strLen > writeLen
// write the string, and null-pad
if (writeLen > 0) block.write(str, off, writeLen, "utf8")
for (var i = off + writeLen; i < end; i ++) block[i] = 0
return needExtended
}
function calcSum (block) {
block = block || this.block
assert(Buffer.isBuffer(block) && block.length === 512)
if (!block) throw new Error("Need block to checksum")
// now figure out what it would be if the cksum was " "
var sum = 0
, start = fieldOffs[fields.cksum]
, end = fieldEnds[fields.cksum]
for (var i = 0; i < fieldOffs[fields.cksum]; i ++) {
sum += block[i]
}
for (var i = start; i < end; i ++) {
sum += space
}
for (var i = end; i < 512; i ++) {
sum += block[i]
}
return sum
}
function checkSum (block) {
var sum = calcSum.call(this, block)
block = block || this.block
var cksum = block.slice(fieldOffs[fields.cksum], fieldEnds[fields.cksum])
cksum = parseNumeric(cksum)
return cksum === sum
}
function decode (block) {
block = block || this.block
assert(Buffer.isBuffer(block) && block.length === 512)
this.block = block
this.cksumValid = this.checkSum()
var prefix = null
// slice off each field.
for (var f = 0; fields[f] !== null; f ++) {
var field = fields[f]
, val = block.slice(fieldOffs[f], fieldEnds[f])
switch (field) {
case "ustar":
// if not ustar, then everything after that is just padding.
if (val.toString() !== "ustar\0") {
this.ustar = false
return
} else {
// console.error("ustar:", val, val.toString())
this.ustar = val.toString()
}
break
// prefix is special, since it might signal the xstar header
case "prefix":
var atime = parseNumeric(val.slice(131, 131 + 12))
, ctime = parseNumeric(val.slice(131 + 12, 131 + 12 + 12))
if ((val[130] === 0 || val[130] === space) &&
typeof atime === "number" &&
typeof ctime === "number" &&
val[131 + 12] === space &&
val[131 + 12 + 12] === space) {
this.atime = atime
this.ctime = ctime
val = val.slice(0, 130)
}
prefix = val.toString("utf8").replace(/\0+$/, "")
// console.error("%% header reading prefix", prefix)
break
// all other fields are null-padding text
// or a number.
default:
if (numeric[field]) {
this[field] = parseNumeric(val)
} else {
this[field] = val.toString("utf8").replace(/\0+$/, "")
}
break
}
}
// if we got a prefix, then prepend it to the path.
if (prefix) {
this.path = prefix + "/" + this.path
// console.error("%% header got a prefix", this.path)
}
}
function parse256 (buf) {
// first byte MUST be either 80 or FF
// 80 for positive, FF for 2's comp
var positive
if (buf[0] === 0x80) positive = true
else if (buf[0] === 0xFF) positive = false
else return null
// build up a base-256 tuple from the least sig to the highest
var zero = false
, tuple = []
for (var i = buf.length - 1; i > 0; i --) {
var byte = buf[i]
if (positive) tuple.push(byte)
else if (zero && byte === 0) tuple.push(0)
else if (zero) {
zero = false
tuple.push(0x100 - byte)
} else tuple.push(0xFF - byte)
}
for (var sum = 0, i = 0, l = tuple.length; i < l; i ++) {
sum += tuple[i] * Math.pow(256, i)
}
return positive ? sum : -1 * sum
}
function parseNumeric (f) {
if (f[0] & 0x80) return parse256(f)
var str = f.toString("utf8").split("\0")[0].trim()
, res = parseInt(str, 8)
return isNaN(res) ? null : res
}

236
Client/node_modules/tar/lib/pack.js generated vendored Executable file

@@ -0,0 +1,236 @@
// pipe in an fstream, and it'll make a tarball.
// key-value pair argument is global extended header props.
module.exports = Pack
var EntryWriter = require("./entry-writer.js")
, Stream = require("stream").Stream
, path = require("path")
, inherits = require("inherits")
, GlobalHeaderWriter = require("./global-header-writer.js")
, collect = require("fstream").collect
, eof = new Buffer(512)
for (var i = 0; i < 512; i ++) eof[i] = 0
inherits(Pack, Stream)
function Pack (props) {
// console.error("-- p ctor")
var me = this
if (!(me instanceof Pack)) return new Pack(props)
if (props) me._noProprietary = props.noProprietary
else me._noProprietary = false
me._global = props
me.readable = true
me.writable = true
me._buffer = []
// console.error("-- -- set current to null in ctor")
me._currentEntry = null
me._processing = false
me._pipeRoot = null
me.on("pipe", function (src) {
if (src.root === me._pipeRoot) return
me._pipeRoot = src
src.on("end", function () {
me._pipeRoot = null
})
me.add(src)
})
}
Pack.prototype.addGlobal = function (props) {
// console.error("-- p addGlobal")
if (this._didGlobal) return
this._didGlobal = true
var me = this
GlobalHeaderWriter(props)
.on("data", function (c) {
me.emit("data", c)
})
.end()
}
Pack.prototype.add = function (stream) {
if (this._global && !this._didGlobal) this.addGlobal(this._global)
if (this._ended) return this.emit("error", new Error("add after end"))
collect(stream)
this._buffer.push(stream)
this._process()
this._needDrain = this._buffer.length > 0
return !this._needDrain
}
Pack.prototype.pause = function () {
this._paused = true
if (this._currentEntry) this._currentEntry.pause()
this.emit("pause")
}
Pack.prototype.resume = function () {
this._paused = false
if (this._currentEntry) this._currentEntry.resume()
this.emit("resume")
this._process()
}
Pack.prototype.end = function () {
this._ended = true
this._buffer.push(eof)
this._process()
}
Pack.prototype._process = function () {
var me = this
if (me._paused || me._processing) {
return
}
var entry = me._buffer.shift()
if (!entry) {
if (me._needDrain) {
me.emit("drain")
}
return
}
if (entry.ready === false) {
// console.error("-- entry is not ready", entry)
me._buffer.unshift(entry)
entry.on("ready", function () {
// console.error("-- -- ready!", entry)
me._process()
})
return
}
me._processing = true
if (entry === eof) {
// need 2 ending null blocks.
me.emit("data", eof)
me.emit("data", eof)
me.emit("end")
me.emit("close")
return
}
// Change the path to be relative to the root dir that was
// added to the tarball.
//
// XXX This should be more like how -C works, so you can
// explicitly set a root dir, and also explicitly set a pathname
// in the tarball to use. That way we can skip a lot of extra
// work when resolving symlinks for bundled dependencies in npm.
var root = path.dirname((entry.root || entry).path);
if (me._global && me._global.fromBase && entry.root && entry.root.path) {
// user set 'fromBase: true' indicating tar root should be directory itself
root = entry.root.path;
}
var wprops = {}
Object.keys(entry.props || {}).forEach(function (k) {
wprops[k] = entry.props[k]
})
if (me._noProprietary) wprops.noProprietary = true
wprops.path = path.relative(root, entry.path || '')
// actually not a matter of opinion or taste.
if (process.platform === "win32") {
wprops.path = wprops.path.replace(/\\/g, "/")
}
if (!wprops.type)
wprops.type = 'Directory'
switch (wprops.type) {
// sockets not supported
case "Socket":
return
case "Directory":
wprops.path += "/"
wprops.size = 0
break
case "Link":
var lp = path.resolve(path.dirname(entry.path), entry.linkpath)
wprops.linkpath = path.relative(root, lp) || "."
wprops.size = 0
break
case "SymbolicLink":
var lp = path.resolve(path.dirname(entry.path), entry.linkpath)
wprops.linkpath = path.relative(path.dirname(entry.path), lp) || "."
wprops.size = 0
break
}
// console.error("-- new writer", wprops)
// if (!wprops.type) {
// // console.error("-- no type?", entry.constructor.name, entry)
// }
// console.error("-- -- set current to new writer", wprops.path)
var writer = me._currentEntry = EntryWriter(wprops)
writer.parent = me
// writer.on("end", function () {
// // console.error("-- -- writer end", writer.path)
// })
writer.on("data", function (c) {
me.emit("data", c)
})
writer.on("header", function () {
Buffer.prototype.toJSON = function () {
return this.toString().split(/\0/).join(".")
}
// console.error("-- -- writer header %j", writer.props)
if (writer.props.size === 0) nextEntry()
})
writer.on("close", nextEntry)
var ended = false
function nextEntry () {
if (ended) return
ended = true
// console.error("-- -- writer close", writer.path)
// console.error("-- -- set current to null", wprops.path)
me._currentEntry = null
me._processing = false
me._process()
}
writer.on("error", function (er) {
// console.error("-- -- writer error", writer.path)
me.emit("error", er)
})
// if it's the root, then there's no need to add its entries,
// or data, since they'll be added directly.
if (entry === me._pipeRoot) {
// console.error("-- is the root, don't auto-add")
writer.add = null
}
entry.pipe(writer)
}
Pack.prototype.destroy = function () {}
Pack.prototype.write = function () {}

275
Client/node_modules/tar/lib/parse.js generated vendored Executable file

@@ -0,0 +1,275 @@
// A writable stream.
// It emits "entry" events, which provide a readable stream that has
// header info attached.
module.exports = Parse.create = Parse
var stream = require("stream")
, Stream = stream.Stream
, BlockStream = require("block-stream")
, tar = require("../tar.js")
, TarHeader = require("./header.js")
, Entry = require("./entry.js")
, BufferEntry = require("./buffer-entry.js")
, ExtendedHeader = require("./extended-header.js")
, assert = require("assert").ok
, inherits = require("inherits")
, fstream = require("fstream")
// reading a tar is a lot like reading a directory
// However, we're actually not going to run the ctor,
// since it does a stat and various other stuff.
// This inheritance gives us the pause/resume/pipe
// behavior that is desired.
inherits(Parse, fstream.Reader)
function Parse () {
var me = this
if (!(me instanceof Parse)) return new Parse()
// doesn't apply fstream.Reader ctor?
// no, because we don't want to stat/etc, we just
// want to get the entry/add logic from .pipe()
Stream.apply(me)
me.writable = true
me.readable = true
me._stream = new BlockStream(512)
me.position = 0
me._ended = false
me._stream.on("error", function (e) {
me.emit("error", e)
})
me._stream.on("data", function (c) {
me._process(c)
})
me._stream.on("end", function () {
me._streamEnd()
})
me._stream.on("drain", function () {
me.emit("drain")
})
}
// overridden in Extract class, since it needs to
// wait for its DirWriter part to finish before
// emitting "end"
Parse.prototype._streamEnd = function () {
var me = this
if (!me._ended || me._entry) me.error("unexpected eof")
me.emit("end")
}
// a tar reader is actually a filter, not just a readable stream.
// So, you should pipe a tarball stream into it, and it needs these
// write/end methods to do that.
Parse.prototype.write = function (c) {
if (this._ended) {
// gnutar puts a LOT of nulls at the end.
// you can keep writing these things forever.
// Just ignore them.
for (var i = 0, l = c.length; i < l; i ++) {
if (c[i] !== 0) return this.error("write() after end()")
}
return
}
return this._stream.write(c)
}
Parse.prototype.end = function (c) {
this._ended = true
return this._stream.end(c)
}
// don't need to do anything, since we're just
// proxying the data up from the _stream.
// Just need to override the parent's "Not Implemented"
// error-thrower.
Parse.prototype._read = function () {}
Parse.prototype._process = function (c) {
assert(c && c.length === 512, "block size should be 512")
// one of three cases.
// 1. A new header
// 2. A part of a file/extended header
// 3. One of two or more EOF null blocks
if (this._entry) {
var entry = this._entry
if(!entry._abort) entry.write(c)
else {
entry._remaining -= c.length
if(entry._remaining < 0) entry._remaining = 0
}
if (entry._remaining === 0) {
entry.end()
this._entry = null
}
} else {
// either zeroes or a header
var zero = true
for (var i = 0; i < 512 && zero; i ++) {
zero = c[i] === 0
}
// eof is *at least* 2 blocks of nulls, and then the end of the
// file. you can put blocks of nulls between entries anywhere,
// so appending one tarball to another is technically valid.
// ending without the eof null blocks is not allowed, however.
if (zero) {
if (this._eofStarted)
this._ended = true
this._eofStarted = true
} else {
this._eofStarted = false
this._startEntry(c)
}
}
this.position += 512
}
// take a header chunk, start the right kind of entry.
Parse.prototype._startEntry = function (c) {
var header = new TarHeader(c)
, self = this
, entry
, ev
, EntryType
, onend
, meta = false
if (null === header.size || !header.cksumValid) {
var e = new Error("invalid tar file")
e.header = header
e.tar_file_offset = this.position
e.tar_block = this.position / 512
return this.emit("error", e)
}
switch (tar.types[header.type]) {
case "File":
case "OldFile":
case "Link":
case "SymbolicLink":
case "CharacterDevice":
case "BlockDevice":
case "Directory":
case "FIFO":
case "ContiguousFile":
case "GNUDumpDir":
// start a file.
// pass in any extended headers
// These ones consumers are typically most interested in.
EntryType = Entry
ev = "entry"
break
case "GlobalExtendedHeader":
// extended headers that apply to the rest of the tarball
EntryType = ExtendedHeader
onend = function () {
self._global = self._global || {}
Object.keys(entry.fields).forEach(function (k) {
self._global[k] = entry.fields[k]
})
}
ev = "globalExtendedHeader"
meta = true
break
case "ExtendedHeader":
case "OldExtendedHeader":
// extended headers that apply to the next entry
EntryType = ExtendedHeader
onend = function () {
self._extended = entry.fields
}
ev = "extendedHeader"
meta = true
break
case "NextFileHasLongLinkpath":
// set linkpath=<contents> in extended header
EntryType = BufferEntry
onend = function () {
self._extended = self._extended || {}
self._extended.linkpath = entry.body
}
ev = "longLinkpath"
meta = true
break
case "NextFileHasLongPath":
case "OldGnuLongPath":
// set path=<contents> in file-extended header
EntryType = BufferEntry
onend = function () {
self._extended = self._extended || {}
self._extended.path = entry.body
}
ev = "longPath"
meta = true
break
default:
// all the rest we skip, but still set the _entry
// member, so that we can skip over their data appropriately.
// emit an event to say that this is an ignored entry type?
EntryType = Entry
ev = "ignoredEntry"
break
}
var global, extended
if (meta) {
global = extended = null
} else {
global = this._global
extended = this._extended
// extendedHeader only applies to one entry, so once we start
// an entry, it's over.
this._extended = null
}
entry = new EntryType(header, extended, global)
entry.meta = meta
// only proxy data events of normal files.
if (!meta) {
entry.on("data", function (c) {
me.emit("data", c)
})
}
if (onend) entry.on("end", onend)
this._entry = entry
var me = this
entry.on("pause", function () {
me.pause()
})
entry.on("resume", function () {
me.resume()
})
if (this.listeners("*").length) {
this.emit("*", ev, entry)
}
this.emit(ev, entry)
// Zero-byte entry. End immediately.
if (entry.props.size === 0) {
entry.end()
this._entry = null
}
}

59
Client/node_modules/tar/package.json generated vendored Executable file

@@ -0,0 +1,59 @@
{
"_from": "tar@^2.2.1",
"_id": "tar@2.2.1",
"_inBundle": false,
"_integrity": "sha1-jk0qJWwOIYXGsYrWlK7JaLg8sdE=",
"_location": "/tar",
"_phantomChildren": {},
"_requested": {
"type": "range",
"registry": true,
"raw": "tar@^2.2.1",
"name": "tar",
"escapedName": "tar",
"rawSpec": "^2.2.1",
"saveSpec": null,
"fetchSpec": "^2.2.1"
},
"_requiredBy": [
"/gulp-untar"
],
"_resolved": "http://registry.npmjs.org/tar/-/tar-2.2.1.tgz",
"_shasum": "8e4d2a256c0e2185c6b18ad694aec968b83cb1d1",
"_spec": "tar@^2.2.1",
"_where": "/home/nathan/Projects/Upsilon/UpsilonVsCodeLanguageServer/Client/node_modules/gulp-untar",
"author": {
"name": "Isaac Z. Schlueter",
"email": "i@izs.me",
"url": "http://blog.izs.me/"
},
"bugs": {
"url": "https://github.com/isaacs/node-tar/issues"
},
"bundleDependencies": false,
"dependencies": {
"block-stream": "*",
"fstream": "^1.0.2",
"inherits": "2"
},
"deprecated": false,
"description": "tar for node",
"devDependencies": {
"graceful-fs": "^4.1.2",
"mkdirp": "^0.5.0",
"rimraf": "1.x",
"tap": "0.x"
},
"homepage": "https://github.com/isaacs/node-tar#readme",
"license": "ISC",
"main": "tar.js",
"name": "tar",
"repository": {
"type": "git",
"url": "git://github.com/isaacs/node-tar.git"
},
"scripts": {
"test": "tap test/*.js"
},
"version": "2.2.1"
}

173
Client/node_modules/tar/tar.js generated vendored Executable file

@@ -0,0 +1,173 @@
// field names that every tar file must have.
// header is padded to 512 bytes.
var f = 0
, fields = {}
, path = fields.path = f++
, mode = fields.mode = f++
, uid = fields.uid = f++
, gid = fields.gid = f++
, size = fields.size = f++
, mtime = fields.mtime = f++
, cksum = fields.cksum = f++
, type = fields.type = f++
, linkpath = fields.linkpath = f++
, headerSize = 512
, blockSize = 512
, fieldSize = []
fieldSize[path] = 100
fieldSize[mode] = 8
fieldSize[uid] = 8
fieldSize[gid] = 8
fieldSize[size] = 12
fieldSize[mtime] = 12
fieldSize[cksum] = 8
fieldSize[type] = 1
fieldSize[linkpath] = 100
// "ustar\0" may introduce another bunch of headers.
// these are optional, and will be nulled out if not present.
var ustar = fields.ustar = f++
, ustarver = fields.ustarver = f++
, uname = fields.uname = f++
, gname = fields.gname = f++
, devmaj = fields.devmaj = f++
, devmin = fields.devmin = f++
, prefix = fields.prefix = f++
, fill = fields.fill = f++
// terminate fields.
fields[f] = null
fieldSize[ustar] = 6
fieldSize[ustarver] = 2
fieldSize[uname] = 32
fieldSize[gname] = 32
fieldSize[devmaj] = 8
fieldSize[devmin] = 8
fieldSize[prefix] = 155
fieldSize[fill] = 12
// nb: prefix field may in fact be 130 bytes of prefix,
// a null char, 12 bytes for atime, 12 bytes for ctime.
//
// To recognize this format:
// 1. prefix[130] === ' ' or '\0'
// 2. atime and ctime are octal numeric values
// 3. atime and ctime have ' ' in their last byte
var fieldEnds = {}
, fieldOffs = {}
, fe = 0
for (var i = 0; i < f; i ++) {
fieldOffs[i] = fe
fieldEnds[i] = (fe += fieldSize[i])
}
// build a translation table of field names.
Object.keys(fields).forEach(function (f) {
if (fields[f] !== null) fields[fields[f]] = f
})
// different values of the 'type' field
// names match the values of Stats.isX() functions, where appropriate
var types =
{ 0: "File"
, "\0": "OldFile" // like 0
, "": "OldFile"
, 1: "Link"
, 2: "SymbolicLink"
, 3: "CharacterDevice"
, 4: "BlockDevice"
, 5: "Directory"
, 6: "FIFO"
, 7: "ContiguousFile" // like 0
// posix headers
, g: "GlobalExtendedHeader" // k=v for the rest of the archive
, x: "ExtendedHeader" // k=v for the next file
// vendor-specific stuff
, A: "SolarisACL" // skip
, D: "GNUDumpDir" // like 5, but with data, which should be skipped
, I: "Inode" // metadata only, skip
, K: "NextFileHasLongLinkpath" // data = link path of next file
, L: "NextFileHasLongPath" // data = path of next file
, M: "ContinuationFile" // skip
, N: "OldGnuLongPath" // like L
, S: "SparseFile" // skip
, V: "TapeVolumeHeader" // skip
, X: "OldExtendedHeader" // like x
}
Object.keys(types).forEach(function (t) {
types[types[t]] = types[types[t]] || t
})
// values for the mode field
var modes =
{ suid: 04000 // set uid on extraction
, sgid: 02000 // set gid on extraction
, svtx: 01000 // set restricted deletion flag on dirs on extraction
, uread: 0400
, uwrite: 0200
, uexec: 0100
, gread: 040
, gwrite: 020
, gexec: 010
, oread: 4
, owrite: 2
, oexec: 1
, all: 07777
}
var numeric =
{ mode: true
, uid: true
, gid: true
, size: true
, mtime: true
, devmaj: true
, devmin: true
, cksum: true
, atime: true
, ctime: true
, dev: true
, ino: true
, nlink: true
}
Object.keys(modes).forEach(function (t) {
modes[modes[t]] = modes[modes[t]] || t
})
var knownExtended =
{ atime: true
, charset: true
, comment: true
, ctime: true
, gid: true
, gname: true
, linkpath: true
, mtime: true
, path: true
, realtime: true
, security: true
, size: true
, uid: true
, uname: true }
exports.fields = fields
exports.fieldSize = fieldSize
exports.fieldOffs = fieldOffs
exports.fieldEnds = fieldEnds
exports.types = types
exports.modes = modes
exports.numeric = numeric
exports.headerSize = headerSize
exports.blockSize = blockSize
exports.knownExtended = knownExtended
exports.Pack = require("./lib/pack.js")
exports.Parse = require("./lib/parse.js")
exports.Extract = require("./lib/extract.js")

53
Client/node_modules/tar/test/00-setup-fixtures.js generated vendored Executable file

@@ -0,0 +1,53 @@
// the fixtures have some weird stuff that is painful
// to include directly in the repo for various reasons.
//
// So, unpack the fixtures with the system tar first.
//
// This means, of course, that it'll only work if you
// already have a tar implementation, and some of them
// will not properly unpack the fixtures anyway.
//
// But, since usually those tests will fail on Windows
// and other systems with less capable filesystems anyway,
// at least this way we don't cause inconveniences by
// merely cloning the repo or installing the package.
var tap = require("tap")
, child_process = require("child_process")
, rimraf = require("rimraf")
, test = tap.test
, path = require("path")
test("clean fixtures", function (t) {
rimraf(path.resolve(__dirname, "fixtures"), function (er) {
t.ifError(er, "rimraf ./fixtures/")
t.end()
})
})
test("clean tmp", function (t) {
rimraf(path.resolve(__dirname, "tmp"), function (er) {
t.ifError(er, "rimraf ./tmp/")
t.end()
})
})
test("extract fixtures", function (t) {
var c = child_process.spawn("tar"
,["xzvf", "fixtures.tgz"]
,{ cwd: __dirname })
c.stdout.on("data", errwrite)
c.stderr.on("data", errwrite)
function errwrite (chunk) {
process.stderr.write(chunk)
}
c.on("exit", function (code) {
t.equal(code, 0, "extract fixtures should exit with 0")
if (code) {
t.comment("Note, all tests from here on out will fail because of this.")
}
t.end()
})
})

BIN
Client/node_modules/tar/test/cb-never-called-1.0.1.tgz generated vendored Executable file

Binary file not shown.

177
Client/node_modules/tar/test/dir-normalization.js generated vendored Executable file

@@ -0,0 +1,177 @@
// Set the umask, so that it works the same everywhere.
process.umask(parseInt('22', 8))
var fs = require('fs')
var path = require('path')
var fstream = require('fstream')
var test = require('tap').test
var tar = require('../tar.js')
var file = path.resolve(__dirname, 'dir-normalization.tar')
var target = path.resolve(__dirname, 'tmp/dir-normalization-test')
var ee = 0
var expectEntries = [
{ path: 'fixtures/',
mode: '755',
type: '5',
linkpath: ''
},
{ path: 'fixtures/a/',
mode: '755',
type: '5',
linkpath: ''
},
{ path: 'fixtures/the-chumbler',
mode: '755',
type: '2',
linkpath: path.resolve(target, 'a/b/c/d/the-chumbler'),
},
{ path: 'fixtures/a/b/',
mode: '755',
type: '5',
linkpath: ''
},
{ path: 'fixtures/a/x',
mode: '644',
type: '0',
linkpath: ''
},
{ path: 'fixtures/a/b/c/',
mode: '755',
type: '5',
linkpath: ''
},
{ path: 'fixtures/a/b/c/y',
mode: '755',
type: '2',
linkpath: '../../x',
}
]
var ef = 0
var expectFiles = [
{ path: '',
mode: '40755',
type: 'Directory',
depth: 0,
linkpath: undefined
},
{ path: '/fixtures',
mode: '40755',
type: 'Directory',
depth: 1,
linkpath: undefined
},
{ path: '/fixtures/a',
mode: '40755',
type: 'Directory',
depth: 2,
linkpath: undefined
},
{ path: '/fixtures/a/b',
mode: '40755',
type: 'Directory',
depth: 3,
linkpath: undefined
},
{ path: '/fixtures/a/b/c',
mode: '40755',
type: 'Directory',
depth: 4,
linkpath: undefined
},
{ path: '/fixtures/a/b/c/y',
mode: '120755',
type: 'SymbolicLink',
depth: 5,
linkpath: '../../x'
},
{ path: '/fixtures/a/x',
mode: '100644',
type: 'File',
depth: 3,
linkpath: undefined
},
{ path: '/fixtures/the-chumbler',
mode: '120755',
type: 'SymbolicLink',
depth: 2,
linkpath: path.resolve(target, 'a/b/c/d/the-chumbler')
}
]
test('preclean', function (t) {
require('rimraf').sync(path.join(__dirname, '/tmp/dir-normalization-test'))
t.pass('cleaned!')
t.end()
})
test('extract test', function (t) {
var extract = tar.Extract(target)
var inp = fs.createReadStream(file)
inp.pipe(extract)
extract.on('end', function () {
t.equal(ee, expectEntries.length, 'should see ' + expectEntries.length + ' entries')
// should get no more entries after end
extract.removeAllListeners('entry')
extract.on('entry', function (e) {
t.fail('Should not get entries after end!')
})
next()
})
extract.on('entry', function (entry) {
var mode = entry.props.mode & (~parseInt('22', 8))
var found = {
path: entry.path,
mode: mode.toString(8),
type: entry.props.type,
linkpath: entry.props.linkpath,
}
var wanted = expectEntries[ee++]
t.equivalent(found, wanted, 'tar entry ' + ee + ' ' + (wanted && wanted.path))
})
function next () {
var r = fstream.Reader({
path: target,
type: 'Directory',
sort: 'alpha'
})
r.on('ready', function () {
foundEntry(r)
})
r.on('end', finish)
function foundEntry (entry) {
var p = entry.path.substr(target.length)
var mode = entry.props.mode & (~parseInt('22', 8))
var found = {
path: p,
mode: mode.toString(8),
type: entry.props.type,
depth: entry.props.depth,
linkpath: entry.props.linkpath
}
var wanted = expectFiles[ef++]
t.equivalent(found, wanted, 'unpacked file ' + ef + ' ' + (wanted && wanted.path))
entry.on('entry', foundEntry)
}
function finish () {
t.equal(ef, expectFiles.length, 'should have ' + ef + ' items')
t.end()
}
}
})

BIN
Client/node_modules/tar/test/dir-normalization.tar generated vendored Executable file

Binary file not shown.

33
Client/node_modules/tar/test/error-on-broken.js generated vendored Executable file

@@ -0,0 +1,33 @@
var fs = require('fs')
var path = require('path')
var zlib = require('zlib')
var tap = require('tap')
var tar = require('../tar.js')
var file = path.join(__dirname, 'cb-never-called-1.0.1.tgz')
var target = path.join(__dirname, 'tmp/extract-test')
tap.test('preclean', function (t) {
require('rimraf').sync(__dirname + '/tmp/extract-test')
t.pass('cleaned!')
t.end()
})
tap.test('extract test', function (t) {
var extract = tar.Extract(target)
var inp = fs.createReadStream(file)
inp.pipe(zlib.createGunzip()).pipe(extract)
extract.on('error', function (er) {
t.equal(er.message, 'unexpected eof', 'error noticed')
t.end()
})
extract.on('end', function () {
t.fail('shouldn\'t reach this point due to errors')
t.end()
})
})

132
Client/node_modules/tar/test/extract-move.js generated vendored Executable file
View File

@@ -0,0 +1,132 @@
// Set the umask, so that it works the same everywhere.
process.umask(parseInt('22', 8))
var tap = require("tap")
, tar = require("../tar.js")
, fs = require("fs")
, gfs = require("graceful-fs")
, path = require("path")
, file = path.resolve(__dirname, "fixtures/dir.tar")
, target = path.resolve(__dirname, "tmp/extract-test")
, index = 0
, fstream = require("fstream")
, rimraf = require("rimraf")
, mkdirp = require("mkdirp")
, ee = 0
, expectEntries = [
{
"path" : "dir/",
"mode" : "750",
"type" : "5",
"depth" : undefined,
"size" : 0,
"linkpath" : "",
"nlink" : undefined,
"dev" : undefined,
"ino" : undefined
},
{
"path" : "dir/sub/",
"mode" : "750",
"type" : "5",
"depth" : undefined,
"size" : 0,
"linkpath" : "",
"nlink" : undefined,
"dev" : undefined,
"ino" : undefined
} ]
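// slow() wraps an fs method so that both the call and its completion
// callback are deferred, making timing-dependent bugs in the extract
// pipeline easier to trip.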
function slow (fs, method, t1, t2) {
var orig = fs[method]
if (!orig) return null
fs[method] = function () {
var args = [].slice.call(arguments)
console.error("slow", method, args[0])
var cb = args.pop()
setTimeout(function () {
orig.apply(fs, args.concat(function(er, data) {
setTimeout(function() {
cb(er, data)
}, t2)
}))
}, t1)
}
}
// Make sure we get the graceful-fs that fstream is using.
var gfs2
try {
gfs2 = require("fstream/node_modules/graceful-fs")
} catch (er) {}
var slowMethods = ["chown", "chmod", "utimes", "lutimes"]
slowMethods.forEach(function (method) {
var t1 = 500
var t2 = 0
slow(fs, method, t1, t2)
slow(gfs, method, t1, t2)
if (gfs2) {
slow(gfs2, method, t1, t2)
}
})
// The extract class basically just pipes the input
// to a Reader, and then to a fstream.DirWriter
// So, this is as much a test of fstream.Reader and fstream.Writer
// as it is of tar.Extract, but it sort of makes sense.
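// For reference, the same pipeline in minimal stand-alone form (archive and
// destination paths hypothetical):
//
//   fs.createReadStream("archive.tar").pipe(tar.Extract({ path: "/tmp/dest" }))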
tap.test("preclean", function (t) {
rimraf.sync(target)
  mkdirp.sync(target)
t.pass("cleaned!")
t.end()
})
tap.test("extract test", function (t) {
var extract = tar.Extract(target)
var inp = fs.createReadStream(file)
// give it a weird buffer size to try to break in odd places
inp.bufferSize = 1234
inp.pipe(extract)
extract.on("end", function () {
rimraf.sync(target)
t.equal(ee, expectEntries.length, "should see "+ee+" entries")
// should get no more entries after end
extract.removeAllListeners("entry")
extract.on("entry", function (e) {
t.fail("Should not get entries after end!")
})
t.end()
})
extract.on("entry", function (entry) {
var found =
{ path: entry.path
, mode: entry.props.mode.toString(8)
, type: entry.props.type
, depth: entry.props.depth
, size: entry.props.size
, linkpath: entry.props.linkpath
, nlink: entry.props.nlink
, dev: entry.props.dev
, ino: entry.props.ino
}
var wanted = expectEntries[ee ++]
t.equivalent(found, wanted, "tar entry " + ee + " " + wanted.path)
})
})

367
Client/node_modules/tar/test/extract.js generated vendored Executable file
View File

@@ -0,0 +1,367 @@
// Set the umask, so that it works the same everywhere.
process.umask(parseInt('22', 8))
var tap = require("tap")
, tar = require("../tar.js")
, fs = require("fs")
, path = require("path")
, file = path.resolve(__dirname, "fixtures/c.tar")
, target = path.resolve(__dirname, "tmp/extract-test")
, index = 0
, fstream = require("fstream")
, ee = 0
, expectEntries =
[ { path: 'c.txt',
mode: '644',
type: '0',
depth: undefined,
size: 513,
linkpath: '',
nlink: undefined,
dev: undefined,
ino: undefined },
{ path: 'cc.txt',
mode: '644',
type: '0',
depth: undefined,
size: 513,
linkpath: '',
nlink: undefined,
dev: undefined,
ino: undefined },
{ path: 'r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
mode: '644',
type: '0',
depth: undefined,
size: 100,
linkpath: '',
nlink: undefined,
dev: undefined,
ino: undefined },
{ path: 'Ω.txt',
mode: '644',
type: '0',
depth: undefined,
size: 2,
linkpath: '',
nlink: undefined,
dev: undefined,
ino: undefined },
{ path: 'Ω.txt',
mode: '644',
type: '0',
depth: undefined,
size: 2,
linkpath: '',
nlink: 1,
dev: 234881026,
ino: 51693379 },
{ path: '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
mode: '644',
type: '0',
depth: undefined,
size: 200,
linkpath: '',
nlink: 1,
dev: 234881026,
ino: 51681874 },
{ path: '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
mode: '644',
type: '0',
depth: undefined,
size: 201,
linkpath: '',
nlink: undefined,
dev: undefined,
ino: undefined },
{ path: '200LLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLL',
mode: '777',
type: '2',
depth: undefined,
size: 0,
linkpath: '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
nlink: undefined,
dev: undefined,
ino: undefined },
{ path: '200-hard',
mode: '644',
type: '0',
depth: undefined,
size: 200,
linkpath: '',
nlink: 2,
dev: 234881026,
ino: 51681874 },
{ path: '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
mode: '644',
type: '1',
depth: undefined,
size: 0,
linkpath: path.resolve(target, '200-hard'),
nlink: 2,
dev: 234881026,
ino: 51681874 } ]
, ef = 0
, expectFiles =
[ { path: '',
mode: '40755',
type: 'Directory',
depth: 0,
linkpath: undefined },
{ path: '/200-hard',
mode: '100644',
type: 'File',
depth: 1,
size: 200,
linkpath: undefined,
nlink: 2 },
{ path: '/200LLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLL',
mode: '120777',
type: 'SymbolicLink',
depth: 1,
size: 200,
linkpath: '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
nlink: 1 },
{ path: '/200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
mode: '100644',
type: 'Link',
depth: 1,
size: 200,
linkpath: path.join(target, '200-hard'),
nlink: 2 },
{ path: '/c.txt',
mode: '100644',
type: 'File',
depth: 1,
size: 513,
linkpath: undefined,
nlink: 1 },
{ path: '/cc.txt',
mode: '100644',
type: 'File',
depth: 1,
size: 513,
linkpath: undefined,
nlink: 1 },
{ path: '/r',
mode: '40755',
type: 'Directory',
depth: 1,
linkpath: undefined },
{ path: '/r/e',
mode: '40755',
type: 'Directory',
depth: 2,
linkpath: undefined },
{ path: '/r/e/a',
mode: '40755',
type: 'Directory',
depth: 3,
linkpath: undefined },
{ path: '/r/e/a/l',
mode: '40755',
type: 'Directory',
depth: 4,
linkpath: undefined },
{ path: '/r/e/a/l/l',
mode: '40755',
type: 'Directory',
depth: 5,
linkpath: undefined },
{ path: '/r/e/a/l/l/y',
mode: '40755',
type: 'Directory',
depth: 6,
linkpath: undefined },
{ path: '/r/e/a/l/l/y/-',
mode: '40755',
type: 'Directory',
depth: 7,
linkpath: undefined },
{ path: '/r/e/a/l/l/y/-/d',
mode: '40755',
type: 'Directory',
depth: 8,
linkpath: undefined },
{ path: '/r/e/a/l/l/y/-/d/e',
mode: '40755',
type: 'Directory',
depth: 9,
linkpath: undefined },
{ path: '/r/e/a/l/l/y/-/d/e/e',
mode: '40755',
type: 'Directory',
depth: 10,
linkpath: undefined },
{ path: '/r/e/a/l/l/y/-/d/e/e/p',
mode: '40755',
type: 'Directory',
depth: 11,
linkpath: undefined },
{ path: '/r/e/a/l/l/y/-/d/e/e/p/-',
mode: '40755',
type: 'Directory',
depth: 12,
linkpath: undefined },
{ path: '/r/e/a/l/l/y/-/d/e/e/p/-/f',
mode: '40755',
type: 'Directory',
depth: 13,
linkpath: undefined },
{ path: '/r/e/a/l/l/y/-/d/e/e/p/-/f/o',
mode: '40755',
type: 'Directory',
depth: 14,
linkpath: undefined },
{ path: '/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l',
mode: '40755',
type: 'Directory',
depth: 15,
linkpath: undefined },
{ path: '/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d',
mode: '40755',
type: 'Directory',
depth: 16,
linkpath: undefined },
{ path: '/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e',
mode: '40755',
type: 'Directory',
depth: 17,
linkpath: undefined },
{ path: '/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r',
mode: '40755',
type: 'Directory',
depth: 18,
linkpath: undefined },
{ path: '/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-',
mode: '40755',
type: 'Directory',
depth: 19,
linkpath: undefined },
{ path: '/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p',
mode: '40755',
type: 'Directory',
depth: 20,
linkpath: undefined },
{ path: '/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a',
mode: '40755',
type: 'Directory',
depth: 21,
linkpath: undefined },
{ path: '/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t',
mode: '40755',
type: 'Directory',
depth: 22,
linkpath: undefined },
{ path: '/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h',
mode: '40755',
type: 'Directory',
depth: 23,
linkpath: undefined },
{ path: '/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
mode: '100644',
type: 'File',
depth: 24,
size: 100,
linkpath: undefined,
nlink: 1 },
{ path: '/Ω.txt',
mode: '100644',
type: 'File',
depth: 1,
size: 2,
linkpath: undefined,
nlink: 1 } ]
// The extract class basically just pipes the input
// to a Reader, and then to a fstream.DirWriter
// So, this is as much a test of fstream.Reader and fstream.Writer
// as it is of tar.Extract, but it sort of makes sense.
tap.test("preclean", function (t) {
require("rimraf").sync(__dirname + "/tmp/extract-test")
t.pass("cleaned!")
t.end()
})
tap.test("extract test", function (t) {
var extract = tar.Extract(target)
var inp = fs.createReadStream(file)
// give it a weird buffer size to try to break in odd places
inp.bufferSize = 1234
inp.pipe(extract)
extract.on("end", function () {
t.equal(ee, expectEntries.length, "should see "+ee+" entries")
// should get no more entries after end
extract.removeAllListeners("entry")
extract.on("entry", function (e) {
t.fail("Should not get entries after end!")
})
next()
})
extract.on("entry", function (entry) {
var found =
{ path: entry.path
, mode: entry.props.mode.toString(8)
, type: entry.props.type
, depth: entry.props.depth
, size: entry.props.size
, linkpath: entry.props.linkpath
, nlink: entry.props.nlink
, dev: entry.props.dev
, ino: entry.props.ino
}
var wanted = expectEntries[ee ++]
t.equivalent(found, wanted, "tar entry " + ee + " " + wanted.path)
})
function next () {
var r = fstream.Reader({ path: target
, type: "Directory"
// this is just to encourage consistency
, sort: "alpha" })
r.on("ready", function () {
foundEntry(r)
})
r.on("end", finish)
function foundEntry (entry) {
var p = entry.path.substr(target.length)
var found =
{ path: p
, mode: entry.props.mode.toString(8)
, type: entry.props.type
, depth: entry.props.depth
, size: entry.props.size
, linkpath: entry.props.linkpath
, nlink: entry.props.nlink
}
var wanted = expectFiles[ef ++]
t.has(found, wanted, "unpacked file " + ef + " " + wanted.path)
entry.on("entry", foundEntry)
}
function finish () {
t.equal(ef, expectFiles.length, "should have "+ef+" items")
t.end()
}
}
})

BIN
Client/node_modules/tar/test/fixtures.tgz generated vendored Executable file

Binary file not shown.

183
Client/node_modules/tar/test/header.js generated vendored Executable file
View File

@@ -0,0 +1,183 @@
var tap = require("tap")
var TarHeader = require("../lib/header.js")
var tar = require("../tar.js")
var fs = require("fs")
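// Each fixture maps a descriptive name to a pair: the hex dump of a raw
// 512-byte header block, and the field values TarHeader should parse from it.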
var headers =
{ "a.txt file header":
[ "612e747874000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000030303036343420003035373736312000303030303234200030303030303030303430312031313635313336303333332030313234353100203000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000757374617200303069736161637300000000000000000000000000000000000000000000000000007374616666000000000000000000000000000000000000000000000000000000303030303030200030303030303020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"
, { cksumValid: true
, path: 'a.txt'
, mode: 420
, uid: 24561
, gid: 20
, size: 257
, mtime: 1319493851
, cksum: 5417
, type: '0'
, linkpath: ''
, ustar: 'ustar\0'
, ustarver: '00'
, uname: 'isaacs'
, gname: 'staff'
, devmaj: 0
, devmin: 0
, fill: '' }
]
, "omega pax": // the extended header from omega tar.
[ "5061784865616465722fcea92e74787400000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000030303036343420003035373736312000303030303234200030303030303030303137302031313534333731303631312030313530353100207800000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000757374617200303069736161637300000000000000000000000000000000000000000000000000007374616666000000000000000000000000000000000000000000000000000000303030303030200030303030303020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"
, { cksumValid: true
, path: 'PaxHeader/Ω.txt'
, mode: 420
, uid: 24561
, gid: 20
, size: 120
, mtime: 1301254537
, cksum: 6697
, type: 'x'
, linkpath: ''
, ustar: 'ustar\0'
, ustarver: '00'
, uname: 'isaacs'
, gname: 'staff'
, devmaj: 0
, devmin: 0
, fill: '' } ]
, "omega file header":
[ "cea92e7478740000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000030303036343420003035373736312000303030303234200030303030303030303030322031313534333731303631312030313330373200203000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000757374617200303069736161637300000000000000000000000000000000000000000000000000007374616666000000000000000000000000000000000000000000000000000000303030303030200030303030303020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"
, { cksumValid: true
, path: 'Ω.txt'
, mode: 420
, uid: 24561
, gid: 20
, size: 2
, mtime: 1301254537
, cksum: 5690
, type: '0'
, linkpath: ''
, ustar: 'ustar\0'
, ustarver: '00'
, uname: 'isaacs'
, gname: 'staff'
, devmaj: 0
, devmin: 0
, fill: '' } ]
, "foo.js file header":
[ "666f6f2e6a730000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000030303036343420003035373736312000303030303234200030303030303030303030342031313534333637303734312030313236313700203000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000757374617200303069736161637300000000000000000000000000000000000000000000000000007374616666000000000000000000000000000000000000000000000000000000303030303030200030303030303020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"
, { cksumValid: true
, path: 'foo.js'
, mode: 420
, uid: 24561
, gid: 20
, size: 4
, mtime: 1301246433
, cksum: 5519
, type: '0'
, linkpath: ''
, ustar: 'ustar\0'
, ustarver: '00'
, uname: 'isaacs'
, gname: 'staff'
, devmaj: 0
, devmin: 0
, fill: '' }
]
, "b.txt file header":
[ "622e747874000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000030303036343420003035373736312000303030303234200030303030303030313030302031313635313336303637372030313234363100203000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000757374617200303069736161637300000000000000000000000000000000000000000000000000007374616666000000000000000000000000000000000000000000000000000000303030303030200030303030303020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"
, { cksumValid: true
, path: 'b.txt'
, mode: 420
, uid: 24561
, gid: 20
, size: 512
, mtime: 1319494079
, cksum: 5425
, type: '0'
, linkpath: ''
, ustar: 'ustar\0'
, ustarver: '00'
, uname: 'isaacs'
, gname: 'staff'
, devmaj: 0
, devmin: 0
, fill: '' }
]
, "deep nested file":
[ "636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363633030303634342000303537373631200030303030323420003030303030303030313434203131363532313531353333203034333331340020300000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000075737461720030306973616163730000000000000000000000000000000000000000000000000000737461666600000000000000000000000000000000000000000000000000000030303030303020003030303030302000722f652f612f6c2f6c2f792f2d2f642f652f652f702f2d2f662f6f2f6c2f642f652f722f2d2f702f612f742f680000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"
, { cksumValid: true,
path: 'r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc'
, mode: 420
, uid: 24561
, gid: 20
, size: 100
, mtime: 1319687003
, cksum: 18124
, type: '0'
, linkpath: ''
, ustar: 'ustar\0'
, ustarver: '00'
, uname: 'isaacs'
, gname: 'staff'
, devmaj: 0
, devmin: 0
, fill: '' }
]
}
tap.test("parsing", function (t) {
Object.keys(headers).forEach(function (name) {
var h = headers[name]
, header = new Buffer(h[0], "hex")
, expect = h[1]
, parsed = new TarHeader(header)
// console.error(parsed)
t.has(parsed, expect, "parse " + name)
})
t.end()
})
tap.test("encoding", function (t) {
Object.keys(headers).forEach(function (name) {
var h = headers[name]
, expect = new Buffer(h[0], "hex")
, encoded = TarHeader.encode(h[1])
// might have slightly different bytes, since the standard
// isn't very strict, but should have the same semantics
// checkSum will be different, but cksumValid will be true
var th = new TarHeader(encoded)
delete h[1].block
delete h[1].needExtended
delete h[1].cksum
t.has(th, h[1], "fields "+name)
})
t.end()
})
// test these manually. they're a bit rare to find in the wild
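// The entries whose first byte is 0x80 or 0xff use GNU base-256 encoding
// instead of octal ASCII: the remaining bytes are a big-endian integer
// (e.g. "800000000000200000" is 0x200000 === 2097152), and a leading run of
// 0xff marks a negative two's-complement value.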
tap.test("parseNumeric tests", function (t) {
var parseNumeric = TarHeader.parseNumeric
, numbers =
{ "303737373737373700": 2097151
, "30373737373737373737373700": 8589934591
, "303030303036343400": 420
, "800000ffffffffffff": 281474976710655
, "ffffff000000000001": -281474976710654
, "ffffff000000000000": -281474976710655
, "800000000000200000": 2097152
, "8000000000001544c5": 1393861
, "ffffffffffff1544c5": -15383354 }
Object.keys(numbers).forEach(function (n) {
var b = new Buffer(n, "hex")
t.equal(parseNumeric(b), numbers[n], n + " === " + numbers[n])
})
t.end()
})

886
Client/node_modules/tar/test/pack-no-proprietary.js generated vendored Executable file
View File

@@ -0,0 +1,886 @@
// This is exactly like test/pack.js, except that it's excluding
// any proprietary headers.
//
// This loses some information about the filesystem, but creates
// tarballs that are supported by more versions of tar, especially
// old non-spec-compliant copies of gnutar.
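// For reference, the option under test in minimal form (paths hypothetical):
//
//   fstream.Reader({ path: "some/dir", type: "Directory" })
//     .pipe(tar.Pack({ noProprietary: true }))
//     .pipe(fs.createWriteStream("out.tar"))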
// the symlink file is excluded from git, because it makes
// windows freak the hell out.
var fs = require("fs")
, path = require("path")
, symlink = path.resolve(__dirname, "fixtures/symlink")
try { fs.unlinkSync(symlink) } catch (e) {}
fs.symlinkSync("./hardlink-1", symlink)
process.on("exit", function () {
fs.unlinkSync(symlink)
})
var tap = require("tap")
, tar = require("../tar.js")
, pkg = require("../package.json")
, Pack = tar.Pack
, fstream = require("fstream")
, Reader = fstream.Reader
, Writer = fstream.Writer
, input = path.resolve(__dirname, "fixtures/")
, target = path.resolve(__dirname, "tmp/pack.tar")
, uid = process.getuid ? process.getuid() : 0
, gid = process.getgid ? process.getgid() : 0
, entries =
// the global header and root fixtures/ dir are going to get
// a different date each time, so omit that bit.
// Also, dev/ino values differ across machines, so that's not
// included.
[ [ 'entry',
{ path: 'fixtures/',
mode: 493,
uid: uid,
gid: gid,
size: 0,
type: '5',
linkpath: '',
ustar: 'ustar\u0000',
ustarver: '00',
uname: '',
gname: '',
devmaj: 0,
devmin: 0,
fill: '' } ]
, [ 'extendedHeader',
{ path: 'PaxHeader/fixtures/200cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
mode: 420,
uid: uid,
gid: gid,
type: 'x',
linkpath: '',
ustar: 'ustar\u0000',
ustarver: '00',
uname: '',
gname: '',
devmaj: 0,
devmin: 0,
fill: '' },
{ path: 'fixtures/200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
uid: uid,
gid: gid,
size: 200 } ]
, [ 'entry',
{ path: 'fixtures/200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
mode: 420,
uid: uid,
gid: gid,
size: 200,
type: '0',
linkpath: '',
ustar: 'ustar\u0000',
ustarver: '00',
uname: '',
gname: '',
devmaj: 0,
devmin: 0,
fill: '' } ]
, [ 'entry',
{ path: 'fixtures/a.txt',
mode: 420,
uid: uid,
gid: gid,
size: 257,
type: '0',
linkpath: '',
ustar: 'ustar\u0000',
ustarver: '00',
uname: '',
gname: '',
devmaj: 0,
devmin: 0,
fill: '' } ]
, [ 'entry',
{ path: 'fixtures/b.txt',
mode: 420,
uid: uid,
gid: gid,
size: 512,
type: '0',
linkpath: '',
ustar: 'ustar\u0000',
ustarver: '00',
uname: '',
gname: '',
devmaj: 0,
devmin: 0,
fill: '' } ]
, [ 'entry',
{ path: 'fixtures/c.txt',
mode: 420,
uid: uid,
gid: gid,
size: 513,
type: '0',
linkpath: '',
ustar: 'ustar\u0000',
ustarver: '00',
uname: '',
gname: '',
devmaj: 0,
devmin: 0,
fill: '' } ]
, [ 'entry',
{ path: 'fixtures/cc.txt',
mode: 420,
uid: uid,
gid: gid,
size: 513,
type: '0',
linkpath: '',
ustar: 'ustar\u0000',
ustarver: '00',
uname: '',
gname: '',
devmaj: 0,
devmin: 0,
fill: '' } ]
, [ 'entry',
{ path: 'fixtures/dir/',
mode: 488,
uid: uid,
gid: gid,
size: 0,
type: '5',
linkpath: '',
ustar: 'ustar\u0000',
ustarver: '00',
uname: '',
gname: '',
devmaj: 0,
devmin: 0,
fill: '' } ]
, [ 'entry',
{ path: 'fixtures/dir/sub/',
mode: 488,
uid: uid,
gid: gid,
size: 0,
type: '5',
linkpath: '',
ustar: 'ustar\u0000',
ustarver: '00',
uname: '',
gname: '',
devmaj: 0,
devmin: 0,
fill: '' } ]
, [ 'entry',
{ path: 'fixtures/foo.js',
mode: 420,
uid: uid,
gid: gid,
size: 4,
type: '0',
linkpath: '',
ustar: 'ustar\u0000',
ustarver: '00',
uname: '',
gname: '',
devmaj: 0,
devmin: 0,
fill: '' } ]
, [ 'entry',
{ path: 'fixtures/hardlink-1',
mode: 420,
uid: uid,
gid: gid,
size: 200,
type: '0',
linkpath: '',
ustar: 'ustar\u0000',
ustarver: '00',
uname: '',
gname: '',
devmaj: 0,
devmin: 0,
fill: '' } ]
, [ 'entry',
{ path: 'fixtures/hardlink-2',
mode: 420,
uid: uid,
gid: gid,
size: 0,
type: '1',
linkpath: 'fixtures/hardlink-1',
ustar: 'ustar\u0000',
ustarver: '00',
uname: '',
gname: '',
devmaj: 0,
devmin: 0,
fill: '' } ]
, [ 'entry',
{ path: 'fixtures/omega.txt',
mode: 420,
uid: uid,
gid: gid,
size: 2,
type: '0',
linkpath: '',
ustar: 'ustar\u0000',
ustarver: '00',
uname: '',
gname: '',
devmaj: 0,
devmin: 0,
fill: '' } ]
, [ 'entry',
{ path: 'fixtures/packtest/',
mode: 493,
uid: uid,
gid: gid,
size: 0,
type: '5',
linkpath: '',
ustar: 'ustar\u0000',
ustarver: '00',
uname: '',
gname: '',
devmaj: 0,
devmin: 0,
fill: '' } ]
, [ 'entry',
{ path: 'fixtures/packtest/omega.txt',
mode: 420,
uid: uid,
gid: gid,
size: 2,
type: '0',
linkpath: '',
ustar: 'ustar\u0000',
ustarver: '00',
uname: '',
gname: '',
devmaj: 0,
devmin: 0,
fill: '' } ]
, [ 'entry',
{ path: 'fixtures/packtest/star.4.html',
mode: 420,
uid: uid,
gid: gid,
size: 54081,
type: '0',
linkpath: '',
ustar: 'ustar\u0000',
ustarver: '00',
uname: '',
gname: '',
devmaj: 0,
devmin: 0,
fill: '' } ]
, [ 'extendedHeader',
{ path: 'PaxHeader/fixtures/packtest/Ω.txt',
mode: 420,
uid: uid,
gid: gid,
type: 'x',
linkpath: '',
ustar: 'ustar\u0000',
ustarver: '00',
uname: '',
gname: '',
devmaj: 0,
devmin: 0,
fill: '' },
{ path: 'fixtures/packtest/Ω.txt',
uid: uid,
gid: gid,
size: 2 } ]
, [ 'entry',
{ path: 'fixtures/packtest/Ω.txt',
mode: 420,
uid: uid,
gid: gid,
size: 2,
type: '0',
linkpath: '',
ustar: 'ustar\u0000',
ustarver: '00',
uname: '',
gname: '',
devmaj: 0,
devmin: 0,
fill: '' } ]
, [ 'entry',
{ path: 'fixtures/r/',
mode: 493,
uid: uid,
gid: gid,
size: 0,
type: '5',
linkpath: '',
ustar: 'ustar\u0000',
ustarver: '00',
uname: '',
gname: '',
devmaj: 0,
devmin: 0,
fill: '' } ]
, [ 'entry',
{ path: 'fixtures/r/e/',
mode: 493,
uid: uid,
gid: gid,
size: 0,
type: '5',
linkpath: '',
ustar: 'ustar\u0000',
ustarver: '00',
uname: '',
gname: '',
devmaj: 0,
devmin: 0,
fill: '' } ]
, [ 'entry',
{ path: 'fixtures/r/e/a/',
mode: 493,
uid: uid,
gid: gid,
size: 0,
type: '5',
linkpath: '',
ustar: 'ustar\u0000',
ustarver: '00',
uname: '',
gname: '',
devmaj: 0,
devmin: 0,
fill: '' } ]
, [ 'entry',
{ path: 'fixtures/r/e/a/l/',
mode: 493,
uid: uid,
gid: gid,
size: 0,
type: '5',
linkpath: '',
ustar: 'ustar\u0000',
ustarver: '00',
uname: '',
gname: '',
devmaj: 0,
devmin: 0,
fill: '' } ]
, [ 'entry',
{ path: 'fixtures/r/e/a/l/l/',
mode: 493,
uid: uid,
gid: gid,
size: 0,
type: '5',
linkpath: '',
ustar: 'ustar\u0000',
ustarver: '00',
uname: '',
gname: '',
devmaj: 0,
devmin: 0,
fill: '' } ]
, [ 'entry',
{ path: 'fixtures/r/e/a/l/l/y/',
mode: 493,
uid: uid,
gid: gid,
size: 0,
type: '5',
linkpath: '',
ustar: 'ustar\u0000',
ustarver: '00',
uname: '',
gname: '',
devmaj: 0,
devmin: 0,
fill: '' } ]
, [ 'entry',
{ path: 'fixtures/r/e/a/l/l/y/-/',
mode: 493,
uid: uid,
gid: gid,
size: 0,
type: '5',
linkpath: '',
ustar: 'ustar\u0000',
ustarver: '00',
uname: '',
gname: '',
devmaj: 0,
devmin: 0,
fill: '' } ]
, [ 'entry',
{ path: 'fixtures/r/e/a/l/l/y/-/d/',
mode: 493,
uid: uid,
gid: gid,
size: 0,
type: '5',
linkpath: '',
ustar: 'ustar\u0000',
ustarver: '00',
uname: '',
gname: '',
devmaj: 0,
devmin: 0,
fill: '' } ]
, [ 'entry',
{ path: 'fixtures/r/e/a/l/l/y/-/d/e/',
mode: 493,
uid: uid,
gid: gid,
size: 0,
type: '5',
linkpath: '',
ustar: 'ustar\u0000',
ustarver: '00',
uname: '',
gname: '',
devmaj: 0,
devmin: 0,
fill: '' } ]
, [ 'entry',
{ path: 'fixtures/r/e/a/l/l/y/-/d/e/e/',
mode: 493,
uid: uid,
gid: gid,
size: 0,
type: '5',
linkpath: '',
ustar: 'ustar\u0000',
ustarver: '00',
uname: '',
gname: '',
devmaj: 0,
devmin: 0,
fill: '' } ]
, [ 'entry',
{ path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/',
mode: 493,
uid: uid,
gid: gid,
size: 0,
type: '5',
linkpath: '',
ustar: 'ustar\u0000',
ustarver: '00',
uname: '',
gname: '',
devmaj: 0,
devmin: 0,
fill: '' } ]
, [ 'entry',
{ path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/',
mode: 493,
uid: uid,
gid: gid,
size: 0,
type: '5',
linkpath: '',
ustar: 'ustar\u0000',
ustarver: '00',
uname: '',
gname: '',
devmaj: 0,
devmin: 0,
fill: '' } ]
, [ 'entry',
{ path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/',
mode: 493,
uid: uid,
gid: gid,
size: 0,
type: '5',
linkpath: '',
ustar: 'ustar\u0000',
ustarver: '00',
uname: '',
gname: '',
devmaj: 0,
devmin: 0,
fill: '' } ]
, [ 'entry',
{ path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/',
mode: 493,
uid: uid,
gid: gid,
size: 0,
type: '5',
linkpath: '',
ustar: 'ustar\u0000',
ustarver: '00',
uname: '',
gname: '',
devmaj: 0,
devmin: 0,
fill: '' } ]
, [ 'entry',
{ path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/',
mode: 493,
uid: uid,
gid: gid,
size: 0,
type: '5',
linkpath: '',
ustar: 'ustar\u0000',
ustarver: '00',
uname: '',
gname: '',
devmaj: 0,
devmin: 0,
fill: '' } ]
, [ 'entry',
{ path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/',
mode: 493,
uid: uid,
gid: gid,
size: 0,
type: '5',
linkpath: '',
ustar: 'ustar\u0000',
ustarver: '00',
uname: '',
gname: '',
devmaj: 0,
devmin: 0,
fill: '' } ]
, [ 'entry',
{ path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/',
mode: 493,
uid: uid,
gid: gid,
size: 0,
type: '5',
linkpath: '',
ustar: 'ustar\u0000',
ustarver: '00',
uname: '',
gname: '',
devmaj: 0,
devmin: 0,
fill: '' } ]
, [ 'entry',
{ path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/',
mode: 493,
uid: uid,
gid: gid,
size: 0,
type: '5',
linkpath: '',
ustar: 'ustar\u0000',
ustarver: '00',
uname: '',
gname: '',
devmaj: 0,
devmin: 0,
fill: '' } ]
, [ 'entry',
{ path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/',
mode: 493,
uid: uid,
gid: gid,
size: 0,
type: '5',
linkpath: '',
ustar: 'ustar\u0000',
ustarver: '00',
uname: '',
gname: '',
devmaj: 0,
devmin: 0,
fill: '' } ]
, [ 'entry',
{ path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/',
mode: 493,
uid: uid,
gid: gid,
size: 0,
type: '5',
linkpath: '',
ustar: 'ustar\u0000',
ustarver: '00',
uname: '',
gname: '',
devmaj: 0,
devmin: 0,
fill: '' } ]
, [ 'entry',
{ path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/',
mode: 493,
uid: uid,
gid: gid,
size: 0,
type: '5',
linkpath: '',
ustar: 'ustar\u0000',
ustarver: '00',
uname: '',
gname: '',
devmaj: 0,
devmin: 0,
fill: '' } ]
, [ 'entry',
{ path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/',
mode: 493,
uid: uid,
gid: gid,
size: 0,
type: '5',
linkpath: '',
ustar: 'ustar\u0000',
ustarver: '00',
uname: '',
gname: '',
devmaj: 0,
devmin: 0,
fill: '' } ]
, [ 'entry',
{ path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/',
mode: 493,
uid: uid,
gid: gid,
size: 0,
type: '5',
linkpath: '',
ustar: 'ustar\u0000',
ustarver: '00',
uname: '',
gname: '',
devmaj: 0,
devmin: 0,
fill: '' } ]
, [ 'entry',
{ path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
mode: 420,
uid: uid,
gid: gid,
size: 100,
type: '0',
linkpath: '',
ustar: 'ustar\u0000',
ustarver: '00',
uname: '',
gname: '',
devmaj: 0,
devmin: 0,
fill: '' } ]
, [ 'entry',
{ path: 'fixtures/symlink',
uid: uid,
gid: gid,
size: 0,
type: '2',
linkpath: 'hardlink-1',
ustar: 'ustar\u0000',
ustarver: '00',
uname: '',
gname: '',
devmaj: 0,
devmin: 0,
fill: '' } ]
, [ 'extendedHeader',
{ path: 'PaxHeader/fixtures/Ω.txt',
mode: 420,
uid: uid,
gid: gid,
type: 'x',
linkpath: '',
ustar: 'ustar\u0000',
ustarver: '00',
uname: '',
gname: '',
devmaj: 0,
devmin: 0,
fill: '' },
{ path: "fixtures/Ω.txt"
, uid: uid
, gid: gid
, size: 2 } ]
, [ 'entry',
{ path: 'fixtures/Ω.txt',
mode: 420,
uid: uid,
gid: gid,
size: 2,
type: '0',
linkpath: '',
ustar: 'ustar\u0000',
ustarver: '00',
uname: '',
gname: '',
devmaj: 0,
devmin: 0,
fill: '' } ]
]
// first, make sure that the hardlinks are actually hardlinks, or this
// won't work. Git has a way of replacing them with a copy.
var hard1 = path.resolve(__dirname, "fixtures/hardlink-1")
, hard2 = path.resolve(__dirname, "fixtures/hardlink-2")
, fs = require("fs")
try { fs.unlinkSync(hard2) } catch (e) {}
fs.linkSync(hard1, hard2)
tap.test("with global header", { timeout: 10000 }, function (t) {
runTest(t, true)
})
tap.test("without global header", { timeout: 10000 }, function (t) {
runTest(t, false)
})
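// Case-insensitive sort with a case-sensitive tiebreak, so the entry order
// (and therefore the expected-entries list above) is deterministic.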
function alphasort (a, b) {
return a === b ? 0
: a.toLowerCase() > b.toLowerCase() ? 1
: a.toLowerCase() < b.toLowerCase() ? -1
: a > b ? 1
: -1
}
function runTest (t, doGH) {
var reader = Reader({ path: input
, filter: function () {
return !this.path.match(/\.(tar|hex)$/)
}
, sort: alphasort
})
var props = doGH ? pkg : {}
props.noProprietary = true
var pack = Pack(props)
var writer = Writer(target)
// global header should be skipped regardless, since it has no content.
var entry = 0
t.ok(reader, "reader ok")
t.ok(pack, "pack ok")
t.ok(writer, "writer ok")
pack.pipe(writer)
var parse = tar.Parse()
t.ok(parse, "parser should be ok")
pack.on("data", function (c) {
// console.error("PACK DATA")
if (c.length !== 512) {
// this one is too noisy, only assert if it'll be relevant
t.equal(c.length, 512, "parser should emit data in 512byte blocks")
}
parse.write(c)
})
pack.on("end", function () {
// console.error("PACK END")
t.pass("parser ends")
parse.end()
})
pack.on("error", function (er) {
t.fail("pack error", er)
})
parse.on("error", function (er) {
t.fail("parse error", er)
})
writer.on("error", function (er) {
t.fail("writer error", er)
})
reader.on("error", function (er) {
t.fail("reader error", er)
})
parse.on("*", function (ev, e) {
var wanted = entries[entry++]
if (!wanted) {
t.fail("unexpected event: "+ev)
return
}
t.equal(ev, wanted[0], "event type should be "+wanted[0])
if (ev !== wanted[0] || e.path !== wanted[1].path) {
console.error("wanted", wanted)
console.error([ev, e.props])
e.on("end", function () {
console.error(e.fields)
throw "break"
})
}
t.has(e.props, wanted[1], "properties "+wanted[1].path)
if (wanted[2]) {
e.on("end", function () {
if (!e.fields) {
t.ok(e.fields, "should get fields")
} else {
t.has(e.fields, wanted[2], "should get expected fields")
}
})
}
})
reader.pipe(pack)
writer.on("close", function () {
t.equal(entry, entries.length, "should get all expected entries")
t.pass("it finished")
t.end()
})
}

952
Client/node_modules/tar/test/pack.js generated vendored Executable file
View File

@@ -0,0 +1,952 @@
// the symlink file is excluded from git, because it makes
// windows freak the hell out.
var fs = require("fs")
, path = require("path")
, symlink = path.resolve(__dirname, "fixtures/symlink")
try { fs.unlinkSync(symlink) } catch (e) {}
fs.symlinkSync("./hardlink-1", symlink)
process.on("exit", function () {
fs.unlinkSync(symlink)
})
var tap = require("tap")
, tar = require("../tar.js")
, pkg = require("../package.json")
, Pack = tar.Pack
, fstream = require("fstream")
, Reader = fstream.Reader
, Writer = fstream.Writer
, input = path.resolve(__dirname, "fixtures/")
, target = path.resolve(__dirname, "tmp/pack.tar")
, uid = process.getuid ? process.getuid() : 0
, gid = process.getgid ? process.getgid() : 0
, entries =
// the global header and root fixtures/ dir are going to get
// a different date each time, so omit that bit.
// Also, dev/ino values differ across machines, so that's not
// included.
[ [ 'globalExtendedHeader',
{ path: 'PaxHeader/',
mode: 438,
uid: 0,
gid: 0,
type: 'g',
linkpath: '',
ustar: 'ustar\u0000',
ustarver: '00',
uname: '',
gname: '',
devmaj: 0,
devmin: 0,
fill: '' },
{ "NODETAR.author": pkg.author,
"NODETAR.name": pkg.name,
"NODETAR.description": pkg.description,
"NODETAR.version": pkg.version,
"NODETAR.repository.type": pkg.repository.type,
"NODETAR.repository.url": pkg.repository.url,
"NODETAR.main": pkg.main,
"NODETAR.scripts.test": pkg.scripts.test } ]
, [ 'entry',
{ path: 'fixtures/',
mode: 493,
uid: uid,
gid: gid,
size: 0,
type: '5',
linkpath: '',
ustar: 'ustar\u0000',
ustarver: '00',
uname: '',
gname: '',
devmaj: 0,
devmin: 0,
fill: '' } ]
, [ 'extendedHeader',
{ path: 'PaxHeader/fixtures/200cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
mode: 420,
uid: uid,
gid: gid,
type: 'x',
linkpath: '',
ustar: 'ustar\u0000',
ustarver: '00',
uname: '',
gname: '',
devmaj: 0,
devmin: 0,
fill: '' },
{ path: 'fixtures/200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
'NODETAR.depth': '1',
'NODETAR.type': 'File',
nlink: 1,
uid: uid,
gid: gid,
size: 200,
'NODETAR.blksize': '4096',
'NODETAR.blocks': '8' } ]
, [ 'entry',
{ path: 'fixtures/200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
mode: 420,
uid: uid,
gid: gid,
size: 200,
type: '0',
linkpath: '',
ustar: 'ustar\u0000',
ustarver: '00',
uname: '',
gname: '',
devmaj: 0,
devmin: 0,
fill: '',
'NODETAR.depth': '1',
'NODETAR.type': 'File',
nlink: 1,
'NODETAR.blksize': '4096',
'NODETAR.blocks': '8' } ]
, [ 'entry',
{ path: 'fixtures/a.txt',
mode: 420,
uid: uid,
gid: gid,
size: 257,
type: '0',
linkpath: '',
ustar: 'ustar\u0000',
ustarver: '00',
uname: '',
gname: '',
devmaj: 0,
devmin: 0,
fill: '' } ]
, [ 'entry',
{ path: 'fixtures/b.txt',
mode: 420,
uid: uid,
gid: gid,
size: 512,
type: '0',
linkpath: '',
ustar: 'ustar\u0000',
ustarver: '00',
uname: '',
gname: '',
devmaj: 0,
devmin: 0,
fill: '' } ]
, [ 'entry',
{ path: 'fixtures/c.txt',
mode: 420,
uid: uid,
gid: gid,
size: 513,
type: '0',
linkpath: '',
ustar: 'ustar\u0000',
ustarver: '00',
uname: '',
gname: '',
devmaj: 0,
devmin: 0,
fill: '' } ]
, [ 'entry',
{ path: 'fixtures/cc.txt',
mode: 420,
uid: uid,
gid: gid,
size: 513,
type: '0',
linkpath: '',
ustar: 'ustar\u0000',
ustarver: '00',
uname: '',
gname: '',
devmaj: 0,
devmin: 0,
fill: '' } ]
, [ 'entry',
{ path: 'fixtures/dir/',
mode: 488,
uid: uid,
gid: gid,
size: 0,
type: '5',
linkpath: '',
ustar: 'ustar\u0000',
ustarver: '00',
uname: '',
gname: '',
devmaj: 0,
devmin: 0,
fill: '' } ]
, [ 'entry',
{ path: 'fixtures/dir/sub/',
mode: 488,
uid: uid,
gid: gid,
size: 0,
type: '5',
linkpath: '',
ustar: 'ustar\u0000',
ustarver: '00',
uname: '',
gname: '',
devmaj: 0,
devmin: 0,
fill: '' } ]
, [ 'entry',
{ path: 'fixtures/foo.js',
mode: 420,
uid: uid,
gid: gid,
size: 4,
type: '0',
linkpath: '',
ustar: 'ustar\u0000',
ustarver: '00',
uname: '',
gname: '',
devmaj: 0,
devmin: 0,
fill: '' } ]
, [ 'entry',
{ path: 'fixtures/hardlink-1',
mode: 420,
uid: uid,
gid: gid,
size: 200,
type: '0',
linkpath: '',
ustar: 'ustar\u0000',
ustarver: '00',
uname: '',
gname: '',
devmaj: 0,
devmin: 0,
fill: '' } ]
, [ 'entry',
{ path: 'fixtures/hardlink-2',
mode: 420,
uid: uid,
gid: gid,
size: 0,
type: '1',
linkpath: 'fixtures/hardlink-1',
ustar: 'ustar\u0000',
ustarver: '00',
uname: '',
gname: '',
devmaj: 0,
devmin: 0,
fill: '' } ]
, [ 'entry',
{ path: 'fixtures/omega.txt',
mode: 420,
uid: uid,
gid: gid,
size: 2,
type: '0',
linkpath: '',
ustar: 'ustar\u0000',
ustarver: '00',
uname: '',
gname: '',
devmaj: 0,
devmin: 0,
fill: '' } ]
, [ 'entry',
{ path: 'fixtures/packtest/',
mode: 493,
uid: uid,
gid: gid,
size: 0,
type: '5',
linkpath: '',
ustar: 'ustar\u0000',
ustarver: '00',
uname: '',
gname: '',
devmaj: 0,
devmin: 0,
fill: '' } ]
, [ 'entry',
{ path: 'fixtures/packtest/omega.txt',
mode: 420,
uid: uid,
gid: gid,
size: 2,
type: '0',
linkpath: '',
ustar: 'ustar\u0000',
ustarver: '00',
uname: '',
gname: '',
devmaj: 0,
devmin: 0,
fill: '' } ]
, [ 'entry',
{ path: 'fixtures/packtest/star.4.html',
mode: 420,
uid: uid,
gid: gid,
size: 54081,
type: '0',
linkpath: '',
ustar: 'ustar\u0000',
ustarver: '00',
uname: '',
gname: '',
devmaj: 0,
devmin: 0,
fill: '' } ]
, [ 'extendedHeader',
{ path: 'PaxHeader/fixtures/packtest/Ω.txt',
mode: 420,
uid: uid,
gid: gid,
type: 'x',
linkpath: '',
ustar: 'ustar\u0000',
ustarver: '00',
uname: '',
gname: '',
devmaj: 0,
devmin: 0,
fill: '' },
{ path: 'fixtures/packtest/Ω.txt',
'NODETAR.depth': '2',
'NODETAR.type': 'File',
nlink: 1,
uid: uid,
gid: gid,
size: 2,
'NODETAR.blksize': '4096',
'NODETAR.blocks': '8' } ]
, [ 'entry',
{ path: 'fixtures/packtest/Ω.txt',
mode: 420,
uid: uid,
gid: gid,
size: 2,
type: '0',
linkpath: '',
ustar: 'ustar\u0000',
ustarver: '00',
uname: '',
gname: '',
devmaj: 0,
devmin: 0,
fill: '',
'NODETAR.depth': '2',
'NODETAR.type': 'File',
nlink: 1,
'NODETAR.blksize': '4096',
'NODETAR.blocks': '8' } ]
, [ 'entry',
{ path: 'fixtures/r/',
mode: 493,
uid: uid,
gid: gid,
size: 0,
type: '5',
linkpath: '',
ustar: 'ustar\u0000',
ustarver: '00',
uname: '',
gname: '',
devmaj: 0,
devmin: 0,
fill: '' } ]
, [ 'entry',
{ path: 'fixtures/r/e/',
mode: 493,
uid: uid,
gid: gid,
size: 0,
type: '5',
linkpath: '',
ustar: 'ustar\u0000',
ustarver: '00',
uname: '',
gname: '',
devmaj: 0,
devmin: 0,
fill: '' } ]
, [ 'entry',
{ path: 'fixtures/r/e/a/',
mode: 493,
uid: uid,
gid: gid,
size: 0,
type: '5',
linkpath: '',
ustar: 'ustar\u0000',
ustarver: '00',
uname: '',
gname: '',
devmaj: 0,
devmin: 0,
fill: '' } ]
, [ 'entry',
{ path: 'fixtures/r/e/a/l/',
mode: 493,
uid: uid,
gid: gid,
size: 0,
type: '5',
linkpath: '',
ustar: 'ustar\u0000',
ustarver: '00',
uname: '',
gname: '',
devmaj: 0,
devmin: 0,
fill: '' } ]
, [ 'entry',
{ path: 'fixtures/r/e/a/l/l/',
mode: 493,
uid: uid,
gid: gid,
size: 0,
type: '5',
linkpath: '',
ustar: 'ustar\u0000',
ustarver: '00',
uname: '',
gname: '',
devmaj: 0,
devmin: 0,
fill: '' } ]
, [ 'entry',
{ path: 'fixtures/r/e/a/l/l/y/',
mode: 493,
uid: uid,
gid: gid,
size: 0,
type: '5',
linkpath: '',
ustar: 'ustar\u0000',
ustarver: '00',
uname: '',
gname: '',
devmaj: 0,
devmin: 0,
fill: '' } ]
, [ 'entry',
{ path: 'fixtures/r/e/a/l/l/y/-/',
mode: 493,
uid: uid,
gid: gid,
size: 0,
type: '5',
linkpath: '',
ustar: 'ustar\u0000',
ustarver: '00',
uname: '',
gname: '',
devmaj: 0,
devmin: 0,
fill: '' } ]
, [ 'entry',
{ path: 'fixtures/r/e/a/l/l/y/-/d/',
mode: 493,
uid: uid,
gid: gid,
size: 0,
type: '5',
linkpath: '',
ustar: 'ustar\u0000',
ustarver: '00',
uname: '',
gname: '',
devmaj: 0,
devmin: 0,
fill: '' } ]
, [ 'entry',
{ path: 'fixtures/r/e/a/l/l/y/-/d/e/',
mode: 493,
uid: uid,
gid: gid,
size: 0,
type: '5',
linkpath: '',
ustar: 'ustar\u0000',
ustarver: '00',
uname: '',
gname: '',
devmaj: 0,
devmin: 0,
fill: '' } ]
, [ 'entry',
{ path: 'fixtures/r/e/a/l/l/y/-/d/e/e/',
mode: 493,
uid: uid,
gid: gid,
size: 0,
type: '5',
linkpath: '',
ustar: 'ustar\u0000',
ustarver: '00',
uname: '',
gname: '',
devmaj: 0,
devmin: 0,
fill: '' } ]
, [ 'entry',
{ path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/',
mode: 493,
uid: uid,
gid: gid,
size: 0,
type: '5',
linkpath: '',
ustar: 'ustar\u0000',
ustarver: '00',
uname: '',
gname: '',
devmaj: 0,
devmin: 0,
fill: '' } ]
, [ 'entry',
{ path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/',
mode: 493,
uid: uid,
gid: gid,
size: 0,
type: '5',
linkpath: '',
ustar: 'ustar\u0000',
ustarver: '00',
uname: '',
gname: '',
devmaj: 0,
devmin: 0,
fill: '' } ]
, [ 'entry',
{ path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/',
mode: 493,
uid: uid,
gid: gid,
size: 0,
type: '5',
linkpath: '',
ustar: 'ustar\u0000',
ustarver: '00',
uname: '',
gname: '',
devmaj: 0,
devmin: 0,
fill: '' } ]
, [ 'entry',
{ path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/',
mode: 493,
uid: uid,
gid: gid,
size: 0,
type: '5',
linkpath: '',
ustar: 'ustar\u0000',
ustarver: '00',
uname: '',
gname: '',
devmaj: 0,
devmin: 0,
fill: '' } ]
, [ 'entry',
{ path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/',
mode: 493,
uid: uid,
gid: gid,
size: 0,
type: '5',
linkpath: '',
ustar: 'ustar\u0000',
ustarver: '00',
uname: '',
gname: '',
devmaj: 0,
devmin: 0,
fill: '' } ]
, [ 'entry',
{ path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/',
mode: 493,
uid: uid,
gid: gid,
size: 0,
type: '5',
linkpath: '',
ustar: 'ustar\u0000',
ustarver: '00',
uname: '',
gname: '',
devmaj: 0,
devmin: 0,
fill: '' } ]
, [ 'entry',
{ path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/',
mode: 493,
uid: uid,
gid: gid,
size: 0,
type: '5',
linkpath: '',
ustar: 'ustar\u0000',
ustarver: '00',
uname: '',
gname: '',
devmaj: 0,
devmin: 0,
fill: '' } ]
, [ 'entry',
{ path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/',
mode: 493,
uid: uid,
gid: gid,
size: 0,
type: '5',
linkpath: '',
ustar: 'ustar\u0000',
ustarver: '00',
uname: '',
gname: '',
devmaj: 0,
devmin: 0,
fill: '' } ]
, [ 'entry',
{ path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/',
mode: 493,
uid: uid,
gid: gid,
size: 0,
type: '5',
linkpath: '',
ustar: 'ustar\u0000',
ustarver: '00',
uname: '',
gname: '',
devmaj: 0,
devmin: 0,
fill: '' } ]
, [ 'entry',
{ path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/',
mode: 493,
uid: uid,
gid: gid,
size: 0,
type: '5',
linkpath: '',
ustar: 'ustar\u0000',
ustarver: '00',
uname: '',
gname: '',
devmaj: 0,
devmin: 0,
fill: '' } ]
, [ 'entry',
{ path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/',
mode: 493,
uid: uid,
gid: gid,
size: 0,
type: '5',
linkpath: '',
ustar: 'ustar\u0000',
ustarver: '00',
uname: '',
gname: '',
devmaj: 0,
devmin: 0,
fill: '' } ]
, [ 'entry',
{ path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/',
mode: 493,
uid: uid,
gid: gid,
size: 0,
type: '5',
linkpath: '',
ustar: 'ustar\u0000',
ustarver: '00',
uname: '',
gname: '',
devmaj: 0,
devmin: 0,
fill: '' } ]
, [ 'entry',
{ path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/',
mode: 493,
uid: uid,
gid: gid,
size: 0,
type: '5',
linkpath: '',
ustar: 'ustar\u0000',
ustarver: '00',
uname: '',
gname: '',
devmaj: 0,
devmin: 0,
fill: '' } ]
, [ 'entry',
{ path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
mode: 420,
uid: uid,
gid: gid,
size: 100,
type: '0',
linkpath: '',
ustar: 'ustar\u0000',
ustarver: '00',
uname: '',
gname: '',
devmaj: 0,
devmin: 0,
fill: '' } ]
, [ 'entry',
{ path: 'fixtures/symlink',
uid: uid,
gid: gid,
size: 0,
type: '2',
linkpath: 'hardlink-1',
ustar: 'ustar\u0000',
ustarver: '00',
uname: '',
gname: '',
devmaj: 0,
devmin: 0,
fill: '' } ]
, [ 'extendedHeader',
{ path: 'PaxHeader/fixtures/Ω.txt',
mode: 420,
uid: uid,
gid: gid,
type: 'x',
linkpath: '',
ustar: 'ustar\u0000',
ustarver: '00',
uname: '',
gname: '',
devmaj: 0,
devmin: 0,
fill: '' },
{ path: "fixtures/Ω.txt"
, "NODETAR.depth": "1"
, "NODETAR.type": "File"
, nlink: 1
, uid: uid
, gid: gid
, size: 2
, "NODETAR.blksize": "4096"
, "NODETAR.blocks": "8" } ]
, [ 'entry',
{ path: 'fixtures/Ω.txt',
mode: 420,
uid: uid,
gid: gid,
size: 2,
type: '0',
linkpath: '',
ustar: 'ustar\u0000',
ustarver: '00',
uname: '',
gname: '',
devmaj: 0,
devmin: 0,
fill: '',
'NODETAR.depth': '1',
'NODETAR.type': 'File',
nlink: 1,
'NODETAR.blksize': '4096',
'NODETAR.blocks': '8' } ]
]
// first, make sure that the hardlinks are actually hardlinks, or this
// won't work. Git has a way of replacing them with a copy.
var hard1 = path.resolve(__dirname, "fixtures/hardlink-1")
, hard2 = path.resolve(__dirname, "fixtures/hardlink-2")
, fs = require("fs")
try { fs.unlinkSync(hard2) } catch (e) {}
fs.linkSync(hard1, hard2)
tap.test("with global header", { timeout: 10000 }, function (t) {
runTest(t, true)
})
tap.test("without global header", { timeout: 10000 }, function (t) {
runTest(t, false)
})
tap.test("with from base", { timeout: 10000 }, function (t) {
runTest(t, true, true)
})
function alphasort (a, b) {
return a === b ? 0
: a.toLowerCase() > b.toLowerCase() ? 1
: a.toLowerCase() < b.toLowerCase() ? -1
: a > b ? 1
: -1
}
function runTest (t, doGH, doFromBase) {
var reader = Reader({ path: input
, filter: function () {
return !this.path.match(/\.(tar|hex)$/)
}
, sort: alphasort
})
var props = doGH ? pkg : {}
if(doFromBase) props.fromBase = true;
var pack = Pack(props)
var writer = Writer(target)
// skip the global header if we're not doing that.
var entry = doGH ? 0 : 1
t.ok(reader, "reader ok")
t.ok(pack, "pack ok")
t.ok(writer, "writer ok")
pack.pipe(writer)
var parse = tar.Parse()
t.ok(parse, "parser should be ok")
pack.on("data", function (c) {
// console.error("PACK DATA")
if (c.length !== 512) {
// this one is too noisy, only assert if it'll be relevant
t.equal(c.length, 512, "parser should emit data in 512byte blocks")
}
parse.write(c)
})
pack.on("end", function () {
// console.error("PACK END")
t.pass("parser ends")
parse.end()
})
pack.on("error", function (er) {
t.fail("pack error", er)
})
parse.on("error", function (er) {
t.fail("parse error", er)
})
writer.on("error", function (er) {
t.fail("writer error", er)
})
reader.on("error", function (er) {
t.fail("reader error", er)
})
parse.on("*", function (ev, e) {
var wanted = entries[entry++]
if (!wanted) {
t.fail("unexpected event: "+ev)
return
}
t.equal(ev, wanted[0], "event type should be "+wanted[0])
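    // fromBase packs entries relative to the input dir itself, so strip the
    // leading "fixtures/" from the expected paths before comparing.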
if(doFromBase) {
if(wanted[1].path.indexOf('fixtures/') && wanted[1].path.length == 100)
wanted[1].path = wanted[1].path.replace('fixtures/', '') + 'ccccccccc'
if(wanted[1]) wanted[1].path = wanted[1].path.replace('fixtures/', '').replace('//', '/')
if(wanted[1].path == '') wanted[1].path = '/'
if(wanted[2] && wanted[2].path) wanted[2].path = wanted[2].path.replace('fixtures', '').replace(/^\//, '')
wanted[1].linkpath = wanted[1].linkpath.replace('fixtures/', '')
}
if (ev !== wanted[0] || e.path !== wanted[1].path) {
console.error("wanted", wanted)
console.error([ev, e.props])
e.on("end", function () {
console.error(e.fields)
throw "break"
})
}
t.has(e.props, wanted[1], "properties "+wanted[1].path)
if (wanted[2]) {
e.on("end", function () {
if (!e.fields) {
t.ok(e.fields, "should get fields")
} else {
t.has(e.fields, wanted[2], "should get expected fields")
}
})
}
})
reader.pipe(pack)
writer.on("close", function () {
t.equal(entry, entries.length, "should get all expected entries")
t.pass("it finished")
t.end()
})
}

29
Client/node_modules/tar/test/parse-discard.js generated vendored Executable file
View File

@@ -0,0 +1,29 @@
var tap = require("tap")
, tar = require("../tar.js")
, fs = require("fs")
, path = require("path")
, file = path.resolve(__dirname, "fixtures/c.tar")
tap.test("parser test", function (t) {
var parser = tar.Parse()
var total = 0
var dataTotal = 0
parser.on("end", function () {
t.equals(total-513,dataTotal,'should have discarded only c.txt')
t.end()
})
fs.createReadStream(file)
.pipe(parser)
.on('entry',function(entry){
if(entry.path === 'c.txt') entry.abort()
total += entry.size;
entry.on('data',function(data){
dataTotal += data.length
})
})
})

359
Client/node_modules/tar/test/parse.js generated vendored Executable file
View File

@@ -0,0 +1,359 @@
var tap = require("tap")
, tar = require("../tar.js")
, fs = require("fs")
, path = require("path")
, file = path.resolve(__dirname, "fixtures/c.tar")
, index = 0
, expect =
[ [ 'entry',
{ path: 'c.txt',
mode: 420,
uid: 24561,
gid: 20,
size: 513,
mtime: new Date('Wed, 26 Oct 2011 01:10:58 GMT'),
cksum: 5422,
type: '0',
linkpath: '',
ustar: 'ustar\0',
ustarver: '00',
uname: 'isaacs',
gname: 'staff',
devmaj: 0,
devmin: 0,
fill: '' },
undefined ],
[ 'entry',
{ path: 'cc.txt',
mode: 420,
uid: 24561,
gid: 20,
size: 513,
mtime: new Date('Wed, 26 Oct 2011 01:11:02 GMT'),
cksum: 5525,
type: '0',
linkpath: '',
ustar: 'ustar\0',
ustarver: '00',
uname: 'isaacs',
gname: 'staff',
devmaj: 0,
devmin: 0,
fill: '' },
undefined ],
[ 'entry',
{ path: 'r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
mode: 420,
uid: 24561,
gid: 20,
size: 100,
mtime: new Date('Thu, 27 Oct 2011 03:43:23 GMT'),
cksum: 18124,
type: '0',
linkpath: '',
ustar: 'ustar\0',
ustarver: '00',
uname: 'isaacs',
gname: 'staff',
devmaj: 0,
devmin: 0,
fill: '' },
undefined ],
[ 'entry',
{ path: 'Ω.txt',
mode: 420,
uid: 24561,
gid: 20,
size: 2,
mtime: new Date('Thu, 27 Oct 2011 17:51:49 GMT'),
cksum: 5695,
type: '0',
linkpath: '',
ustar: 'ustar\0',
ustarver: '00',
uname: 'isaacs',
gname: 'staff',
devmaj: 0,
devmin: 0,
fill: '' },
undefined ],
[ 'extendedHeader',
{ path: 'PaxHeader/Ω.txt',
mode: 420,
uid: 24561,
gid: 20,
size: 120,
mtime: new Date('Thu, 27 Oct 2011 17:51:49 GMT'),
cksum: 6702,
type: 'x',
linkpath: '',
ustar: 'ustar\0',
ustarver: '00',
uname: 'isaacs',
gname: 'staff',
devmaj: 0,
devmin: 0,
fill: '' },
{ path: 'Ω.txt',
ctime: 1319737909,
atime: 1319739061,
dev: 234881026,
ino: 51693379,
nlink: 1 } ],
[ 'entry',
{ path: 'Ω.txt',
mode: 420,
uid: 24561,
gid: 20,
size: 2,
mtime: new Date('Thu, 27 Oct 2011 17:51:49 GMT'),
cksum: 5695,
type: '0',
linkpath: '',
ustar: 'ustar\0',
ustarver: '00',
uname: 'isaacs',
gname: 'staff',
devmaj: 0,
devmin: 0,
fill: '',
ctime: new Date('Thu, 27 Oct 2011 17:51:49 GMT'),
atime: new Date('Thu, 27 Oct 2011 18:11:01 GMT'),
dev: 234881026,
ino: 51693379,
nlink: 1 },
undefined ],
[ 'extendedHeader',
{ path: 'PaxHeader/200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
mode: 420,
uid: 24561,
gid: 20,
size: 353,
mtime: new Date('Thu, 27 Oct 2011 03:41:08 GMT'),
cksum: 14488,
type: 'x',
linkpath: '',
ustar: 'ustar\0',
ustarver: '00',
uname: 'isaacs',
gname: 'staff',
devmaj: 0,
devmin: 0,
fill: '' },
{ path: '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
ctime: 1319686868,
atime: 1319741254,
'LIBARCHIVE.creationtime': '1319686852',
dev: 234881026,
ino: 51681874,
nlink: 1 } ],
[ 'entry',
{ path: '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
mode: 420,
uid: 24561,
gid: 20,
size: 200,
mtime: new Date('Thu, 27 Oct 2011 03:41:08 GMT'),
cksum: 14570,
type: '0',
linkpath: '',
ustar: 'ustar\0',
ustarver: '00',
uname: 'isaacs',
gname: 'staff',
devmaj: 0,
devmin: 0,
fill: '',
ctime: new Date('Thu, 27 Oct 2011 03:41:08 GMT'),
atime: new Date('Thu, 27 Oct 2011 18:47:34 GMT'),
'LIBARCHIVE.creationtime': '1319686852',
dev: 234881026,
ino: 51681874,
nlink: 1 },
undefined ],
[ 'longPath',
{ path: '././@LongLink',
mode: 0,
uid: 0,
gid: 0,
size: 201,
mtime: new Date('Thu, 01 Jan 1970 00:00:00 GMT'),
cksum: 4976,
type: 'L',
linkpath: '',
ustar: false },
'200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc' ],
[ 'entry',
{ path: '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
mode: 420,
uid: 1000,
gid: 1000,
size: 201,
mtime: new Date('Thu, 27 Oct 2011 22:21:50 GMT'),
cksum: 14086,
type: '0',
linkpath: '',
ustar: false },
undefined ],
[ 'longLinkpath',
{ path: '././@LongLink',
mode: 0,
uid: 0,
gid: 0,
size: 201,
mtime: new Date('Thu, 01 Jan 1970 00:00:00 GMT'),
cksum: 4975,
type: 'K',
linkpath: '',
ustar: false },
'200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc' ],
[ 'longPath',
{ path: '././@LongLink',
mode: 0,
uid: 0,
gid: 0,
size: 201,
mtime: new Date('Thu, 01 Jan 1970 00:00:00 GMT'),
cksum: 4976,
type: 'L',
linkpath: '',
ustar: false },
'200LLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLL' ],
[ 'entry',
{ path: '200LLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLL',
mode: 511,
uid: 1000,
gid: 1000,
size: 0,
mtime: new Date('Fri, 28 Oct 2011 23:05:17 GMT'),
cksum: 21603,
type: '2',
linkpath: '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
ustar: false },
undefined ],
[ 'extendedHeader',
{ path: 'PaxHeader/200-hard',
mode: 420,
uid: 24561,
gid: 20,
size: 143,
mtime: new Date('Thu, 27 Oct 2011 03:41:08 GMT'),
cksum: 6533,
type: 'x',
linkpath: '',
ustar: 'ustar\0',
ustarver: '00',
uname: 'isaacs',
gname: 'staff',
devmaj: 0,
devmin: 0,
fill: '' },
{ ctime: 1320617144,
atime: 1320617232,
'LIBARCHIVE.creationtime': '1319686852',
dev: 234881026,
ino: 51681874,
nlink: 2 } ],
[ 'entry',
{ path: '200-hard',
mode: 420,
uid: 24561,
gid: 20,
size: 200,
mtime: new Date('Thu, 27 Oct 2011 03:41:08 GMT'),
cksum: 5526,
type: '0',
linkpath: '',
ustar: 'ustar\0',
ustarver: '00',
uname: 'isaacs',
gname: 'staff',
devmaj: 0,
devmin: 0,
fill: '',
ctime: new Date('Sun, 06 Nov 2011 22:05:44 GMT'),
atime: new Date('Sun, 06 Nov 2011 22:07:12 GMT'),
'LIBARCHIVE.creationtime': '1319686852',
dev: 234881026,
ino: 51681874,
nlink: 2 },
undefined ],
[ 'extendedHeader',
{ path: 'PaxHeader/200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
mode: 420,
uid: 24561,
gid: 20,
size: 353,
mtime: new Date('Thu, 27 Oct 2011 03:41:08 GMT'),
cksum: 14488,
type: 'x',
linkpath: '',
ustar: 'ustar\0',
ustarver: '00',
uname: 'isaacs',
gname: 'staff',
devmaj: 0,
devmin: 0,
fill: '' },
{ path: '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
ctime: 1320617144,
atime: 1320617406,
'LIBARCHIVE.creationtime': '1319686852',
dev: 234881026,
ino: 51681874,
nlink: 2 } ],
[ 'entry',
{ path: '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
mode: 420,
uid: 24561,
gid: 20,
size: 0,
mtime: new Date('Thu, 27 Oct 2011 03:41:08 GMT'),
cksum: 15173,
type: '1',
linkpath: '200-hard',
ustar: 'ustar\0',
ustarver: '00',
uname: 'isaacs',
gname: 'staff',
devmaj: 0,
devmin: 0,
fill: '',
ctime: new Date('Sun, 06 Nov 2011 22:05:44 GMT'),
atime: new Date('Sun, 06 Nov 2011 22:10:06 GMT'),
'LIBARCHIVE.creationtime': '1319686852',
dev: 234881026,
ino: 51681874,
nlink: 2 },
undefined ] ]
tap.test("parser test", function (t) {
var parser = tar.Parse()
parser.on("end", function () {
t.equal(index, expect.length, "saw all expected events")
t.end()
})
fs.createReadStream(file)
.pipe(parser)
.on("*", function (ev, entry) {
var wanted = expect[index]
if (!wanted) {
return t.fail("Unexpected event: " + ev)
}
var result = [ev, entry.props]
entry.on("end", function () {
result.push(entry.fields || entry.body)
t.equal(ev, wanted[0], index + " event type")
t.equivalent(entry.props, wanted[1], wanted[1].path + " entry properties")
if (wanted[2]) {
t.equivalent(result[2], wanted[2], "metadata values")
}
index ++
})
})
})
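// Illustrative sketch (not part of the original test): the same parser can
// be consumed directly via its "entry" event, without the expect table.
// The archive path here is hypothetical.
//
//   var tar = require("tar")
//     , fs = require("fs")
//
//   fs.createReadStream("fixtures/c.tar")   // hypothetical archive path
//     .pipe(tar.Parse())
//     .on("entry", function (entry) {
//       // entry.props holds the parsed header fields (path, type, size, ...)
//       console.log(entry.props.type, entry.props.path)
//     })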

20
Client/node_modules/tar/test/zz-cleanup.js generated vendored Executable file
View File

@@ -0,0 +1,20 @@
// clean up the fixtures
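// (Presumably the "zz-" filename prefix makes this cleanup run after the
// other tests, since test files are typically executed in alphabetical
// order; that is an assumption about the runner, not stated in the source.)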
var tap = require("tap")
, rimraf = require("rimraf")
, test = tap.test
, path = require("path")
test("clean fixtures", function (t) {
rimraf(path.resolve(__dirname, "fixtures"), function (er) {
t.ifError(er, "rimraf ./fixtures/")
t.end()
})
})
test("clean tmp", function (t) {
rimraf(path.resolve(__dirname, "tmp"), function (er) {
t.ifError(er, "rimraf ./tmp/")
t.end()
})
})