lauraF committed 3 years ago
commit f954e2e8a6
  1. 41    node_modules/.package-lock.json
  2. 16    node_modules/busboy/.travis.yml
  3. 19    node_modules/busboy/LICENSE
  4. 222   node_modules/busboy/README.md
  5. 73    node_modules/busboy/deps/encoding/encoding-indexes.js
  6. 2391  node_modules/busboy/deps/encoding/encoding.js
  7. 88    node_modules/busboy/lib/main.js
  8. 325   node_modules/busboy/lib/types/multipart.js
  9. 214   node_modules/busboy/lib/types/urlencoded.js
  10. 172  node_modules/busboy/lib/utils.js
  11. 16   node_modules/busboy/package.json
  12. 80   node_modules/busboy/test/test-types-multipart-stream-pause.js
  13. 343  node_modules/busboy/test/test-types-multipart.js
  14. 183  node_modules/busboy/test/test-types-urlencoded.js
  15. 66   node_modules/busboy/test/test-utils-decoder.js
  16. 96   node_modules/busboy/test/test-utils-parse-params.js
  17. 4    node_modules/busboy/test/test.js
  18. 16   node_modules/dicer/.travis.yml
  19. 19   node_modules/dicer/LICENSE
  20. 122  node_modules/dicer/README.md
  21. 63   node_modules/dicer/bench/dicer-bench-multipart-parser.js
  22. 70   node_modules/dicer/bench/formidable-bench-multipart-parser.js
  23. 56   node_modules/dicer/bench/multipartser-bench-multipart-parser.js
  24. 76   node_modules/dicer/bench/multiparty-bench-multipart-parser.js
  25. 63   node_modules/dicer/bench/parted-bench-multipart-parser.js
  26. 485  node_modules/dicer/bench/parted-multipart.js
  27. 239  node_modules/dicer/lib/Dicer.js
  28. 110  node_modules/dicer/lib/HeaderParser.js
  29. 11   node_modules/dicer/lib/PartStream.js
  30. 16   node_modules/dicer/package.json
  31. 31   node_modules/dicer/test/fixtures/many-noend/original
  32. 1    node_modules/dicer/test/fixtures/many-noend/part1
  33. 1    node_modules/dicer/test/fixtures/many-noend/part1.header
  34. 0    node_modules/dicer/test/fixtures/many-noend/part2
  35. 1    node_modules/dicer/test/fixtures/many-noend/part2.header
  36. 0    node_modules/dicer/test/fixtures/many-noend/part3
  37. 1    node_modules/dicer/test/fixtures/many-noend/part3.header
  38. 0    node_modules/dicer/test/fixtures/many-noend/part4
  39. 1    node_modules/dicer/test/fixtures/many-noend/part4.header
  40. 3    node_modules/dicer/test/fixtures/many-noend/part5
  41. 1    node_modules/dicer/test/fixtures/many-noend/part5.header
  42. 1    node_modules/dicer/test/fixtures/many-noend/part6
  43. 1    node_modules/dicer/test/fixtures/many-noend/part6.header
  44. 2    node_modules/dicer/test/fixtures/many-noend/part7.header
  45. 32   node_modules/dicer/test/fixtures/many-wrongboundary/original
  46. 33   node_modules/dicer/test/fixtures/many-wrongboundary/preamble
  47. 1    node_modules/dicer/test/fixtures/many-wrongboundary/preamble.error
  48. 32   node_modules/dicer/test/fixtures/many/original
  49. 1    node_modules/dicer/test/fixtures/many/part1
  50. 1    node_modules/dicer/test/fixtures/many/part1.header
  51. 0    node_modules/dicer/test/fixtures/many/part2
  52. 1    node_modules/dicer/test/fixtures/many/part2.header
  53. 0    node_modules/dicer/test/fixtures/many/part3
  54. 1    node_modules/dicer/test/fixtures/many/part3.header
  55. 0    node_modules/dicer/test/fixtures/many/part4
  56. 1    node_modules/dicer/test/fixtures/many/part4.header
  57. 3    node_modules/dicer/test/fixtures/many/part5
  58. 1    node_modules/dicer/test/fixtures/many/part5.header
  59. 0    node_modules/dicer/test/fixtures/many/part6
  60. 2    node_modules/dicer/test/fixtures/many/part6.header
  61. 1    node_modules/dicer/test/fixtures/many/part7
  62. 1    node_modules/dicer/test/fixtures/many/part7.header
  63. 24   node_modules/dicer/test/fixtures/nested-full/original
  64. 1    node_modules/dicer/test/fixtures/nested-full/part1
  65. 1    node_modules/dicer/test/fixtures/nested-full/part1.header
  66. 12   node_modules/dicer/test/fixtures/nested-full/part2
  67. 2    node_modules/dicer/test/fixtures/nested-full/part2.header
  68. 2    node_modules/dicer/test/fixtures/nested-full/preamble.header
  69. 21   node_modules/dicer/test/fixtures/nested/original
  70. 1    node_modules/dicer/test/fixtures/nested/part1
  71. 1    node_modules/dicer/test/fixtures/nested/part1.header
  72. 12   node_modules/dicer/test/fixtures/nested/part2
  73. 2    node_modules/dicer/test/fixtures/nested/part2.header
  74. 87   node_modules/dicer/test/test-endfinish.js
  75. 68   node_modules/dicer/test/test-headerparser.js
  76. 148  node_modules/dicer/test/test-multipart-extra-trailer.js
  77. 228  node_modules/dicer/test/test-multipart-nolisteners.js
  78. 240  node_modules/dicer/test/test-multipart.js
  79. 4    node_modules/dicer/test/test.js
  80. 32   node_modules/express-fileupload/.circleci/config.yml
  81. 1    node_modules/express-fileupload/.eslintignore
  82. 23   node_modules/express-fileupload/.eslintrc
  83. 1    node_modules/express-fileupload/.prettierrc
  84. 22   node_modules/express-fileupload/LICENSE
  85. 125  node_modules/express-fileupload/README.md
  86. 70   node_modules/express-fileupload/example/README.md
  87. 12   node_modules/express-fileupload/example/index.html
  88. 41   node_modules/express-fileupload/example/server.js
  89. 1    node_modules/express-fileupload/example/uploads/placeholder.txt
  90. 65   node_modules/express-fileupload/lib/fileFactory.js
  91. 39   node_modules/express-fileupload/lib/index.js
  92. 34   node_modules/express-fileupload/lib/isEligibleRequest.js
  93. 42   node_modules/express-fileupload/lib/memHandler.js
  94. 167  node_modules/express-fileupload/lib/processMultipart.js
  95. 35   node_modules/express-fileupload/lib/processNested.js
  96. 64   node_modules/express-fileupload/lib/tempFileHandler.js
  97. 26   node_modules/express-fileupload/lib/uploadtimer.js
  98. 311  node_modules/express-fileupload/lib/utilities.js
  99. 42   node_modules/express-fileupload/package.json
  100. 78  node_modules/express-fileupload/test/fileFactory.spec.js

41  node_modules/.package-lock.json

@@ -74,6 +74,17 @@
"@popperjs/core": "^2.10.2"
}
},
"node_modules/busboy": {
"version": "0.3.1",
"resolved": "https://registry.npmjs.org/busboy/-/busboy-0.3.1.tgz",
"integrity": "sha512-y7tTxhGKXcyBxRKAni+awqx8uqaJKrSFSNFSeRG5CsWNdmy2BIK+6VGWEW7TZnIO/533mtMEA4rOevQV815YJw==",
"dependencies": {
"dicer": "0.3.0"
},
"engines": {
"node": ">=4.5.0"
}
},
"node_modules/bytes": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/bytes/-/bytes-3.0.0.tgz",
@@ -144,6 +155,17 @@
"resolved": "https://registry.npmjs.org/destroy/-/destroy-1.0.4.tgz",
"integrity": "sha1-l4hXRCxEdJ5CBmE+N5RiBYJqvYA="
},
"node_modules/dicer": {
"version": "0.3.0",
"resolved": "https://registry.npmjs.org/dicer/-/dicer-0.3.0.tgz",
"integrity": "sha512-MdceRRWqltEG2dZqO769g27N/3PXfcKl04VhYnBlo2YhH7zPi88VebsjTKclaOyiuMaGU72hTfw3VkUitGcVCA==",
"dependencies": {
"streamsearch": "0.1.2"
},
"engines": {
"node": ">=4.5.0"
}
},
"node_modules/ee-first": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz",
@@ -218,6 +240,17 @@
"node": ">= 0.10.0"
}
},
"node_modules/express-fileupload": {
"version": "1.3.1",
"resolved": "https://registry.npmjs.org/express-fileupload/-/express-fileupload-1.3.1.tgz",
"integrity": "sha512-LD1yabD3exmWIFujKGDnT1rmxSomaqQSlUvzIsrA1ZgwCJ6ci7lg2YHFGM3Q6DfK+Yk0gAVU7GWLE7qDMwZLkw==",
"dependencies": {
"busboy": "^0.3.1"
},
"engines": {
"node": ">=12.0.0"
}
},
"node_modules/express/node_modules/cookie": {
"version": "0.3.1",
"resolved": "https://registry.npmjs.org/cookie/-/cookie-0.3.1.tgz",
@@ -507,6 +540,14 @@
"node": ">= 0.6"
}
},
"node_modules/streamsearch": {
"version": "0.1.2",
"resolved": "https://registry.npmjs.org/streamsearch/-/streamsearch-0.1.2.tgz",
"integrity": "sha1-gIudDlb8Jz2Am6VzOOkpkZoanxo=",
"engines": {
"node": ">=0.8.0"
}
},
"node_modules/type-is": {
"version": "1.6.18",
"resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.18.tgz",

16  node_modules/busboy/.travis.yml

@@ -0,0 +1,16 @@
sudo: false
language: cpp
notifications:
email: false
env:
matrix:
- TRAVIS_NODE_VERSION="4"
- TRAVIS_NODE_VERSION="6"
- TRAVIS_NODE_VERSION="8"
- TRAVIS_NODE_VERSION="10"
install:
- rm -rf ~/.nvm && git clone https://github.com/creationix/nvm.git ~/.nvm && source ~/.nvm/nvm.sh && nvm install $TRAVIS_NODE_VERSION
- node --version
- npm --version
- npm install
script: npm test

19  node_modules/busboy/LICENSE

@@ -0,0 +1,19 @@
Copyright Brian White. All rights reserved.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to
deal in the Software without restriction, including without limitation the
rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
sell copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
IN THE SOFTWARE.

222  node_modules/busboy/README.md

@@ -0,0 +1,222 @@
Description
===========
A node.js module for parsing incoming HTML form data.
Requirements
============
* [node.js](http://nodejs.org/) -- v4.5.0 or newer
Install
=======
npm install busboy
Examples
========
* Parsing (multipart) with default options:
```javascript
var http = require('http'),
inspect = require('util').inspect;
var Busboy = require('busboy');
http.createServer(function(req, res) {
if (req.method === 'POST') {
var busboy = new Busboy({ headers: req.headers });
busboy.on('file', function(fieldname, file, filename, encoding, mimetype) {
console.log('File [' + fieldname + ']: filename: ' + filename + ', encoding: ' + encoding + ', mimetype: ' + mimetype);
file.on('data', function(data) {
console.log('File [' + fieldname + '] got ' + data.length + ' bytes');
});
file.on('end', function() {
console.log('File [' + fieldname + '] Finished');
});
});
busboy.on('field', function(fieldname, val, fieldnameTruncated, valTruncated, encoding, mimetype) {
console.log('Field [' + fieldname + ']: value: ' + inspect(val));
});
busboy.on('finish', function() {
console.log('Done parsing form!');
res.writeHead(303, { Connection: 'close', Location: '/' });
res.end();
});
req.pipe(busboy);
} else if (req.method === 'GET') {
res.writeHead(200, { Connection: 'close' });
res.end('<html><head></head><body>\
<form method="POST" enctype="multipart/form-data">\
<input type="text" name="textfield"><br />\
<input type="file" name="filefield"><br />\
<input type="submit">\
</form>\
</body></html>');
}
}).listen(8000, function() {
console.log('Listening for requests');
});
// Example output, using http://nodejs.org/images/ryan-speaker.jpg as the file:
//
// Listening for requests
// File [filefield]: filename: ryan-speaker.jpg, encoding: binary
// File [filefield] got 11971 bytes
// Field [textfield]: value: 'testing! :-)'
// File [filefield] Finished
// Done parsing form!
```
* Save all incoming files to disk:
```javascript
var http = require('http'),
path = require('path'),
os = require('os'),
fs = require('fs');
var Busboy = require('busboy');
http.createServer(function(req, res) {
if (req.method === 'POST') {
var busboy = new Busboy({ headers: req.headers });
busboy.on('file', function(fieldname, file, filename, encoding, mimetype) {
var saveTo = path.join(os.tmpdir(), path.basename(fieldname));
file.pipe(fs.createWriteStream(saveTo));
});
busboy.on('finish', function() {
res.writeHead(200, { 'Connection': 'close' });
res.end("That's all folks!");
});
return req.pipe(busboy);
}
res.writeHead(404);
res.end();
}).listen(8000, function() {
console.log('Listening for requests');
});
```
* Parsing (urlencoded) with default options:
```javascript
var http = require('http'),
inspect = require('util').inspect;
var Busboy = require('busboy');
http.createServer(function(req, res) {
if (req.method === 'POST') {
var busboy = new Busboy({ headers: req.headers });
busboy.on('file', function(fieldname, file, filename, encoding, mimetype) {
console.log('File [' + fieldname + ']: filename: ' + filename);
file.on('data', function(data) {
console.log('File [' + fieldname + '] got ' + data.length + ' bytes');
});
file.on('end', function() {
console.log('File [' + fieldname + '] Finished');
});
});
busboy.on('field', function(fieldname, val, fieldnameTruncated, valTruncated) {
console.log('Field [' + fieldname + ']: value: ' + inspect(val));
});
busboy.on('finish', function() {
console.log('Done parsing form!');
res.writeHead(303, { Connection: 'close', Location: '/' });
res.end();
});
req.pipe(busboy);
} else if (req.method === 'GET') {
res.writeHead(200, { Connection: 'close' });
res.end('<html><head></head><body>\
<form method="POST">\
<input type="text" name="textfield"><br />\
<select name="selectfield">\
<option value="1">1</option>\
<option value="10">10</option>\
<option value="100">100</option>\
<option value="9001">9001</option>\
</select><br />\
<input type="checkbox" name="checkfield">Node.js rules!<br />\
<input type="submit">\
</form>\
</body></html>');
}
}).listen(8000, function() {
console.log('Listening for requests');
});
// Example output:
//
// Listening for requests
// Field [textfield]: value: 'testing! :-)'
// Field [selectfield]: value: '9001'
// Field [checkfield]: value: 'on'
// Done parsing form!
```
API
===
_Busboy_ is a _Writable_ stream
Busboy (special) events
-----------------------
* **file**(< _string_ >fieldname, < _ReadableStream_ >stream, < _string_ >filename, < _string_ >transferEncoding, < _string_ >mimeType) - Emitted for each new file form field found. `transferEncoding` contains the 'Content-Transfer-Encoding' value for the file stream. `mimeType` contains the 'Content-Type' value for the file stream.
* Note: if you listen for this event, you should always consume the `stream`, whether or not you care about the file contents (e.g. simply call `stream.resume();` to discard them); otherwise the 'finish' event will never fire on the Busboy instance. However, if you don't care about **any** incoming files, you can simply not listen for the 'file' event at all, and any/all files will be automatically and safely discarded (these discarded files still count towards the `files` and `parts` limits). A short sketch showing this, together with the `limits` settings, appears after the constructor documentation below.
* If a configured file size limit was reached, `stream` will both have a boolean property `truncated` (best checked at the end of the stream) and emit a 'limit' event to notify you when this happens.
* **field**(< _string_ >fieldname, < _string_ >value, < _boolean_ >fieldnameTruncated, < _boolean_ >valueTruncated, < _string_ >transferEncoding, < _string_ >mimeType) - Emitted for each new non-file field found.
* **partsLimit**() - Emitted when specified `parts` limit has been reached. No more 'file' or 'field' events will be emitted.
* **filesLimit**() - Emitted when specified `files` limit has been reached. No more 'file' events will be emitted.
* **fieldsLimit**() - Emitted when specified `fields` limit has been reached. No more 'field' events will be emitted.
Busboy methods
--------------
* **(constructor)**(< _object_ >config) - Creates and returns a new Busboy instance.
* The constructor takes the following valid `config` settings:
* **headers** - _object_ - These are the HTTP headers of the incoming request, which are used by individual parsers.
* **highWaterMark** - _integer_ - highWaterMark to use for this Busboy instance (Default: WritableStream default).
* **fileHwm** - _integer_ - highWaterMark to use for file streams (Default: ReadableStream default).
* **defCharset** - _string_ - Default character set to use when one isn't defined (Default: 'utf8').
* **preservePath** - _boolean_ - If paths in the multipart 'filename' field shall be preserved. (Default: false).
* **limits** - _object_ - Various limits on incoming data. Valid properties are:
* **fieldNameSize** - _integer_ - Max field name size (in bytes) (Default: 100 bytes).
* **fieldSize** - _integer_ - Max field value size (in bytes) (Default: 1MB).
* **fields** - _integer_ - Max number of non-file fields (Default: Infinity).
* **fileSize** - _integer_ - For multipart forms, the max file size (in bytes) (Default: Infinity).
* **files** - _integer_ - For multipart forms, the max number of file fields (Default: Infinity).
* **parts** - _integer_ - For multipart forms, the max number of parts (fields + files) (Default: Infinity).
* **headerPairs** - _integer_ - For multipart forms, the max number of header key=>value pairs to parse (Default: 2000, same as node's http).
* The constructor can throw errors:
* **Unsupported content type: $type** - The `Content-Type` isn't one Busboy can parse.
* **Missing Content-Type** - The provided headers don't include `Content-Type` at all.
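The following is a minimal sketch, not part of the upstream examples, tying together the `limits` settings and the truncation/limit events described above; the specific limit values, log messages, and response body are illustrative assumptions:
```javascript
var http = require('http');
var Busboy = require('busboy');

http.createServer(function(req, res) {
  if (req.method !== 'POST') { res.writeHead(404); return res.end(); }
  var busboy = new Busboy({
    headers: req.headers,
    limits: {
      files: 1,                  // at most one file field (illustrative)
      fileSize: 1 * 1024 * 1024  // 1MB per file (illustrative)
    }
  });
  busboy.on('file', function(fieldname, file, filename, encoding, mimetype) {
    file.on('limit', function() {
      console.log('File [' + fieldname + '] exceeded the fileSize limit');
    });
    file.on('end', function() {
      if (file.truncated)
        console.log('File [' + fieldname + '] was truncated');
    });
    file.resume(); // always consume the stream, even when discarding it
  });
  busboy.on('filesLimit', function() {
    console.log('files limit reached; further file fields are ignored');
  });
  busboy.on('finish', function() {
    res.writeHead(200, { Connection: 'close' });
    res.end('done');
  });
  req.pipe(busboy);
}).listen(8000);
```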

73  node_modules/busboy/deps/encoding/encoding-indexes.js
File diff suppressed because it is too large

2391  node_modules/busboy/deps/encoding/encoding.js
File diff suppressed because it is too large

88  node_modules/busboy/lib/main.js

@@ -0,0 +1,88 @@
var fs = require('fs'),
WritableStream = require('stream').Writable,
inherits = require('util').inherits;
var parseParams = require('./utils').parseParams;
function Busboy(opts) {
if (!(this instanceof Busboy))
return new Busboy(opts);
if (opts.highWaterMark !== undefined)
WritableStream.call(this, { highWaterMark: opts.highWaterMark });
else
WritableStream.call(this);
this._done = false;
this._parser = undefined;
this._finished = false;
this.opts = opts;
if (opts.headers && typeof opts.headers['content-type'] === 'string')
this.parseHeaders(opts.headers);
else
throw new Error('Missing Content-Type');
}
inherits(Busboy, WritableStream);
Busboy.prototype.emit = function(ev) {
if (ev === 'finish') {
if (!this._done) {
this._parser && this._parser.end();
return;
} else if (this._finished) {
return;
}
this._finished = true;
}
WritableStream.prototype.emit.apply(this, arguments);
};
Busboy.prototype.parseHeaders = function(headers) {
this._parser = undefined;
if (headers['content-type']) {
var parsed = parseParams(headers['content-type']),
matched, type;
for (var i = 0; i < TYPES.length; ++i) {
type = TYPES[i];
if (typeof type.detect === 'function')
matched = type.detect(parsed);
else
matched = type.detect.test(parsed[0]);
if (matched)
break;
}
if (matched) {
var cfg = {
limits: this.opts.limits,
headers: headers,
parsedConType: parsed,
highWaterMark: undefined,
fileHwm: undefined,
defCharset: undefined,
preservePath: false
};
if (this.opts.highWaterMark)
cfg.highWaterMark = this.opts.highWaterMark;
if (this.opts.fileHwm)
cfg.fileHwm = this.opts.fileHwm;
cfg.defCharset = this.opts.defCharset;
cfg.preservePath = this.opts.preservePath;
this._parser = type(this, cfg);
return;
}
}
throw new Error('Unsupported content type: ' + headers['content-type']);
};
Busboy.prototype._write = function(chunk, encoding, cb) {
if (!this._parser)
return cb(new Error('Not ready to parse. Missing Content-Type?'));
this._parser.write(chunk, cb);
};
var TYPES = [
require('./types/multipart'),
require('./types/urlencoded'),
];
module.exports = Busboy;

325  node_modules/busboy/lib/types/multipart.js

@@ -0,0 +1,325 @@
// TODO:
// * support 1 nested multipart level
// (see second multipart example here:
// http://www.w3.org/TR/html401/interact/forms.html#didx-multipartform-data)
// * support limits.fieldNameSize
// -- this will require modifications to utils.parseParams
var ReadableStream = require('stream').Readable,
inherits = require('util').inherits;
var Dicer = require('dicer');
var parseParams = require('../utils').parseParams,
decodeText = require('../utils').decodeText,
basename = require('../utils').basename;
var RE_BOUNDARY = /^boundary$/i,
RE_FIELD = /^form-data$/i,
RE_CHARSET = /^charset$/i,
RE_FILENAME = /^filename$/i,
RE_NAME = /^name$/i;
Multipart.detect = /^multipart\/form-data/i;
function Multipart(boy, cfg) {
if (!(this instanceof Multipart))
return new Multipart(boy, cfg);
var i,
len,
self = this,
boundary,
limits = cfg.limits,
parsedConType = cfg.parsedConType || [],
defCharset = cfg.defCharset || 'utf8',
preservePath = cfg.preservePath,
fileopts = (typeof cfg.fileHwm === 'number'
? { highWaterMark: cfg.fileHwm }
: {});
for (i = 0, len = parsedConType.length; i < len; ++i) {
if (Array.isArray(parsedConType[i])
&& RE_BOUNDARY.test(parsedConType[i][0])) {
boundary = parsedConType[i][1];
break;
}
}
function checkFinished() {
if (nends === 0 && finished && !boy._done) {
finished = false;
process.nextTick(function() {
boy._done = true;
boy.emit('finish');
});
}
}
if (typeof boundary !== 'string')
throw new Error('Multipart: Boundary not found');
var fieldSizeLimit = (limits && typeof limits.fieldSize === 'number'
? limits.fieldSize
: 1 * 1024 * 1024),
fileSizeLimit = (limits && typeof limits.fileSize === 'number'
? limits.fileSize
: Infinity),
filesLimit = (limits && typeof limits.files === 'number'
? limits.files
: Infinity),
fieldsLimit = (limits && typeof limits.fields === 'number'
? limits.fields
: Infinity),
partsLimit = (limits && typeof limits.parts === 'number'
? limits.parts
: Infinity);
var nfiles = 0,
nfields = 0,
nends = 0,
curFile,
curField,
finished = false;
this._needDrain = false;
this._pause = false;
this._cb = undefined;
this._nparts = 0;
this._boy = boy;
var parserCfg = {
boundary: boundary,
maxHeaderPairs: (limits && limits.headerPairs)
};
if (fileopts.highWaterMark)
parserCfg.partHwm = fileopts.highWaterMark;
if (cfg.highWaterMark)
parserCfg.highWaterMark = cfg.highWaterMark;
this.parser = new Dicer(parserCfg);
this.parser.on('drain', function() {
self._needDrain = false;
if (self._cb && !self._pause) {
var cb = self._cb;
self._cb = undefined;
cb();
}
}).on('part', function onPart(part) {
if (++self._nparts > partsLimit) {
self.parser.removeListener('part', onPart);
self.parser.on('part', skipPart);
boy.hitPartsLimit = true;
boy.emit('partsLimit');
return skipPart(part);
}
// hack because streams2 _always_ doesn't emit 'end' until nextTick, so let
// us emit 'end' early since we know the part has ended if we are already
// seeing the next part
if (curField) {
var field = curField;
field.emit('end');
field.removeAllListeners('end');
}
part.on('header', function(header) {
var contype,
fieldname,
parsed,
charset,
encoding,
filename,
nsize = 0;
if (header['content-type']) {
parsed = parseParams(header['content-type'][0]);
if (parsed[0]) {
contype = parsed[0].toLowerCase();
for (i = 0, len = parsed.length; i < len; ++i) {
if (RE_CHARSET.test(parsed[i][0])) {
charset = parsed[i][1].toLowerCase();
break;
}
}
}
}
if (contype === undefined)
contype = 'text/plain';
if (charset === undefined)
charset = defCharset;
if (header['content-disposition']) {
parsed = parseParams(header['content-disposition'][0]);
if (!RE_FIELD.test(parsed[0]))
return skipPart(part);
for (i = 0, len = parsed.length; i < len; ++i) {
if (RE_NAME.test(parsed[i][0])) {
fieldname = decodeText(parsed[i][1], 'binary', 'utf8');
} else if (RE_FILENAME.test(parsed[i][0])) {
filename = decodeText(parsed[i][1], 'binary', 'utf8');
if (!preservePath)
filename = basename(filename);
}
}
} else
return skipPart(part);
if (header['content-transfer-encoding'])
encoding = header['content-transfer-encoding'][0].toLowerCase();
else
encoding = '7bit';
var onData,
onEnd;
if (contype === 'application/octet-stream' || filename !== undefined) {
// file/binary field
if (nfiles === filesLimit) {
if (!boy.hitFilesLimit) {
boy.hitFilesLimit = true;
boy.emit('filesLimit');
}
return skipPart(part);
}
++nfiles;
if (!boy._events.file) {
self.parser._ignore();
return;
}
++nends;
var file = new FileStream(fileopts);
curFile = file;
file.on('end', function() {
--nends;
self._pause = false;
checkFinished();
if (self._cb && !self._needDrain) {
var cb = self._cb;
self._cb = undefined;
cb();
}
});
file._read = function(n) {
if (!self._pause)
return;
self._pause = false;
if (self._cb && !self._needDrain) {
var cb = self._cb;
self._cb = undefined;
cb();
}
};
boy.emit('file', fieldname, file, filename, encoding, contype);
onData = function(data) {
if ((nsize += data.length) > fileSizeLimit) {
var extralen = (fileSizeLimit - (nsize - data.length));
if (extralen > 0)
file.push(data.slice(0, extralen));
file.emit('limit');
file.truncated = true;
part.removeAllListeners('data');
} else if (!file.push(data))
self._pause = true;
};
onEnd = function() {
curFile = undefined;
file.push(null);
};
} else {
// non-file field
if (nfields === fieldsLimit) {
if (!boy.hitFieldsLimit) {
boy.hitFieldsLimit = true;
boy.emit('fieldsLimit');
}
return skipPart(part);
}
++nfields;
++nends;
var buffer = '',
truncated = false;
curField = part;
onData = function(data) {
if ((nsize += data.length) > fieldSizeLimit) {
var extralen = (fieldSizeLimit - (nsize - data.length));
buffer += data.toString('binary', 0, extralen);
truncated = true;
part.removeAllListeners('data');
} else
buffer += data.toString('binary');
};
onEnd = function() {
curField = undefined;
if (buffer.length)
buffer = decodeText(buffer, 'binary', charset);
boy.emit('field', fieldname, buffer, false, truncated, encoding, contype);
--nends;
checkFinished();
};
}
/* As of node@2efe4ab761666 (v0.10.29+/v0.11.14+), busboy had become
broken. Streams2/streams3 is a huge black box of confusion, but
somehow overriding the sync state seems to fix things again (and still
seems to work for previous node versions).
*/
part._readableState.sync = false;
part.on('data', onData);
part.on('end', onEnd);
}).on('error', function(err) {
if (curFile)
curFile.emit('error', err);
});
}).on('error', function(err) {
boy.emit('error', err);
}).on('finish', function() {
finished = true;
checkFinished();
});
}
Multipart.prototype.write = function(chunk, cb) {
var r;
if ((r = this.parser.write(chunk)) && !this._pause)
cb();
else {
this._needDrain = !r;
this._cb = cb;
}
};
Multipart.prototype.end = function() {
var self = this;
if (this._nparts === 0 && !self._boy._done) {
process.nextTick(function() {
self._boy._done = true;
self._boy.emit('finish');
});
} else if (this.parser.writable)
this.parser.end();
};
function skipPart(part) {
part.resume();
}
function FileStream(opts) {
if (!(this instanceof FileStream))
return new FileStream(opts);
ReadableStream.call(this, opts);
this.truncated = false;
}
inherits(FileStream, ReadableStream);
FileStream.prototype._read = function(n) {};
module.exports = Multipart;

214  node_modules/busboy/lib/types/urlencoded.js

@@ -0,0 +1,214 @@
var Decoder = require('../utils').Decoder,
decodeText = require('../utils').decodeText;
var RE_CHARSET = /^charset$/i;
UrlEncoded.detect = /^application\/x-www-form-urlencoded/i;
function UrlEncoded(boy, cfg) {
if (!(this instanceof UrlEncoded))
return new UrlEncoded(boy, cfg);
var limits = cfg.limits,
headers = cfg.headers,
parsedConType = cfg.parsedConType;
this.boy = boy;
this.fieldSizeLimit = (limits && typeof limits.fieldSize === 'number'
? limits.fieldSize
: 1 * 1024 * 1024);
this.fieldNameSizeLimit = (limits && typeof limits.fieldNameSize === 'number'
? limits.fieldNameSize
: 100);
this.fieldsLimit = (limits && typeof limits.fields === 'number'
? limits.fields
: Infinity);
var charset;
for (var i = 0, len = parsedConType.length; i < len; ++i) {
if (Array.isArray(parsedConType[i])
&& RE_CHARSET.test(parsedConType[i][0])) {
charset = parsedConType[i][1].toLowerCase();
break;
}
}
if (charset === undefined)
charset = cfg.defCharset || 'utf8';
this.decoder = new Decoder();
this.charset = charset;
this._fields = 0;
this._state = 'key';
this._checkingBytes = true;
this._bytesKey = 0;
this._bytesVal = 0;
this._key = '';
this._val = '';
this._keyTrunc = false;
this._valTrunc = false;
this._hitLimit = false;
}
UrlEncoded.prototype.write = function(data, cb) {
if (this._fields === this.fieldsLimit) {
if (!this.boy.hitFieldsLimit) {
this.boy.hitFieldsLimit = true;
this.boy.emit('fieldsLimit');
}
return cb();
}
var idxeq, idxamp, i, p = 0, len = data.length;
while (p < len) {
if (this._state === 'key') {
idxeq = idxamp = undefined;
for (i = p; i < len; ++i) {
if (!this._checkingBytes)
++p;
if (data[i] === 0x3D/*=*/) {
idxeq = i;
break;
} else if (data[i] === 0x26/*&*/) {
idxamp = i;
break;
}
if (this._checkingBytes && this._bytesKey === this.fieldNameSizeLimit) {
this._hitLimit = true;
break;
} else if (this._checkingBytes)
++this._bytesKey;
}
if (idxeq !== undefined) {
// key with assignment
if (idxeq > p)
this._key += this.decoder.write(data.toString('binary', p, idxeq));
this._state = 'val';
this._hitLimit = false;
this._checkingBytes = true;
this._val = '';
this._bytesVal = 0;
this._valTrunc = false;
this.decoder.reset();
p = idxeq + 1;
} else if (idxamp !== undefined) {
// key with no assignment
++this._fields;
var key, keyTrunc = this._keyTrunc;
if (idxamp > p)
key = (this._key += this.decoder.write(data.toString('binary', p, idxamp)));
else
key = this._key;
this._hitLimit = false;
this._checkingBytes = true;
this._key = '';
this._bytesKey = 0;
this._keyTrunc = false;
this.decoder.reset();
if (key.length) {
this.boy.emit('field', decodeText(key, 'binary', this.charset),
'',
keyTrunc,
false);
}
p = idxamp + 1;
if (this._fields === this.fieldsLimit)
return cb();
} else if (this._hitLimit) {
// we may not have hit the actual limit if there are encoded bytes...
if (i > p)
this._key += this.decoder.write(data.toString('binary', p, i));
p = i;
if ((this._bytesKey = this._key.length) === this.fieldNameSizeLimit) {
// yep, we actually did hit the limit
this._checkingBytes = false;
this._keyTrunc = true;
}
} else {
if (p < len)
this._key += this.decoder.write(data.toString('binary', p));
p = len;
}
} else {
idxamp = undefined;
for (i = p; i < len; ++i) {
if (!this._checkingBytes)
++p;
if (data[i] === 0x26/*&*/) {
idxamp = i;
break;
}
if (this._checkingBytes && this._bytesVal === this.fieldSizeLimit) {
this._hitLimit = true;
break;
}
else if (this._checkingBytes)
++this._bytesVal;
}
if (idxamp !== undefined) {
++this._fields;
if (idxamp > p)
this._val += this.decoder.write(data.toString('binary', p, idxamp));
this.boy.emit('field', decodeText(this._key, 'binary', this.charset),
decodeText(this._val, 'binary', this.charset),
this._keyTrunc,
this._valTrunc);
this._state = 'key';
this._hitLimit = false;
this._checkingBytes = true;
this._key = '';
this._bytesKey = 0;
this._keyTrunc = false;
this.decoder.reset();
p = idxamp + 1;
if (this._fields === this.fieldsLimit)
return cb();
} else if (this._hitLimit) {
// we may not have hit the actual limit if there are encoded bytes...
if (i > p)
this._val += this.decoder.write(data.toString('binary', p, i));
p = i;
if ((this._val === '' && this.fieldSizeLimit === 0)
|| (this._bytesVal = this._val.length) === this.fieldSizeLimit) {
// yep, we actually did hit the limit
this._checkingBytes = false;
this._valTrunc = true;
}
} else {
if (p < len)
this._val += this.decoder.write(data.toString('binary', p));
p = len;
}
}
}
cb();
};
UrlEncoded.prototype.end = function() {
if (this.boy._done)
return;
if (this._state === 'key' && this._key.length > 0) {
this.boy.emit('field', decodeText(this._key, 'binary', this.charset),
'',
this._keyTrunc,
false);
} else if (this._state === 'val') {
this.boy.emit('field', decodeText(this._key, 'binary', this.charset),
decodeText(this._val, 'binary', this.charset),
this._keyTrunc,
this._valTrunc);
}
this.boy._done = true;
this.boy.emit('finish');
};
module.exports = UrlEncoded;

172  node_modules/busboy/lib/utils.js

@@ -0,0 +1,172 @@
var jsencoding = require('../deps/encoding/encoding');
var RE_ENCODED = /%([a-fA-F0-9]{2})/g;
function encodedReplacer(match, byte) {
return String.fromCharCode(parseInt(byte, 16));
}
function parseParams(str) {
var res = [],
state = 'key',
charset = '',
inquote = false,
escaping = false,
p = 0,
tmp = '';
for (var i = 0, len = str.length; i < len; ++i) {
if (str[i] === '\\' && inquote) {
if (escaping)
escaping = false;
else {
escaping = true;
continue;
}
} else if (str[i] === '"') {
if (!escaping) {
if (inquote) {
inquote = false;
state = 'key';
} else
inquote = true;
continue;
} else
escaping = false;
} else {
if (escaping && inquote)
tmp += '\\';
escaping = false;
if ((state === 'charset' || state === 'lang') && str[i] === "'") {
if (state === 'charset') {
state = 'lang';
charset = tmp.substring(1);
} else
state = 'value';
tmp = '';
continue;
} else if (state === 'key'
&& (str[i] === '*' || str[i] === '=')
&& res.length) {
if (str[i] === '*')
state = 'charset';
else
state = 'value';
res[p] = [tmp, undefined];
tmp = '';
continue;
} else if (!inquote && str[i] === ';') {
state = 'key';
if (charset) {
if (tmp.length) {
tmp = decodeText(tmp.replace(RE_ENCODED, encodedReplacer),
'binary',
charset);
}
charset = '';
}
if (res[p] === undefined)
res[p] = tmp;
else
res[p][1] = tmp;
tmp = '';
++p;
continue;
} else if (!inquote && (str[i] === ' ' || str[i] === '\t'))
continue;
}
tmp += str[i];
}
if (charset && tmp.length) {
tmp = decodeText(tmp.replace(RE_ENCODED, encodedReplacer),
'binary',
charset);
}
if (res[p] === undefined) {
if (tmp)
res[p] = tmp;
} else
res[p][1] = tmp;
return res;
};
exports.parseParams = parseParams;
function decodeText(text, textEncoding, destEncoding) {
var ret;
if (text && jsencoding.encodingExists(destEncoding)) {
try {
ret = jsencoding.TextDecoder(destEncoding)
.decode(Buffer.from(text, textEncoding));
} catch(e) {}
}
return (typeof ret === 'string' ? ret : text);
}
exports.decodeText = decodeText;
var HEX = [
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0,
0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
], RE_PLUS = /\+/g;
function Decoder() {
this.buffer = undefined;
}
Decoder.prototype.write = function(str) {
// Replace '+' with ' ' before decoding
str = str.replace(RE_PLUS, ' ');
var res = '';
var i = 0, p = 0, len = str.length;
for (; i < len; ++i) {
if (this.buffer !== undefined) {
if (!HEX[str.charCodeAt(i)]) {
res += '%' + this.buffer;
this.buffer = undefined;
--i; // retry character
} else {
this.buffer += str[i];
++p;
if (this.buffer.length === 2) {
res += String.fromCharCode(parseInt(this.buffer, 16));
this.buffer = undefined;
}
}
} else if (str[i] === '%') {
if (i > p) {
res += str.substring(p, i);
p = i;
}
this.buffer = '';
++p;
}
}
if (p < len && this.buffer === undefined)
res += str.substring(p);
return res;
};
Decoder.prototype.reset = function() {
this.buffer = undefined;
};
exports.Decoder = Decoder;
function basename(path) {
if (typeof path !== 'string')
return '';
for (var i = path.length - 1; i >= 0; --i) {
switch (path.charCodeAt(i)) {
case 0x2F: // '/'
case 0x5C: // '\'
path = path.slice(i + 1);
return (path === '..' || path === '.' ? '' : path);
}
}
return (path === '..' || path === '.' ? '' : path);
}
exports.basename = basename;

16  node_modules/busboy/package.json

@@ -0,0 +1,16 @@
{ "name": "busboy",
"version": "0.3.1",
"author": "Brian White <mscdex@mscdex.net>",
"description": "A streaming parser for HTML form data for node.js",
"main": "./lib/main",
"dependencies": {
"dicer": "0.3.0"
},
"scripts": {
"test": "node test/test.js"
},
"engines": { "node": ">=4.5.0" },
"keywords": [ "uploads", "forms", "multipart", "form-data" ],
"licenses": [ { "type": "MIT", "url": "http://github.com/mscdex/busboy/raw/master/LICENSE" } ],
"repository" : { "type": "git", "url": "http://github.com/mscdex/busboy.git" }
}

80  node_modules/busboy/test/test-types-multipart-stream-pause.js

@@ -0,0 +1,80 @@
var Busboy = require('..');
var path = require('path');
var inspect = require('util').inspect;
var assert = require('assert');
function formDataSection(key, value) {
return Buffer.from('\r\n--' + BOUNDARY
+ '\r\nContent-Disposition: form-data; name="'
+ key + '"\r\n\r\n' + value);
}
function formDataFile(key, filename, contentType) {
return Buffer.concat([
Buffer.from('\r\n--' + BOUNDARY + '\r\n'),
Buffer.from('Content-Disposition: form-data; name="'
+ key + '"; filename="' + filename + '"\r\n'),
Buffer.from('Content-Type: ' + contentType + '\r\n\r\n'),
Buffer.allocUnsafe(100000)
]);
}
var BOUNDARY = 'u2KxIV5yF1y+xUspOQCCZopaVgeV6Jxihv35XQJmuTx8X3sh';
var reqChunks = [
Buffer.concat([
formDataFile('file', 'file.bin', 'application/octet-stream'),
formDataSection('foo', 'foo value')
]),
formDataSection('bar', 'bar value'),
Buffer.from('\r\n--' + BOUNDARY + '--\r\n')
];
var busboy = new Busboy({
headers: {
'content-type': 'multipart/form-data; boundary=' + BOUNDARY
}
});
var finishes = 0;
var results = [];
var expected = [
['file', 'file', 'file.bin', '7bit', 'application/octet-stream'],
['field', 'foo', 'foo value', false, false, '7bit', 'text/plain'],
['field', 'bar', 'bar value', false, false, '7bit', 'text/plain'],
];
busboy.on('field', function(key, val, keyTrunc, valTrunc, encoding, contype) {
results.push(['field', key, val, keyTrunc, valTrunc, encoding, contype]);
});
busboy.on('file', function(fieldname, stream, filename, encoding, mimeType) {
results.push(['file', fieldname, filename, encoding, mimeType]);
// Simulate a pipe where the destination is pausing (perhaps due to waiting
// for file system write to finish)
setTimeout(function() {
stream.resume();
}, 10);
});
busboy.on('finish', function() {
assert(finishes++ === 0, 'finish emitted multiple times');
assert.deepEqual(results.length,
expected.length,
'Parsed result count mismatch. Saw '
+ results.length
+ '. Expected: ' + expected.length);
results.forEach(function(result, i) {
assert.deepEqual(result,
expected[i],
'Result mismatch:\nParsed: ' + inspect(result)
+ '\nExpected: ' + inspect(expected[i]));
});
}).on('error', function(err) {
assert(false, 'Unexpected error: ' + err.stack);
});
reqChunks.forEach(function(buf) {
busboy.write(buf);
});
busboy.end();
process.on('exit', function() {
assert(finishes === 1, 'busboy did not finish');
});

343  node_modules/busboy/test/test-types-multipart.js

@@ -0,0 +1,343 @@
var Busboy = require('..');
var path = require('path'),
inspect = require('util').inspect,
assert = require('assert');
var EMPTY_FN = function() {};
var t = 0,
group = path.basename(__filename, '.js') + '/';
var tests = [
{ source: [
['-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k',
'Content-Disposition: form-data; name="file_name_0"',
'',
'super alpha file',
'-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k',
'Content-Disposition: form-data; name="file_name_1"',
'',
'super beta file',
'-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k',
'Content-Disposition: form-data; name="upload_file_0"; filename="1k_a.dat"',
'Content-Type: application/octet-stream',
'',
'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA',
'-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k',
'Content-Disposition: form-data; name="upload_file_1"; filename="1k_b.dat"',
'Content-Type: application/octet-stream',
'',
'BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB',
'-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k--'
].join('\r\n')
],
boundary: '---------------------------paZqsnEHRufoShdX6fh0lUhXBP4k',
expected: [
['field', 'file_name_0', 'super alpha file', false, false, '7bit', 'text/plain'],
['field', 'file_name_1', 'super beta file', false, false, '7bit', 'text/plain'],
['file', 'upload_file_0', 1023, 0, '1k_a.dat', '7bit', 'application/octet-stream'],
['file', 'upload_file_1', 1023, 0, '1k_b.dat', '7bit', 'application/octet-stream']
],
what: 'Fields and files'
},
{ source: [
['------WebKitFormBoundaryTB2MiQ36fnSJlrhY',
'Content-Disposition: form-data; name="cont"',
'',
'some random content',
'------WebKitFormBoundaryTB2MiQ36fnSJlrhY',
'Content-Disposition: form-data; name="pass"',
'',
'some random pass',
'------WebKitFormBoundaryTB2MiQ36fnSJlrhY',
'Content-Disposition: form-data; name="bit"',
'',
'2',
'------WebKitFormBoundaryTB2MiQ36fnSJlrhY--'
].join('\r\n')
],
boundary: '----WebKitFormBoundaryTB2MiQ36fnSJlrhY',
expected: [
['field', 'cont', 'some random content', false, false, '7bit', 'text/plain'],
['field', 'pass', 'some random pass', false, false, '7bit', 'text/plain'],
['field', 'bit', '2', false, false, '7bit', 'text/plain']
],
what: 'Fields only'
},
{ source: [
''
],
boundary: '----WebKitFormBoundaryTB2MiQ36fnSJlrhY',
expected: [],
what: 'No fields and no files'
},
{ source: [
['-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k',
'Content-Disposition: form-data; name="file_name_0"',
'',
'super alpha file',
'-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k',
'Content-Disposition: form-data; name="upload_file_0"; filename="1k_a.dat"',
'Content-Type: application/octet-stream',
'',
'ABCDEFGHIJKLMNOPQRSTUVWXYZ',
'-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k--'
].join('\r\n')
],
boundary: '---------------------------paZqsnEHRufoShdX6fh0lUhXBP4k',
limits: {
fileSize: 13,
fieldSize: 5
},
expected: [
['field', 'file_name_0', 'super', false, true, '7bit', 'text/plain'],
['file', 'upload_file_0', 13, 2, '1k_a.dat', '7bit', 'application/octet-stream']
],
what: 'Fields and files (limits)'
},
{ source: [
['-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k',
'Content-Disposition: form-data; name="file_name_0"',
'',
'super alpha file',
'-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k',
'Content-Disposition: form-data; name="upload_file_0"; filename="1k_a.dat"',
'Content-Type: application/octet-stream',
'',
'ABCDEFGHIJKLMNOPQRSTUVWXYZ',
'-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k--'
].join('\r\n')
],
boundary: '---------------------------paZqsnEHRufoShdX6fh0lUhXBP4k',
limits: {
files: 0
},
expected: [
['field', 'file_name_0', 'super alpha file', false, false, '7bit', 'text/plain']
],
what: 'Fields and files (limits: 0 files)'
},
{ source: [
['-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k',
'Content-Disposition: form-data; name="file_name_0"',
'',
'super alpha file',
'-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k',
'Content-Disposition: form-data; name="file_name_1"',
'',
'super beta file',
'-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k',
'Content-Disposition: form-data; name="upload_file_0"; filename="1k_a.dat"',
'Content-Type: application/octet-stream',
'',
'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA',
'-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k',
'Content-Disposition: form-data; name="upload_file_1"; filename="1k_b.dat"',
'Content-Type: application/octet-stream',
'',
'BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB',
'-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k--'
].join('\r\n')
],
boundary: '---------------------------paZqsnEHRufoShdX6fh0lUhXBP4k',
expected: [
['field', 'file_name_0', 'super alpha file', false, false, '7bit', 'text/plain'],
['field', 'file_name_1', 'super beta file', false, false, '7bit', 'text/plain'],
],
events: ['field'],
what: 'Fields and (ignored) files'
},
{ source: [
['-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k',
'Content-Disposition: form-data; name="upload_file_0"; filename="/tmp/1k_a.dat"',
'Content-Type: application/octet-stream',
'',
'ABCDEFGHIJKLMNOPQRSTUVWXYZ',
'-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k',
'Content-Disposition: form-data; name="upload_file_1"; filename="C:\\files\\1k_b.dat"',
'Content-Type: application/octet-stream',
'',
'ABCDEFGHIJKLMNOPQRSTUVWXYZ',
'-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k',
'Content-Disposition: form-data; name="upload_file_2"; filename="relative/1k_c.dat"',
'Content-Type: application/octet-stream',
'',
'ABCDEFGHIJKLMNOPQRSTUVWXYZ',
'-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k--'
].join('\r\n')
],
boundary: '---------------------------paZqsnEHRufoShdX6fh0lUhXBP4k',
expected: [
['file', 'upload_file_0', 26, 0, '1k_a.dat', '7bit', 'application/octet-stream'],
['file', 'upload_file_1', 26, 0, '1k_b.dat', '7bit', 'application/octet-stream'],
['file', 'upload_file_2', 26, 0, '1k_c.dat', '7bit', 'application/octet-stream']
],
what: 'Files with filenames containing paths'
},
{ source: [
['-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k',
'Content-Disposition: form-data; name="upload_file_0"; filename="/absolute/1k_a.dat"',
'Content-Type: application/octet-stream',
'',
'ABCDEFGHIJKLMNOPQRSTUVWXYZ',
'-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k',
'Content-Disposition: form-data; name="upload_file_1"; filename="C:\\absolute\\1k_b.dat"',
'Content-Type: application/octet-stream',
'',
'ABCDEFGHIJKLMNOPQRSTUVWXYZ',
'-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k',
'Content-Disposition: form-data; name="upload_file_2"; filename="relative/1k_c.dat"',
'Content-Type: application/octet-stream',
'',
'ABCDEFGHIJKLMNOPQRSTUVWXYZ',
'-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k--'
].join('\r\n')
],
boundary: '---------------------------paZqsnEHRufoShdX6fh0lUhXBP4k',
preservePath: true,
expected: [
['file', 'upload_file_0', 26, 0, '/absolute/1k_a.dat', '7bit', 'application/octet-stream'],
['file', 'upload_file_1', 26, 0, 'C:\\absolute\\1k_b.dat', '7bit', 'application/octet-stream'],
['file', 'upload_file_2', 26, 0, 'relative/1k_c.dat', '7bit', 'application/octet-stream']
],
what: 'Paths to be preserved through the preservePath option'
},
{ source: [
['------WebKitFormBoundaryTB2MiQ36fnSJlrhY',
'Content-Disposition: form-data; name="cont"',
'Content-Type: ',
'',
'some random content',
'------WebKitFormBoundaryTB2MiQ36fnSJlrhY',
'Content-Disposition: ',
'',
'some random pass',
'------WebKitFormBoundaryTB2MiQ36fnSJlrhY--'
].join('\r\n')
],
boundary: '----WebKitFormBoundaryTB2MiQ36fnSJlrhY',
expected: [
['field', 'cont', 'some random content', false, false, '7bit', 'text/plain']
],
what: 'Empty content-type and empty content-disposition'
},
{ source: [
['--asdasdasdasd\r\n',
'Content-Type: text/plain\r\n',
'Content-Disposition: form-data; name="foo"\r\n',
'\r\n',
'asd\r\n',
'--asdasdasdasd--'
].join(':)')
],
boundary: 'asdasdasdasd',
expected: [],
shouldError: 'Unexpected end of multipart data',
what: 'Stopped mid-header'
},
{ source: [
['------WebKitFormBoundaryTB2MiQ36fnSJlrhY',
'Content-Disposition: form-data; name="cont"',
'Content-Type: application/json',
'',
'{}',
'------WebKitFormBoundaryTB2MiQ36fnSJlrhY--',
].join('\r\n')
],
boundary: '----WebKitFormBoundaryTB2MiQ36fnSJlrhY',
expected: [
['field', 'cont', '{}', false, false, '7bit', 'application/json']
],
what: 'content-type for fields'
},
{ source: [
'------WebKitFormBoundaryTB2MiQ36fnSJlrhY--\r\n'
],
boundary: '----WebKitFormBoundaryTB2MiQ36fnSJlrhY',
expected: [],
what: 'empty form'
}
];
function next() {
if (t === tests.length)
return;
var v = tests[t];
var busboy = new Busboy({
limits: v.limits,
preservePath: v.preservePath,
headers: {
'content-type': 'multipart/form-data; boundary=' + v.boundary
}
}),
finishes = 0,
results = [];
if (v.events === undefined || v.events.indexOf('field') > -1) {
busboy.on('field', function(key, val, keyTrunc, valTrunc, encoding, contype) {
results.push(['field', key, val, keyTrunc, valTrunc, encoding, contype]);
});
}
if (v.events === undefined || v.events.indexOf('file') > -1) {
busboy.on('file', function(fieldname, stream, filename, encoding, mimeType) {
var nb = 0,
info = ['file',
fieldname,
nb,
0,
filename,
encoding,
mimeType];
results.push(info);
stream.on('data', function(d) {
nb += d.length;
}).on('limit', function() {
++info[3];
}).on('end', function() {
info[2] = nb;
if (stream.truncated)
++info[3];
});
});
}
busboy.on('finish', function() {
assert(finishes++ === 0, makeMsg(v.what, 'finish emitted multiple times'));
assert.deepEqual(results.length,
v.expected.length,
makeMsg(v.what, 'Parsed result count mismatch. Saw '
+ results.length
+ '. Expected: ' + v.expected.length));
results.forEach(function(result, i) {
assert.deepEqual(result,
v.expected[i],
makeMsg(v.what,
'Result mismatch:\nParsed: ' + inspect(result)
+ '\nExpected: ' + inspect(v.expected[i]))
);
});
++t;
next();
}).on('error', function(err) {
if (!v.shouldError || v.shouldError !== err.message)
assert(false, makeMsg(v.what, 'Unexpected error: ' + err));
});
v.source.forEach(function(s) {
busboy.write(Buffer.from(s, 'utf8'), EMPTY_FN);
});
busboy.end();
}
next();
function makeMsg(what, msg) {
return '[' + group + what + ']: ' + msg;
}
process.on('exit', function() {
assert(t === tests.length,
makeMsg('_exit',
'Only finished ' + t + '/' + tests.length + ' tests'));
});

183  node_modules/busboy/test/test-types-urlencoded.js

@@ -0,0 +1,183 @@
var Busboy = require('..');
var path = require('path'),
inspect = require('util').inspect,
assert = require('assert');
var EMPTY_FN = function() {};
var t = 0,
group = path.basename(__filename, '.js') + '/';
var tests = [
{ source: ['foo'],
expected: [['foo', '', false, false]],
what: 'Unassigned value'
},
{ source: ['foo=bar'],
expected: [['foo', 'bar', false, false]],
what: 'Assigned value'
},
{ source: ['foo&bar=baz'],
expected: [['foo', '', false, false],
['bar', 'baz', false, false]],
what: 'Unassigned and assigned value'
},
{ source: ['foo=bar&baz'],
expected: [['foo', 'bar', false, false],
['baz', '', false, false]],
what: 'Assigned and unassigned value'
},
{ source: ['foo=bar&baz=bla'],
expected: [['foo', 'bar', false, false],
['baz', 'bla', false, false]],
what: 'Two assigned values'
},
{ source: ['foo&bar'],
expected: [['foo', '', false, false],
['bar', '', false, false]],
what: 'Two unassigned values'
},
{ source: ['foo&bar&'],
expected: [['foo', '', false, false],
['bar', '', false, false]],
what: 'Two unassigned values and ampersand'
},
{ source: ['foo=bar+baz%2Bquux'],
expected: [['foo', 'bar baz+quux', false, false]],
what: 'Assigned value with (plus) space'
},
{ source: ['foo=bar%20baz%21'],
expected: [['foo', 'bar baz!', false, false]],
what: 'Assigned value with encoded bytes'
},
{ source: ['foo%20bar=baz%20bla%21'],
expected: [['foo bar', 'baz bla!', false, false]],
what: 'Assigned value with encoded bytes #2'
},
{ source: ['foo=bar%20baz%21&num=1000'],
expected: [['foo', 'bar baz!', false, false],
['num', '1000', false, false]],
what: 'Two assigned values, one with encoded bytes'
},
{ source: ['foo=bar&baz=bla'],
expected: [],
what: 'Limits: zero fields',
limits: { fields: 0 }
},
{ source: ['foo=bar&baz=bla'],
expected: [['foo', 'bar', false, false]],
what: 'Limits: one field',
limits: { fields: 1 }
},
{ source: ['foo=bar&baz=bla'],
expected: [['foo', 'bar', false, false],
['baz', 'bla', false, false]],
what: 'Limits: field part lengths match limits',
limits: { fieldNameSize: 3, fieldSize: 3 }
},
{ source: ['foo=bar&baz=bla'],
expected: [['fo', 'bar', true, false],
['ba', 'bla', true, false]],
what: 'Limits: truncated field name',
limits: { fieldNameSize: 2 }
},
{ source: ['foo=bar&baz=bla'],
expected: [['foo', 'ba', false, true],
['baz', 'bl', false, true]],
what: 'Limits: truncated field value',
limits: { fieldSize: 2 }
},
{ source: ['foo=bar&baz=bla'],
expected: [['fo', 'ba', true, true],
['ba', 'bl', true, true]],
what: 'Limits: truncated field name and value',
limits: { fieldNameSize: 2, fieldSize: 2 }
},
{ source: ['foo=bar&baz=bla'],
expected: [['fo', '', true, true],
['ba', '', true, true]],
what: 'Limits: truncated field name and zero value limit',
limits: { fieldNameSize: 2, fieldSize: 0 }
},
{ source: ['foo=bar&baz=bla'],
expected: [['', '', true, true],
['', '', true, true]],
what: 'Limits: truncated zero field name and zero value limit',
limits: { fieldNameSize: 0, fieldSize: 0 }
},
{ source: ['&'],
expected: [],
what: 'Ampersand'
},
{ source: ['&&&&&'],
expected: [],
what: 'Many ampersands'
},
{ source: ['='],
expected: [['', '', false, false]],
what: 'Assigned value, empty name and value'
},
{ source: [''],
expected: [],
what: 'Nothing'
},
];
function next() {
if (t === tests.length)
return;
var v = tests[t];
var busboy = new Busboy({
limits: v.limits,
headers: {
'content-type': 'application/x-www-form-urlencoded; charset=utf-8'
}
}),
finishes = 0,
results = [];
busboy.on('field', function(key, val, keyTrunc, valTrunc) {
results.push([key, val, keyTrunc, valTrunc]);
});
busboy.on('file', function() {
throw new Error(makeMsg(v.what, 'Unexpected file'));
});
busboy.on('finish', function() {
assert(finishes++ === 0, makeMsg(v.what, 'finish emitted multiple times'));
assert.deepEqual(results.length,
v.expected.length,
makeMsg(v.what, 'Parsed result count mismatch. Saw '
+ results.length
+ '. Expected: ' + v.expected.length));
var i = 0;
results.forEach(function(result) {
assert.deepEqual(result,
v.expected[i],
makeMsg(v.what,
'Result mismatch:\nParsed: ' + inspect(result)
+ '\nExpected: ' + inspect(v.expected[i]))
);
++i;
});
++t;
next();
});
v.source.forEach(function(s) {
busboy.write(Buffer.from(s, 'utf8'), EMPTY_FN);
});
busboy.end();
}
next();
function makeMsg(what, msg) {
return '[' + group + what + ']: ' + msg;
}
process.on('exit', function() {
assert(t === tests.length, makeMsg('_exit', 'Only finished ' + t + '/' + tests.length + ' tests'));
});

66
node_modules/busboy/test/test-utils-decoder.js

@ -0,0 +1,66 @@
var Decoder = require('../lib/utils').Decoder;
var path = require('path'),
assert = require('assert');
var group = path.basename(__filename, '.js') + '/';
[
{ source: ['Hello world'],
expected: 'Hello world',
what: 'No encoded bytes'
},
{ source: ['Hello%20world'],
expected: 'Hello world',
what: 'One full encoded byte'
},
{ source: ['Hello%20world%21'],
expected: 'Hello world!',
what: 'Two full encoded bytes'
},
{ source: ['Hello%', '20world'],
expected: 'Hello world',
what: 'One full encoded byte split #1'
},
{ source: ['Hello%2', '0world'],
expected: 'Hello world',
what: 'One full encoded byte split #2'
},
{ source: ['Hello%20', 'world'],
expected: 'Hello world',
what: 'One full encoded byte (concat)'
},
{ source: ['Hello%2Qworld'],
expected: 'Hello%2Qworld',
what: 'Malformed encoded byte #1'
},
{ source: ['Hello%world'],
expected: 'Hello%world',
what: 'Malformed encoded byte #2'
},
{ source: ['Hello+world'],
expected: 'Hello world',
what: 'Plus to space'
},
{ source: ['Hello+world%21'],
expected: 'Hello world!',
what: 'Plus and encoded byte'
},
{ source: ['5%2B5%3D10'],
expected: '5+5=10',
what: 'Encoded plus'
},
{ source: ['5+%2B+5+%3D+10'],
expected: '5 + 5 = 10',
what: 'Spaces and encoded plus'
},
].forEach(function(v) {
var dec = new Decoder(), result = '';
v.source.forEach(function(s) {
result += dec.write(s);
});
var msg = '[' + group + v.what + ']: decoded string mismatch.\n'
+ 'Saw: ' + result + '\n'
+ 'Expected: ' + v.expected;
assert.deepEqual(result, v.expected, msg);
});

96
node_modules/busboy/test/test-utils-parse-params.js

@ -0,0 +1,96 @@
var parseParams = require('../lib/utils').parseParams;
var path = require('path'),
assert = require('assert'),
inspect = require('util').inspect;
var group = path.basename(__filename, '.js') + '/';
[
{ source: 'video/ogg',
expected: ['video/ogg'],
what: 'No parameters'
},
{ source: 'video/ogg;',
expected: ['video/ogg'],
what: 'No parameters (with separator)'
},
{ source: 'video/ogg; ',
expected: ['video/ogg'],
what: 'No parameters (with separator followed by whitespace)'
},
{ source: ';video/ogg',
expected: ['', 'video/ogg'],
what: 'Empty parameter'
},
{ source: 'video/*',
expected: ['video/*'],
what: 'Subtype with asterisk'
},
{ source: 'text/plain; encoding=utf8',
expected: ['text/plain', ['encoding', 'utf8']],
what: 'Unquoted'
},
{ source: 'text/plain; encoding=',
expected: ['text/plain', ['encoding', '']],
what: 'Unquoted empty string'
},
{ source: 'text/plain; encoding="utf8"',
expected: ['text/plain', ['encoding', 'utf8']],
what: 'Quoted'
},
{ source: 'text/plain; greeting="hello \\"world\\""',
expected: ['text/plain', ['greeting', 'hello "world"']],
what: 'Quotes within quoted'
},
{ source: 'text/plain; encoding=""',
expected: ['text/plain', ['encoding', '']],
what: 'Quoted empty string'
},
{ source: 'text/plain; encoding="utf8";\t foo=bar;test',
expected: ['text/plain', ['encoding', 'utf8'], ['foo', 'bar'], 'test'],
what: 'Multiple params with various spacing'
},
{ source: "text/plain; filename*=iso-8859-1'en'%A3%20rates",
expected: ['text/plain', ['filename', '£ rates']],
what: 'Extended parameter (RFC 5987) with language'
},
{ source: "text/plain; filename*=utf-8''%c2%a3%20and%20%e2%82%ac%20rates",
expected: ['text/plain', ['filename', '£ and € rates']],
what: 'Extended parameter (RFC 5987) without language'
},
{ source: "text/plain; filename*=utf-8''%E6%B5%8B%E8%AF%95%E6%96%87%E6%A1%A3",
expected: ['text/plain', ['filename', '测试文档']],
what: 'Extended parameter (RFC 5987) without language #2'
},
{ source: "text/plain; filename*=iso-8859-1'en'%A3%20rates; altfilename*=utf-8''%c2%a3%20and%20%e2%82%ac%20rates",
expected: ['text/plain', ['filename', '£ rates'], ['altfilename', '£ and € rates']],
what: 'Multiple extended parameters (RFC 5987) with mixed charsets'
},
{ source: "text/plain; filename*=iso-8859-1'en'%A3%20rates; altfilename=\"foobarbaz\"",
expected: ['text/plain', ['filename', '£ rates'], ['altfilename', 'foobarbaz']],
what: 'Mixed regular and extended parameters (RFC 5987)'
},
{ source: "text/plain; filename=\"foobarbaz\"; altfilename*=iso-8859-1'en'%A3%20rates",
expected: ['text/plain', ['filename', 'foobarbaz'], ['altfilename', '£ rates']],
what: 'Mixed regular and extended parameters (RFC 5987) #2'
},
{ source: 'text/plain; filename="C:\\folder\\test.png"',
expected: ['text/plain', ['filename', 'C:\\folder\\test.png']],
what: 'Unescaped backslashes should be considered backslashes'
},
{ source: 'text/plain; filename="John \\"Magic\\" Smith.png"',
expected: ['text/plain', ['filename', 'John "Magic" Smith.png']],
what: 'Escaped double-quotes should be considered double-quotes'
},
{ source: 'multipart/form-data; charset=utf-8; boundary=0xKhTmLbOuNdArY',
expected: ['multipart/form-data', ['charset', 'utf-8'], ['boundary', '0xKhTmLbOuNdArY']],
what: 'Multiple non-quoted parameters'
},
].forEach(function(v) {
var result = parseParams(v.source),
msg = '[' + group + v.what + ']: parsed parameters mismatch.\n'
+ 'Saw: ' + inspect(result) + '\n'
+ 'Expected: ' + inspect(v.expected);
assert.deepEqual(result, v.expected, msg);
});

4
node_modules/busboy/test/test.js

@ -0,0 +1,4 @@
require('fs').readdirSync(__dirname).forEach(function(f) {
if (f.substr(0, 5) === 'test-')
require('./' + f);
});

16
node_modules/dicer/.travis.yml

@ -0,0 +1,16 @@
sudo: false
language: cpp
notifications:
email: false
env:
matrix:
- TRAVIS_NODE_VERSION="4"
- TRAVIS_NODE_VERSION="6"
- TRAVIS_NODE_VERSION="8"
- TRAVIS_NODE_VERSION="10"
install:
- rm -rf ~/.nvm && git clone https://github.com/creationix/nvm.git ~/.nvm && source ~/.nvm/nvm.sh && nvm install $TRAVIS_NODE_VERSION
- node --version
- npm --version
- npm install
script: npm test

19
node_modules/dicer/LICENSE

@ -0,0 +1,19 @@
Copyright Brian White. All rights reserved.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to
deal in the Software without restriction, including without limitation the
rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
sell copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
IN THE SOFTWARE.

122
node_modules/dicer/README.md

@ -0,0 +1,122 @@
Description
===========
A very fast streaming multipart parser for node.js.
Benchmarks can be found [here](https://github.com/mscdex/dicer/wiki/Benchmarks).
Requirements
============
* [node.js](http://nodejs.org/) -- v4.5.0 or newer
Install
============
npm install dicer
Examples
========
* Parse an HTTP form upload
```javascript
var inspect = require('util').inspect,
http = require('http');
var Dicer = require('dicer');
// quick and dirty way to parse multipart boundary
var RE_BOUNDARY = /^multipart\/.+?(?:; boundary=(?:(?:"(.+)")|(?:([^\s]+))))$/i,
HTML = Buffer.from('<html><head></head><body>\
<form method="POST" enctype="multipart/form-data">\
<input type="text" name="textfield"><br />\
<input type="file" name="filefield"><br />\
<input type="submit">\
</form>\
</body></html>'),
PORT = 8080;
http.createServer(function(req, res) {
var m;
if (req.method === 'POST'
&& req.headers['content-type']
&& (m = RE_BOUNDARY.exec(req.headers['content-type']))) {
var d = new Dicer({ boundary: m[1] || m[2] });
d.on('part', function(p) {
console.log('New part!');
p.on('header', function(header) {
for (var h in header) {
console.log('Part header: k: ' + inspect(h)
+ ', v: ' + inspect(header[h]));
}
});
p.on('data', function(data) {
console.log('Part data: ' + inspect(data.toString()));
});
p.on('end', function() {
console.log('End of part\n');
});
});
d.on('finish', function() {
console.log('End of parts');
res.writeHead(200);
res.end('Form submission successful!');
});
req.pipe(d);
} else if (req.method === 'GET' && req.url === '/') {
res.writeHead(200);
res.end(HTML);
} else {
res.writeHead(404);
res.end();
}
}).listen(PORT, function() {
console.log('Listening for requests on port ' + PORT);
});
```
API
===
_Dicer_ is a _WritableStream_
Dicer (special) events
----------------------
* **finish**() - Emitted when all parts have been parsed and the Dicer instance has been ended.
* **part**(< _PartStream_ >stream) - Emitted when a new part has been found.
* **preamble**(< _PartStream_ >stream) - Emitted for the preamble if you happen to need it (can usually be ignored).
* **trailer**(< _Buffer_ >data) - Emitted when trailing data is found after the terminating boundary (as with the preamble, this can usually be ignored too); a minimal sketch of listening to both follows this list.
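A minimal, illustrative sketch of wiring up both optional events (the boundary value here is just a placeholder; adapt it to your own request):
```javascript
// Illustrative sketch only: listening to the optional 'preamble' and
// 'trailer' events. Both can usually be ignored; this just logs whatever
// data shows up before the first boundary and after the terminating one.
var inspect = require('util').inspect;
var Dicer = require('dicer');

var d = new Dicer({ boundary: 'AaB03x' }); // placeholder boundary
d.on('preamble', function(p) {
  p.on('data', function(data) {
    console.log('Preamble data: ' + inspect(data.toString()));
  });
  p.on('end', function() {
    console.log('End of preamble');
  });
});
d.on('trailer', function(data) {
  console.log('Trailer data: ' + inspect(data.toString()));
});
d.on('part', function(p) {
  p.resume(); // drain parts; see the full HTTP example above for real handling
});
```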
Dicer methods
-------------
* **(constructor)**(< _object_ >config) - Creates and returns a new Dicer instance with the following valid `config` settings:
* **boundary** - _string_ - This is the boundary used to detect the beginning of a new part.
* **headerFirst** - _boolean_ - If true, preamble header parsing will be performed first.
* **maxHeaderPairs** - _integer_ - The maximum number of header key=>value pairs to parse. **Default:** 2000 (same as node's http).
* **setBoundary**(< _string_ >boundary) - _(void)_ - Sets the boundary to use for parsing and performs some initialization needed for parsing. You should only need to use this if you set `headerFirst` to true in the constructor and are parsing the boundary from the preamble header; see the sketch after this list.
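A minimal sketch of the `headerFirst` flow, where the boundary is taken from a preamble header instead of being passed to the constructor. The header name and regexp below are assumptions borrowed from the HTTP example above, not a fixed API contract:
```javascript
// Hypothetical headerFirst sketch: read the boundary out of a preamble
// 'content-type' header and hand it to setBoundary(). Header values arrive
// as arrays of strings (see the PartStream 'header' event below).
var Dicer = require('dicer');
var RE_BOUNDARY = /^multipart\/.+?(?:; boundary=(?:(?:"(.+)")|(?:([^\s]+))))$/i;

var d = new Dicer({ headerFirst: true });
d.on('preamble', function(p) {
  p.on('header', function(header) {
    var ct = (header['content-type'] || [''])[0];
    var m = RE_BOUNDARY.exec(ct);
    if (m)
      d.setBoundary(m[1] || m[2]);
    else
      d.emit('error', new Error('No boundary found in preamble header'));
  });
  p.resume(); // not interested in any preamble body data here
});
d.on('part', function(part) {
  part.resume(); // handle parts as in the example above
});
```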
_PartStream_ is a _ReadableStream_
PartStream (special) events
---------------------------
* **header**(< _object_ >header) - An object containing the header for this particular part. Each property value is an _array_ of one or more string values.
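For illustration, given a Dicer instance `d` as in the examples above, reading a single value out of that object might look like the following (the field names shown are hypothetical):
```javascript
// Illustrative sketch: a part 'header' object typically looks like
//   { 'content-disposition': ['form-data; name="field1"'],
//     'content-type': ['text/plain'] }
// so individual values are read off the first array element.
d.on('part', function(part) {
  part.on('header', function(header) {
    var disposition = (header['content-disposition'] || [''])[0];
    console.log('Part disposition: ' + disposition);
  });
  part.resume(); // drain the body; this sketch only looks at headers
});
```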

63
node_modules/dicer/bench/dicer-bench-multipart-parser.js

@ -0,0 +1,63 @@
var assert = require('assert');
var Dicer = require('..'),
boundary = '-----------------------------168072824752491622650073',
d = new Dicer({ boundary: boundary }),
mb = 100,
buffer = createMultipartBuffer(boundary, mb * 1024 * 1024),
callbacks =
{ partBegin: -1,
partEnd: -1,
headerField: -1,
headerValue: -1,
partData: -1,
end: -1,
};
d.on('part', function(p) {
callbacks.partBegin++;
p.on('header', function(header) {
/*for (var h in header)
console.log('Part header: k: ' + inspect(h) + ', v: ' + inspect(header[h]));*/
});
p.on('data', function(data) {
callbacks.partData++;
//console.log('Part data: ' + inspect(data.toString()));
});
p.on('end', function() {
//console.log('End of part\n');
callbacks.partEnd++;
});
});
d.on('end', function() {
//console.log('End of parts');
callbacks.end++;
});
var start = +new Date(),
nparsed = d.write(buffer),
duration = +new Date - start,
mbPerSec = (mb / (duration / 1000)).toFixed(2);
console.log(mbPerSec+' mb/sec');
//assert.equal(nparsed, buffer.length);
function createMultipartBuffer(boundary, size) {
var head =
'--'+boundary+'\r\n'
+ 'content-disposition: form-data; name="field1"\r\n'
+ '\r\n'
, tail = '\r\n--'+boundary+'--\r\n'
, buffer = Buffer.allocUnsafe(size);
buffer.write(head, 'ascii', 0);
buffer.write(tail, 'ascii', buffer.length - tail.length);
return buffer;
}
process.on('exit', function() {
/*for (var k in callbacks) {
assert.equal(0, callbacks[k], k+' count off by '+callbacks[k]);
}*/
});

70
node_modules/dicer/bench/formidable-bench-multipart-parser.js

@ -0,0 +1,70 @@
var assert = require('assert');
require('../node_modules/formidable/test/common');
var multipartParser = require('../node_modules/formidable/lib/multipart_parser'),
MultipartParser = multipartParser.MultipartParser,
parser = new MultipartParser(),
boundary = '-----------------------------168072824752491622650073',
mb = 100,
buffer = createMultipartBuffer(boundary, mb * 1024 * 1024),
callbacks =
{ partBegin: -1,
partEnd: -1,
headerField: -1,
headerValue: -1,
partData: -1,
end: -1,
};
parser.initWithBoundary(boundary);
parser.onHeaderField = function() {
callbacks.headerField++;
};
parser.onHeaderValue = function() {
callbacks.headerValue++;
};
parser.onPartBegin = function() {
callbacks.partBegin++;
};
parser.onPartData = function() {
callbacks.partData++;
};
parser.onPartEnd = function() {
callbacks.partEnd++;
};
parser.onEnd = function() {
callbacks.end++;
};
var start = +new Date(),
nparsed = parser.write(buffer),
duration = +new Date - start,
mbPerSec = (mb / (duration / 1000)).toFixed(2);
console.log(mbPerSec+' mb/sec');
//assert.equal(nparsed, buffer.length);
function createMultipartBuffer(boundary, size) {
var head =
'--'+boundary+'\r\n'
+ 'content-disposition: form-data; name="field1"\r\n'
+ '\r\n'
, tail = '\r\n--'+boundary+'--\r\n'
, buffer = Buffer.allocUnsafe(size);
buffer.write(head, 'ascii', 0);
buffer.write(tail, 'ascii', buffer.length - tail.length);
return buffer;
}
process.on('exit', function() {
/*for (var k in callbacks) {
assert.equal(0, callbacks[k], k+' count off by '+callbacks[k]);
}*/
});

56
node_modules/dicer/bench/multipartser-bench-multipart-parser.js

@ -0,0 +1,56 @@
var assert = require('assert');
var multipartser = require('multipartser'),
boundary = '-----------------------------168072824752491622650073',
parser = multipartser(),
mb = 100,
buffer = createMultipartBuffer(boundary, mb * 1024 * 1024),
callbacks =
{ partBegin: -1,
partEnd: -1,
headerField: -1,
headerValue: -1,
partData: -1,
end: -1,
};
parser.boundary( boundary );
parser.on( 'part', function ( part ) {
});
parser.on( 'end', function () {
//console.log( 'completed parsing' );
});
parser.on( 'error', function ( error ) {
console.error( error );
});
var start = +new Date(),
nparsed = parser.data(buffer),
nend = parser.end(),
duration = +new Date - start,
mbPerSec = (mb / (duration / 1000)).toFixed(2);
console.log(mbPerSec+' mb/sec');
//assert.equal(nparsed, buffer.length);
function createMultipartBuffer(boundary, size) {
var head =
'--'+boundary+'\r\n'
+ 'content-disposition: form-data; name="field1"\r\n'
+ '\r\n'
, tail = '\r\n--'+boundary+'--\r\n'
, buffer = Buffer.allocUnsafe(size);
buffer.write(head, 'ascii', 0);
buffer.write(tail, 'ascii', buffer.length - tail.length);
return buffer;
}
process.on('exit', function() {
/*for (var k in callbacks) {
assert.equal(0, callbacks[k], k+' count off by '+callbacks[k]);
}*/
});

76
node_modules/dicer/bench/multiparty-bench-multipart-parser.js

@ -0,0 +1,76 @@
var assert = require('assert'),
Form = require('multiparty').Form,
boundary = '-----------------------------168072824752491622650073',
mb = 100,
buffer = createMultipartBuffer(boundary, mb * 1024 * 1024),
callbacks =
{ partBegin: -1,
partEnd: -1,
headerField: -1,
headerValue: -1,
partData: -1,
end: -1,
};
var form = new Form({ boundary: boundary });
hijack('onParseHeaderField', function() {
callbacks.headerField++;
});
hijack('onParseHeaderValue', function() {
callbacks.headerValue++;
});
hijack('onParsePartBegin', function() {
callbacks.partBegin++;
});
hijack('onParsePartData', function() {
callbacks.partData++;
});
hijack('onParsePartEnd', function() {
callbacks.partEnd++;
});
form.on('finish', function() {
callbacks.end++;
});
var start = new Date();
form.write(buffer, function(err) {
var duration = new Date() - start;
assert.ifError(err);
var mbPerSec = (mb / (duration / 1000)).toFixed(2);
console.log(mbPerSec+' mb/sec');
});
//assert.equal(nparsed, buffer.length);
function createMultipartBuffer(boundary, size) {
var head =
'--'+boundary+'\r\n'
+ 'content-disposition: form-data; name="field1"\r\n'
+ '\r\n'
, tail = '\r\n--'+boundary+'--\r\n'
, buffer = Buffer.allocUnsafe(size);
buffer.write(head, 'ascii', 0);
buffer.write(tail, 'ascii', buffer.length - tail.length);
return buffer;
}
process.on('exit', function() {
/*for (var k in callbacks) {
assert.equal(0, callbacks[k], k+' count off by '+callbacks[k]);
}*/
});
function hijack(name, fn) {
var oldFn = form[name];
form[name] = function() {
fn();
return oldFn.apply(this, arguments);
};
}

63
node_modules/dicer/bench/parted-bench-multipart-parser.js

@ -0,0 +1,63 @@
// A special, edited version of the multipart parser from parted is needed here
// because otherwise it attempts to do some things above and beyond just parsing
// -- like saving to disk and whatnot
var assert = require('assert');
var Parser = require('./parted-multipart'),
boundary = '-----------------------------168072824752491622650073',
parser = new Parser('boundary=' + boundary),
mb = 100,
buffer = createMultipartBuffer(boundary, mb * 1024 * 1024),
callbacks =
{ partBegin: -1,
partEnd: -1,
headerField: -1,
headerValue: -1,
partData: -1,
end: -1,
};
parser.on('header', function() {
//callbacks.headerField++;
});
parser.on('data', function() {
//callbacks.partBegin++;
});
parser.on('part', function() {
});
parser.on('end', function() {
//callbacks.end++;
});
var start = +new Date(),
nparsed = parser.write(buffer),
duration = +new Date - start,
mbPerSec = (mb / (duration / 1000)).toFixed(2);
console.log(mbPerSec+' mb/sec');
//assert.equal(nparsed, buffer.length);
function createMultipartBuffer(boundary, size) {
var head =
'--'+boundary+'\r\n'
+ 'content-disposition: form-data; name="field1"\r\n'
+ '\r\n'
, tail = '\r\n--'+boundary+'--\r\n'
, buffer = Buffer.allocUnsafe(size);
buffer.write(head, 'ascii', 0);
buffer.write(tail, 'ascii', buffer.length - tail.length);
return buffer;
}
process.on('exit', function() {
/*for (var k in callbacks) {
assert.equal(0, callbacks[k], k+' count off by '+callbacks[k]);
}*/
});

485
node_modules/dicer/bench/parted-multipart.js

@ -0,0 +1,485 @@
/**
* Parted (https://github.com/chjj/parted)
* A streaming multipart state parser.
* Copyright (c) 2011, Christopher Jeffrey. (MIT Licensed)
*/
var fs = require('fs')
, path = require('path')
, EventEmitter = require('events').EventEmitter
, StringDecoder = require('string_decoder').StringDecoder
, set = require('qs').set
, each = Array.prototype.forEach;
/**
* Character Constants
*/
var DASH = '-'.charCodeAt(0)
, CR = '\r'.charCodeAt(0)
, LF = '\n'.charCodeAt(0)
, COLON = ':'.charCodeAt(0)
, SPACE = ' '.charCodeAt(0);
/**
* Parser
*/
var Parser = function(type, options) {
if (!(this instanceof Parser)) {
return new Parser(type, options);
}
EventEmitter.call(this);
this.writable = true;
this.readable = true;
this.options = options || {};
var key = grab(type, 'boundary');
if (!key) {
return this._error('No boundary key found.');
}
this.key = Buffer.from('\r\n--' + key); // Buffer.allocUnsafe() expects a size, not a string
this._key = {};
each.call(this.key, function(ch) {
this._key[ch] = true;
}, this);
this.state = 'start';
this.pending = 0;
this.written = 0;
this.writtenDisk = 0;
this.buff = Buffer.allocUnsafe(200);
this.preamble = true;
this.epilogue = false;
this._reset();
};
Parser.prototype.__proto__ = EventEmitter.prototype;
/**
* Parsing
*/
Parser.prototype.write = function(data) {
if (!this.writable
|| this.epilogue) return;
try {
this._parse(data);
} catch (e) {
this._error(e);
}
return true;
};
Parser.prototype.end = function(data) {
if (!this.writable) return;
if (data) this.write(data);
if (!this.epilogue) {
return this._error('Message underflow.');
}
return true;
};
Parser.prototype._parse = function(data) {
var i = 0
, len = data.length
, buff = this.buff
, key = this.key
, ch
, val
, j;
for (; i < len; i++) {
if (this.pos >= 200) {
return this._error('Potential buffer overflow.');
}
ch = data[i];
switch (this.state) {
case 'start':
switch (ch) {
case DASH:
this.pos = 3;
this.state = 'key';
break;
default:
break;
}
break;
case 'key':
if (this.pos === key.length) {
this.state = 'key_end';
i--;
} else if (ch !== key[this.pos]) {
if (this.preamble) {
this.state = 'start';
i--;
} else {
this.state = 'body';
val = this.pos - i;
if (val > 0) {
this._write(key.slice(0, val));
}
i--;
}
} else {
this.pos++;
}
break;
case 'key_end':
switch (ch) {
case CR:
this.state = 'key_line_end';
break;
case DASH:
this.state = 'key_dash_end';
break;
default:
return this._error('Expected CR or DASH.');
}
break;
case 'key_line_end':
switch (ch) {
case LF:
if (this.preamble) {
this.preamble = false;
} else {
this._finish();
}
this.state = 'header_name';
this.pos = 0;
break;
default:
return this._error('Expected CR.');
}
break;
case 'key_dash_end':
switch (ch) {
case DASH:
this.epilogue = true;
this._finish();
return;
default:
return this._error('Expected DASH.');
}
break;
case 'header_name':
switch (ch) {
case COLON:
this.header = buff.toString('ascii', 0, this.pos);
this.pos = 0;
this.state = 'header_val';
break;
default:
buff[this.pos++] = ch | 32;
break;
}
break;
case 'header_val':
switch (ch) {
case CR:
this.state = 'header_val_end';
break;
case SPACE:
if (this.pos === 0) {
break;
}
; // FALL-THROUGH
default:
buff[this.pos++] = ch;
break;
}
break;
case 'header_val_end':
switch (ch) {
case LF:
val = buff.toString('ascii', 0, this.pos);
this._header(this.header, val);
this.pos = 0;
this.state = 'header_end';
break;
default:
return this._error('Expected LF.');
}
break;
case 'header_end':
switch (ch) {
case CR:
this.state = 'head_end';
break;
default:
this.state = 'header_name';
i--;
break;
}
break;
case 'head_end':
switch (ch) {
case LF:
this.state = 'body';
i++;
if (i >= len) return;
data = data.slice(i);
i = -1;
len = data.length;
break;
default:
return this._error('Expected LF.');
}
break;
case 'body':
switch (ch) {
case CR:
if (i > 0) {
this._write(data.slice(0, i));
}
this.pos = 1;
this.state = 'key';
data = data.slice(i);
i = 0;
len = data.length;
break;
default:
// boyer-moore-like algorithm
// at felixge's suggestion
while ((j = i + key.length - 1) < len) {
if (this._key[data[j]]) break;
i = j;
}
break;
}
break;
}
}
if (this.state === 'body') {
this._write(data);
}
};
Parser.prototype._header = function(name, val) {
/*if (name === 'content-disposition') {
this.field = grab(val, 'name');
this.file = grab(val, 'filename');
if (this.file) {
this.data = stream(this.file, this.options.path);
} else {
this.decode = new StringDecoder('utf8');
this.data = '';
}
}*/
return this.emit('header', name, val);
};
Parser.prototype._write = function(data) {
/*if (this.data == null) {
return this._error('No disposition.');
}
if (this.file) {
this.data.write(data);
this.writtenDisk += data.length;
} else {
this.data += this.decode.write(data);
this.written += data.length;
}*/
this.emit('data', data);
};
Parser.prototype._reset = function() {
this.pos = 0;
this.decode = null;
this.field = null;
this.data = null;
this.file = null;
this.header = null;
};
Parser.prototype._error = function(err) {
this.destroy();
this.emit('error', typeof err === 'string'
? new Error(err)
: err);
};
Parser.prototype.destroy = function(err) {
this.writable = false;
this.readable = false;
this._reset();
};
Parser.prototype._finish = function() {
var self = this
, field = this.field
, data = this.data
, file = this.file
, part;
this.pending++;
this._reset();
if (data && data.path) {
part = data.path;
data.end(next);
} else {
part = data;
next();
}
function next() {
if (!self.readable) return;
self.pending--;
self.emit('part', field, part);
if (data && data.path) {
self.emit('file', field, part, file);
}
if (self.epilogue && !self.pending) {
self.emit('end');
self.destroy();
}
}
};
/**
* Uploads
*/
Parser.root = process.platform === 'win32'
? 'C:/Temp'
: '/tmp';
/**
* Middleware
*/
Parser.middleware = function(options) {
options = options || {};
return function(req, res, next) {
if (options.ensureBody) {
req.body = {};
}
if (req.method === 'GET'
|| req.method === 'HEAD'
|| req._multipart) return next();
req._multipart = true;
var type = req.headers['content-type'];
if (type) type = type.split(';')[0].trim().toLowerCase();
if (type === 'multipart/form-data') {
Parser.handle(req, res, next, options);
} else {
next();
}
};
};
/**
* Handler
*/
Parser.handle = function(req, res, next, options) {
var parser = new Parser(req.headers['content-type'], options)
, diskLimit = options.diskLimit
, limit = options.limit
, parts = {}
, files = {};
parser.on('error', function(err) {
req.destroy();
next(err);
});
parser.on('part', function(field, part) {
set(parts, field, part);
});
parser.on('file', function(field, path, name) {
set(files, field, {
path: path,
name: name,
toString: function() {
return path;
}
});
});
parser.on('data', function() {
if (this.writtenDisk > diskLimit || this.written > limit) {
this.emit('error', new Error('Overflow.'));
this.destroy();
}
});
parser.on('end', next);
req.body = parts;
req.files = files;
req.pipe(parser);
};
/**
* Helpers
*/
var isWindows = process.platform === 'win32';
var stream = function(name, dir) {
var ext = path.extname(name) || ''
, name = path.basename(name, ext) || ''
, dir = dir || Parser.root
, tag;
tag = Math.random().toString(36).substring(2);
name = name.substring(0, 200) + '.' + tag;
name = path.join(dir, name) + ext.substring(0, 6);
name = name.replace(/\0/g, '');
if (isWindows) {
name = name.replace(/[:*<>|"?]/g, '');
}
return fs.createWriteStream(name);
};
var grab = function(str, name) {
if (!str) return;
var rx = new RegExp('\\b' + name + '\\s*=\\s*("[^"]+"|\'[^\']+\'|[^;,]+)', 'i')
, cap = rx.exec(str);
if (cap) {
return cap[1].trim().replace(/^['"]|['"]$/g, '');
}
};
/**
* Expose
*/
module.exports = Parser;

239
node_modules/dicer/lib/Dicer.js

@ -0,0 +1,239 @@
var WritableStream = require('stream').Writable,
inherits = require('util').inherits;
var StreamSearch = require('streamsearch');
var PartStream = require('./PartStream'),
HeaderParser = require('./HeaderParser');
var DASH = 45,
B_ONEDASH = Buffer.from('-'),
B_CRLF = Buffer.from('\r\n'),
EMPTY_FN = function() {};
function Dicer(cfg) {
if (!(this instanceof Dicer))
return new Dicer(cfg);
WritableStream.call(this, cfg);
if (!cfg || (!cfg.headerFirst && typeof cfg.boundary !== 'string'))
throw new TypeError('Boundary required');
if (typeof cfg.boundary === 'string')
this.setBoundary(cfg.boundary);
else
this._bparser = undefined;
this._headerFirst = cfg.headerFirst;
var self = this;
this._dashes = 0;
this._parts = 0;
this._finished = false;
this._realFinish = false;
this._isPreamble = true;
this._justMatched = false;
this._firstWrite = true;
this._inHeader = true;
this._part = undefined;
this._cb = undefined;
this._ignoreData = false;
this._partOpts = (typeof cfg.partHwm === 'number'
? { highWaterMark: cfg.partHwm }
: {});
this._pause = false;
this._hparser = new HeaderParser(cfg);
this._hparser.on('header', function(header) {
self._inHeader = false;
self._part.emit('header', header);
});
}
inherits(Dicer, WritableStream);
Dicer.prototype.emit = function(ev) {
if (ev === 'finish' && !this._realFinish) {
if (!this._finished) {
var self = this;
process.nextTick(function() {
self.emit('error', new Error('Unexpected end of multipart data'));
if (self._part && !self._ignoreData) {
var type = (self._isPreamble ? 'Preamble' : 'Part');
self._part.emit('error', new Error(type + ' terminated early due to unexpected end of multipart data'));
self._part.push(null);
process.nextTick(function() {
self._realFinish = true;
self.emit('finish');
self._realFinish = false;
});
return;
}
self._realFinish = true;
self.emit('finish');
self._realFinish = false;
});
}
} else
WritableStream.prototype.emit.apply(this, arguments);
};
Dicer.prototype._write = function(data, encoding, cb) {
// ignore unexpected data (e.g. extra trailer data after finished)
if (!this._hparser && !this._bparser)
return cb();
if (this._headerFirst && this._isPreamble) {
if (!this._part) {
this._part = new PartStream(this._partOpts);
if (this._events.preamble)
this.emit('preamble', this._part);
else
this._ignore();
}
var r = this._hparser.push(data);
if (!this._inHeader && r !== undefined && r < data.length)
data = data.slice(r);
else
return cb();
}
// allows for "easier" testing
if (this._firstWrite) {
this._bparser.push(B_CRLF);
this._firstWrite = false;
}
this._bparser.push(data);
if (this._pause)
this._cb = cb;
else
cb();
};
Dicer.prototype.reset = function() {
this._part = undefined;
this._bparser = undefined;
this._hparser = undefined;
};
Dicer.prototype.setBoundary = function(boundary) {
var self = this;
this._bparser = new StreamSearch('\r\n--' + boundary);
this._bparser.on('info', function(isMatch, data, start, end) {
self._oninfo(isMatch, data, start, end);
});
};
Dicer.prototype._ignore = function() {
if (this._part && !this._ignoreData) {
this._ignoreData = true;
this._part.on('error', EMPTY_FN);
// we must perform some kind of read on the stream even though we are
// ignoring the data, otherwise node's Readable stream will not emit 'end'
// after pushing null to the stream
this._part.resume();
}
};
Dicer.prototype._oninfo = function(isMatch, data, start, end) {
var buf, self = this, i = 0, r, ev, shouldWriteMore = true;
if (!this._part && this._justMatched && data) {
while (this._dashes < 2 && (start + i) < end) {
if (data[start + i] === DASH) {
++i;
++this._dashes;
} else {
if (this._dashes)
buf = B_ONEDASH;
this._dashes = 0;
break;
}
}
if (this._dashes === 2) {
if ((start + i) < end && this._events.trailer)
this.emit('trailer', data.slice(start + i, end));
this.reset();
this._finished = true;
// no more parts will be added
if (self._parts === 0) {
self._realFinish = true;
self.emit('finish');
self._realFinish = false;
}
}
if (this._dashes)
return;
}
if (this._justMatched)
this._justMatched = false;
if (!this._part) {
this._part = new PartStream(this._partOpts);
this._part._read = function(n) {
self._unpause();
};
ev = this._isPreamble ? 'preamble' : 'part';
if (this._events[ev])
this.emit(ev, this._part);
else
this._ignore();
if (!this._isPreamble)
this._inHeader = true;
}
if (data && start < end && !this._ignoreData) {
if (this._isPreamble || !this._inHeader) {
if (buf)
shouldWriteMore = this._part.push(buf);
shouldWriteMore = this._part.push(data.slice(start, end));
if (!shouldWriteMore)
this._pause = true;
} else if (!this._isPreamble && this._inHeader) {
if (buf)
this._hparser.push(buf);
r = this._hparser.push(data.slice(start, end));
if (!this._inHeader && r !== undefined && r < end)
this._oninfo(false, data, start + r, end);
}
}
if (isMatch) {
this._hparser.reset();
if (this._isPreamble)
this._isPreamble = false;
else {
++this._parts;
this._part.on('end', function() {
if (--self._parts === 0) {
if (self._finished) {
self._realFinish = true;
self.emit('finish');
self._realFinish = false;
} else {
self._unpause();
}
}
});
}
this._part.push(null);
this._part = undefined;
this._ignoreData = false;
this._justMatched = true;
this._dashes = 0;
}
};
Dicer.prototype._unpause = function() {
if (!this._pause)
return;
this._pause = false;
if (this._cb) {
var cb = this._cb;
this._cb = undefined;
cb();
}
};
module.exports = Dicer;

110
node_modules/dicer/lib/HeaderParser.js

@ -0,0 +1,110 @@
var EventEmitter = require('events').EventEmitter,
inherits = require('util').inherits;
var StreamSearch = require('streamsearch');
var B_DCRLF = Buffer.from('\r\n\r\n'),
RE_CRLF = /\r\n/g,
RE_HDR = /^([^:]+):[ \t]?([\x00-\xFF]+)?$/,
MAX_HEADER_PAIRS = 2000, // from node's http.js
MAX_HEADER_SIZE = 80 * 1024; // from node's http_parser
function HeaderParser(cfg) {
EventEmitter.call(this);
var self = this;
this.nread = 0;
this.maxed = false;
this.npairs = 0;
this.maxHeaderPairs = (cfg && typeof cfg.maxHeaderPairs === 'number'
? cfg.maxHeaderPairs
: MAX_HEADER_PAIRS);
this.buffer = '';
this.header = {};
this.finished = false;
this.ss = new StreamSearch(B_DCRLF);
this.ss.on('info', function(isMatch, data, start, end) {
if (data && !self.maxed) {
if (self.nread + (end - start) > MAX_HEADER_SIZE) {
end = (MAX_HEADER_SIZE - self.nread);
self.nread = MAX_HEADER_SIZE;
} else
self.nread += (end - start);
if (self.nread === MAX_HEADER_SIZE)
self.maxed = true;
self.buffer += data.toString('binary', start, end);
}
if (isMatch)
self._finish();
});
}
inherits(HeaderParser, EventEmitter);
HeaderParser.prototype.push = function(data) {
var r = this.ss.push(data);
if (this.finished)
return r;
};
HeaderParser.prototype.reset = function() {
this.finished = false;
this.buffer = '';
this.header = {};
this.ss.reset();
};
HeaderParser.prototype._finish = function() {
if (this.buffer)
this._parseHeader();
this.ss.matches = this.ss.maxMatches;
var header = this.header;
this.header = {};
this.buffer = '';
this.finished = true;
this.nread = this.npairs = 0;
this.maxed = false;
this.emit('header', header);
};
HeaderParser.prototype._parseHeader = function() {
if (this.npairs === this.maxHeaderPairs)
return;
var lines = this.buffer.split(RE_CRLF), len = lines.length, m, h,
modded = false;
for (var i = 0; i < len; ++i) {
if (lines[i].length === 0)
continue;
if (lines[i][0] === '\t' || lines[i][0] === ' ') {
// folded header content
// RFC2822 says to just remove the CRLF and not the whitespace following
// it, so we follow the RFC and include the leading whitespace ...
this.header[h][this.header[h].length - 1] += lines[i];
} else {
m = RE_HDR.exec(lines[i]);
if (m) {
h = m[1].toLowerCase();
if (m[2]) {
if (this.header[h] === undefined)
this.header[h] = [m[2]];
else
this.header[h].push(m[2]);
} else
this.header[h] = [''];
if (++this.npairs === this.maxHeaderPairs)
break;
} else {
this.buffer = lines[i];
modded = true;
break;
}
}
}
if (!modded)
this.buffer = '';
};
module.exports = HeaderParser;

11
node_modules/dicer/lib/PartStream.js

@ -0,0 +1,11 @@
var inherits = require('util').inherits,
ReadableStream = require('stream').Readable;
function PartStream(opts) {
ReadableStream.call(this, opts);
}
inherits(PartStream, ReadableStream);
PartStream.prototype._read = function(n) {};
module.exports = PartStream;

16
node_modules/dicer/package.json

@ -0,0 +1,16 @@
{ "name": "dicer",
"version": "0.3.0",
"author": "Brian White <mscdex@mscdex.net>",
"description": "A very fast streaming multipart parser for node.js",
"main": "./lib/Dicer",
"dependencies": {
"streamsearch": "0.1.2"
},
"scripts": {
"test": "node test/test.js"
},
"engines": { "node": ">=4.5.0" },
"keywords": [ "parser", "parse", "parsing", "multipart", "form-data", "streaming" ],
"licenses": [ { "type": "MIT", "url": "http://github.com/mscdex/dicer/raw/master/LICENSE" } ],
"repository" : { "type": "git", "url": "http://github.com/mscdex/dicer.git" }
}

31
node_modules/dicer/test/fixtures/many-noend/original

@ -0,0 +1,31 @@
------WebKitFormBoundaryWLHCs9qmcJJoyjKR
Content-Disposition: form-data; name="_method"
put
------WebKitFormBoundaryWLHCs9qmcJJoyjKR
Content-Disposition: form-data; name="profile[blog]"
------WebKitFormBoundaryWLHCs9qmcJJoyjKR
Content-Disposition: form-data; name="profile[public_email]"
------WebKitFormBoundaryWLHCs9qmcJJoyjKR
Content-Disposition: form-data; name="profile[interests]"
------WebKitFormBoundaryWLHCs9qmcJJoyjKR
Content-Disposition: form-data; name="profile[bio]"
hello
"quote"
------WebKitFormBoundaryWLHCs9qmcJJoyjKR
Content-Disposition: form-data; name="commit"
Save
------WebKitFormBoundaryWLHCs9qmcJJoyjKR
Content-Disposition: form-data; name="media"; filename=""
Content-Type: application/octet-stream

1
node_modules/dicer/test/fixtures/many-noend/part1

@ -0,0 +1 @@
put

1
node_modules/dicer/test/fixtures/many-noend/part1.header

@ -0,0 +1 @@
{"content-disposition": ["form-data; name=\"_method\""]}

0
node_modules/dicer/test/fixtures/many-noend/part2

1
node_modules/dicer/test/fixtures/many-noend/part2.header

@ -0,0 +1 @@
{"content-disposition": ["form-data; name=\"profile[blog]\""]}

0
node_modules/dicer/test/fixtures/many-noend/part3

1
node_modules/dicer/test/fixtures/many-noend/part3.header

@ -0,0 +1 @@
{"content-disposition": ["form-data; name=\"profile[public_email]\""]}

0
node_modules/dicer/test/fixtures/many-noend/part4

1
node_modules/dicer/test/fixtures/many-noend/part4.header

@ -0,0 +1 @@
{"content-disposition": ["form-data; name=\"profile[interests]\""]}

3
node_modules/dicer/test/fixtures/many-noend/part5

@ -0,0 +1,3 @@
hello
"quote"

1
node_modules/dicer/test/fixtures/many-noend/part5.header

@ -0,0 +1 @@
{"content-disposition": ["form-data; name=\"profile[bio]\""]}

1
node_modules/dicer/test/fixtures/many-noend/part6

@ -0,0 +1 @@
Save

1
node_modules/dicer/test/fixtures/many-noend/part6.header

@ -0,0 +1 @@
{"content-disposition": ["form-data; name=\"commit\""]}

2
node_modules/dicer/test/fixtures/many-noend/part7.header

@ -0,0 +1,2 @@
{"content-disposition": ["form-data; name=\"media\"; filename=\"\""],
"content-type": ["application/octet-stream"]}

32
node_modules/dicer/test/fixtures/many-wrongboundary/original

@ -0,0 +1,32 @@
------WebKitFormBoundaryWLHCs9qmcJJoyjKR
Content-Disposition: form-data; name="_method"
put
------WebKitFormBoundaryWLHCs9qmcJJoyjKR
Content-Disposition: form-data; name="profile[blog]"
------WebKitFormBoundaryWLHCs9qmcJJoyjKR
Content-Disposition: form-data; name="profile[public_email]"
------WebKitFormBoundaryWLHCs9qmcJJoyjKR
Content-Disposition: form-data; name="profile[interests]"
------WebKitFormBoundaryWLHCs9qmcJJoyjKR
Content-Disposition: form-data; name="profile[bio]"
hello
"quote"
------WebKitFormBoundaryWLHCs9qmcJJoyjKR
Content-Disposition: form-data; name="media"; filename=""
Content-Type: application/octet-stream
------WebKitFormBoundaryWLHCs9qmcJJoyjKR
Content-Disposition: form-data; name="commit"
Save
------WebKitFormBoundaryWLHCs9qmcJJoyjKR--

33
node_modules/dicer/test/fixtures/many-wrongboundary/preamble

@ -0,0 +1,33 @@
------WebKitFormBoundaryWLHCs9qmcJJoyjKR
Content-Disposition: form-data; name="_method"
put
------WebKitFormBoundaryWLHCs9qmcJJoyjKR
Content-Disposition: form-data; name="profile[blog]"
------WebKitFormBoundaryWLHCs9qmcJJoyjKR
Content-Disposition: form-data; name="profile[public_email]"
------WebKitFormBoundaryWLHCs9qmcJJoyjKR
Content-Disposition: form-data; name="profile[interests]"
------WebKitFormBoundaryWLHCs9qmcJJoyjKR
Content-Disposition: form-data; name="profile[bio]"
hello
"quote"
------WebKitFormBoundaryWLHCs9qmcJJoyjKR
Content-Disposition: form-data; name="media"; filename=""
Content-Type: application/octet-stream
------WebKitFormBoundaryWLHCs9qmcJJoyjKR
Content-Disposition: form-data; name="commit"
Save
------WebKitFormBoundaryWLHCs9qmcJJoyjKR--

1
node_modules/dicer/test/fixtures/many-wrongboundary/preamble.error

@ -0,0 +1 @@
Preamble terminated early due to unexpected end of multipart data

32
node_modules/dicer/test/fixtures/many/original

@ -0,0 +1,32 @@
------WebKitFormBoundaryWLHCs9qmcJJoyjKR
Content-Disposition: form-data; name="_method"
put
------WebKitFormBoundaryWLHCs9qmcJJoyjKR
Content-Disposition: form-data; name="profile[blog]"
------WebKitFormBoundaryWLHCs9qmcJJoyjKR
Content-Disposition: form-data; name="profile[public_email]"
------WebKitFormBoundaryWLHCs9qmcJJoyjKR
Content-Disposition: form-data; name="profile[interests]"
------WebKitFormBoundaryWLHCs9qmcJJoyjKR
Content-Disposition: form-data; name="profile[bio]"
hello
"quote"
------WebKitFormBoundaryWLHCs9qmcJJoyjKR
Content-Disposition: form-data; name="media"; filename=""
Content-Type: application/octet-stream
------WebKitFormBoundaryWLHCs9qmcJJoyjKR
Content-Disposition: form-data; name="commit"
Save
------WebKitFormBoundaryWLHCs9qmcJJoyjKR--

1
node_modules/dicer/test/fixtures/many/part1

@ -0,0 +1 @@
put

1
node_modules/dicer/test/fixtures/many/part1.header

@ -0,0 +1 @@
{"content-disposition": ["form-data; name=\"_method\""]}

0
node_modules/dicer/test/fixtures/many/part2

1
node_modules/dicer/test/fixtures/many/part2.header

@ -0,0 +1 @@
{"content-disposition": ["form-data; name=\"profile[blog]\""]}

0
node_modules/dicer/test/fixtures/many/part3

1
node_modules/dicer/test/fixtures/many/part3.header

@ -0,0 +1 @@
{"content-disposition": ["form-data; name=\"profile[public_email]\""]}

0
node_modules/dicer/test/fixtures/many/part4

1
node_modules/dicer/test/fixtures/many/part4.header

@ -0,0 +1 @@
{"content-disposition": ["form-data; name=\"profile[interests]\""]}

3
node_modules/dicer/test/fixtures/many/part5

@ -0,0 +1,3 @@
hello
"quote"

1
node_modules/dicer/test/fixtures/many/part5.header

@ -0,0 +1 @@
{"content-disposition": ["form-data; name=\"profile[bio]\""]}

0
node_modules/dicer/test/fixtures/many/part6

2
node_modules/dicer/test/fixtures/many/part6.header

@ -0,0 +1,2 @@
{"content-disposition": ["form-data; name=\"media\"; filename=\"\""],
"content-type": ["application/octet-stream"]}

1
node_modules/dicer/test/fixtures/many/part7

@ -0,0 +1 @@
Save

1
node_modules/dicer/test/fixtures/many/part7.header

@ -0,0 +1 @@
{"content-disposition": ["form-data; name=\"commit\""]}

24
node_modules/dicer/test/fixtures/nested-full/original

@ -0,0 +1,24 @@
User-Agent: foo bar baz
Content-Type: multipart/form-data; boundary=AaB03x
--AaB03x
Content-Disposition: form-data; name="foo"
bar
--AaB03x
Content-Disposition: form-data; name="files"
Content-Type: multipart/mixed, boundary=BbC04y
--BbC04y
Content-Disposition: attachment; filename="file.txt"
Content-Type: text/plain
contents
--BbC04y
Content-Disposition: attachment; filename="flowers.jpg"
Content-Type: image/jpeg
Content-Transfer-Encoding: binary
contents
--BbC04y--
--AaB03x--

1
node_modules/dicer/test/fixtures/nested-full/part1

@ -0,0 +1 @@
bar

1
node_modules/dicer/test/fixtures/nested-full/part1.header

@ -0,0 +1 @@
{"content-disposition": ["form-data; name=\"foo\""]}

12
node_modules/dicer/test/fixtures/nested-full/part2

@ -0,0 +1,12 @@
--BbC04y
Content-Disposition: attachment; filename="file.txt"
Content-Type: text/plain
contents
--BbC04y
Content-Disposition: attachment; filename="flowers.jpg"
Content-Type: image/jpeg
Content-Transfer-Encoding: binary
contents
--BbC04y--

2
node_modules/dicer/test/fixtures/nested-full/part2.header

@ -0,0 +1,2 @@
{"content-disposition": ["form-data; name=\"files\""],
"content-type": ["multipart/mixed, boundary=BbC04y"]}

2
node_modules/dicer/test/fixtures/nested-full/preamble.header

@ -0,0 +1,2 @@
{"user-agent": ["foo bar baz"],
"content-type": ["multipart/form-data; boundary=AaB03x"]}

21
node_modules/dicer/test/fixtures/nested/original

@ -0,0 +1,21 @@
--AaB03x
Content-Disposition: form-data; name="foo"
bar
--AaB03x
Content-Disposition: form-data; name="files"
Content-Type: multipart/mixed, boundary=BbC04y
--BbC04y
Content-Disposition: attachment; filename="file.txt"
Content-Type: text/plain
contents
--BbC04y
Content-Disposition: attachment; filename="flowers.jpg"
Content-Type: image/jpeg
Content-Transfer-Encoding: binary
contents
--BbC04y--
--AaB03x--

1
node_modules/dicer/test/fixtures/nested/part1

@ -0,0 +1 @@
bar

1
node_modules/dicer/test/fixtures/nested/part1.header

@ -0,0 +1 @@
{"content-disposition": ["form-data; name=\"foo\""]}

12
node_modules/dicer/test/fixtures/nested/part2

@ -0,0 +1,12 @@
--BbC04y
Content-Disposition: attachment; filename="file.txt"
Content-Type: text/plain
contents
--BbC04y
Content-Disposition: attachment; filename="flowers.jpg"
Content-Type: image/jpeg
Content-Transfer-Encoding: binary
contents
--BbC04y--

2
node_modules/dicer/test/fixtures/nested/part2.header

@ -0,0 +1,2 @@
{"content-disposition": ["form-data; name=\"files\""],
"content-type": ["multipart/mixed, boundary=BbC04y"]}

87
node_modules/dicer/test/test-endfinish.js

@ -0,0 +1,87 @@
var Dicer = require('..');
var assert = require('assert');
var CRLF = '\r\n';
var boundary = 'boundary';
var writeSep = '--' + boundary;
var writePart = [
writeSep,
'Content-Type: text/plain',
'Content-Length: 0'
].join(CRLF)
+ CRLF + CRLF
+ 'some data' + CRLF;
var writeEnd = '--' + CRLF;
var firedEnd = false;
var firedFinish = false;
var dicer = new Dicer({boundary: boundary});
dicer.on('part', partListener);
dicer.on('finish', finishListener);
dicer.write(writePart+writeSep);
function partListener(partReadStream) {
partReadStream.on('data', function(){});
partReadStream.on('end', partEndListener);
}
function partEndListener() {
firedEnd = true;
setImmediate(afterEnd);
}
function afterEnd() {
dicer.end(writeEnd);
setImmediate(afterWrite);
}
function finishListener() {
assert(firedEnd, 'Failed to end before finishing');
firedFinish = true;
test2();
}
function afterWrite() {
assert(firedFinish, 'Failed to finish');
}
var isPausePush = true;
var firedPauseCallback = false;
var firedPauseFinish = false;
var dicer2 = null;
function test2() {
dicer2 = new Dicer({boundary: boundary});
dicer2.on('part', pausePartListener);
dicer2.on('finish', pauseFinish);
dicer2.write(writePart+writeSep, 'utf8', pausePartCallback);
setImmediate(pauseAfterWrite);
}
function pausePartListener(partReadStream) {
partReadStream.on('data', function(){});
partReadStream.on('end', function(){});
var realPush = partReadStream.push;
partReadStream.push = function fakePush() {
realPush.apply(partReadStream, arguments);
if (!isPausePush)
return true;
isPausePush = false;
return false;
};
}
function pauseAfterWrite() {
dicer2.end(writeEnd);
setImmediate(pauseAfterEnd);
}
function pauseAfterEnd() {
assert(firedPauseCallback, 'Failed to call callback after pause');
assert(firedPauseFinish, 'Failed to finish after pause');
}
function pauseFinish() {
firedPauseFinish = true;
}
function pausePartCallback() {
firedPauseCallback = true;
}

68
node_modules/dicer/test/test-headerparser.js

@ -0,0 +1,68 @@
var assert = require('assert'),
path = require('path');
var HeaderParser = require('../lib/HeaderParser');
var DCRLF = '\r\n\r\n',
MAXED_BUFFER = Buffer.allocUnsafe(128 * 1024);
MAXED_BUFFER.fill(0x41); // 'A'
var group = path.basename(__filename, '.js') + '/';
[
{ source: DCRLF,
expected: {},
what: 'No header'
},
{ source: ['Content-Type:\t text/plain',
'Content-Length:0'
].join('\r\n') + DCRLF,
expected: {'content-type': [' text/plain'], 'content-length': ['0']},
what: 'Value spacing'
},
{ source: ['Content-Type:\r\n text/plain',
'Foo:\r\n bar\r\n baz',
].join('\r\n') + DCRLF,
expected: {'content-type': [' text/plain'], 'foo': [' bar baz']},
what: 'Folded values'
},
{ source: ['Content-Type:',
'Foo: ',
].join('\r\n') + DCRLF,
expected: {'content-type': [''], 'foo': ['']},
what: 'Empty values'
},
{ source: MAXED_BUFFER.toString('ascii') + DCRLF,
expected: {},
what: 'Max header size (single chunk)'
},
{ source: ['ABCDEFGHIJ', MAXED_BUFFER.toString('ascii'), DCRLF],
expected: {},
what: 'Max header size (multiple chunks #1)'
},
{ source: [MAXED_BUFFER.toString('ascii'), MAXED_BUFFER.toString('ascii'), DCRLF],
expected: {},
what: 'Max header size (multiple chunks #2)'
},
].forEach(function(v) {
var parser = new HeaderParser(),
fired = false;
parser.on('header', function(header) {
assert(!fired, makeMsg(v.what, 'Header event fired more than once'));
fired = true;
assert.deepEqual(header,
v.expected,
makeMsg(v.what, 'Parsed result mismatch'));
});
if (!Array.isArray(v.source))
v.source = [v.source];
v.source.forEach(function(s) {
parser.push(s);
});
assert(fired, makeMsg(v.what, 'Did not receive header from parser'));
});
function makeMsg(what, msg) {
return '[' + group + what + ']: ' + msg;
}

148
node_modules/dicer/test/test-multipart-extra-trailer.js

@ -0,0 +1,148 @@
var Dicer = require('..');
var assert = require('assert'),
fs = require('fs'),
path = require('path'),
inspect = require('util').inspect;
var FIXTURES_ROOT = __dirname + '/fixtures/';
var t = 0,
group = path.basename(__filename, '.js') + '/';
var tests = [
{ source: 'many',
opts: { boundary: '----WebKitFormBoundaryWLHCs9qmcJJoyjKR' },
chsize: 16,
nparts: 7,
what: 'Extra trailer data pushed after finished'
},
];
function next() {
if (t === tests.length)
return;
var v = tests[t],
fixtureBase = FIXTURES_ROOT + v.source,
fd,
n = 0,
buffer = Buffer.allocUnsafe(v.chsize),
state = { parts: [] };
fd = fs.openSync(fixtureBase + '/original', 'r');
var dicer = new Dicer(v.opts),
error,
partErrors = 0,
finishes = 0;
dicer.on('part', function(p) {
var part = {
body: undefined,
bodylen: 0,
error: undefined,
header: undefined
};
p.on('header', function(h) {
part.header = h;
}).on('data', function(data) {
// make a copy because we are using readSync which re-uses a buffer ...
var copy = Buffer.allocUnsafe(data.length);
data.copy(copy);
data = copy;
if (!part.body)
part.body = [ data ];
else
part.body.push(data);
part.bodylen += data.length;
}).on('error', function(err) {
part.error = err;
++partErrors;
}).on('end', function() {
if (part.body)
part.body = Buffer.concat(part.body, part.bodylen);
state.parts.push(part);
});
}).on('error', function(err) {
error = err;
}).on('finish', function() {
assert(finishes++ === 0, makeMsg(v.what, 'finish emitted multiple times'));
if (v.dicerError)
assert(error !== undefined, makeMsg(v.what, 'Expected error'));
else
assert(error === undefined, makeMsg(v.what, 'Unexpected error'));
if (v.events && v.events.indexOf('part') > -1) {
assert.equal(state.parts.length,
v.nparts,
makeMsg(v.what,
'Part count mismatch:\nActual: '
+ state.parts.length
+ '\nExpected: '
+ v.nparts));
if (!v.npartErrors)
v.npartErrors = 0;
assert.equal(partErrors,
v.npartErrors,
makeMsg(v.what,
'Part errors mismatch:\nActual: '
+ partErrors
+ '\nExpected: '
+ v.npartErrors));
for (var i = 0, header, body; i < v.nparts; ++i) {
if (fs.existsSync(fixtureBase + '/part' + (i+1))) {
body = fs.readFileSync(fixtureBase + '/part' + (i+1));
if (body.length === 0)
body = undefined;
} else
body = undefined;
assert.deepEqual(state.parts[i].body,
body,
makeMsg(v.what,
'Part #' + (i+1) + ' body mismatch'));
if (fs.existsSync(fixtureBase + '/part' + (i+1) + '.header')) {
header = fs.readFileSync(fixtureBase
+ '/part' + (i+1) + '.header', 'binary');
header = JSON.parse(header);
} else
header = undefined;
assert.deepEqual(state.parts[i].header,
header,
makeMsg(v.what,
'Part #' + (i+1)
+ ' parsed header mismatch:\nActual: '
+ inspect(state.parts[i].header)
+ '\nExpected: '
+ inspect(header)));
}
}
++t;
next();
});
while (true) {
n = fs.readSync(fd, buffer, 0, buffer.length, null);
if (n === 0) {
setTimeout(function() {
dicer.write('\r\n\r\n\r\n');
dicer.end();
}, 50);
break;
}
dicer.write(n === buffer.length ? buffer : buffer.slice(0, n));
}
fs.closeSync(fd);
}
next();
function makeMsg(what, msg) {
return '[' + group + what + ']: ' + msg;
}
process.on('exit', function() {
assert(t === tests.length,
makeMsg('_exit', 'Only ran ' + t + '/' + tests.length + ' tests'));
});

228
node_modules/dicer/test/test-multipart-nolisteners.js

@ -0,0 +1,228 @@
var Dicer = require('..');
var assert = require('assert'),
fs = require('fs'),
path = require('path'),
inspect = require('util').inspect;
var FIXTURES_ROOT = __dirname + '/fixtures/';
var t = 0,
group = path.basename(__filename, '.js') + '/';
var tests = [
{ source: 'many',
opts: { boundary: '----WebKitFormBoundaryWLHCs9qmcJJoyjKR' },
chsize: 16,
nparts: 0,
what: 'No preamble or part listeners'
},
];
function next() {
if (t === tests.length)
return;
var v = tests[t],
fixtureBase = FIXTURES_ROOT + v.source,
fd,
n = 0,
buffer = Buffer.allocUnsafe(v.chsize),
state = { done: false, parts: [], preamble: undefined };
fd = fs.openSync(fixtureBase + '/original', 'r');
var dicer = new Dicer(v.opts),
error,
partErrors = 0,
finishes = 0;
if (v.events && v.events.indexOf('preamble') > -1) {
dicer.on('preamble', function(p) {
var preamble = {
body: undefined,
bodylen: 0,
error: undefined,
header: undefined
};
p.on('header', function(h) {
preamble.header = h;
}).on('data', function(data) {
// make a copy because we are using readSync which re-uses a buffer ...
var copy = Buffer.allocUnsafe(data.length);
data.copy(copy);
data = copy;
if (!preamble.body)
preamble.body = [ data ];
else
preamble.body.push(data);
preamble.bodylen += data.length;
}).on('error', function(err) {
preamble.error = err;
}).on('end', function() {
if (preamble.body)
preamble.body = Buffer.concat(preamble.body, preamble.bodylen);
if (preamble.body || preamble.header)
state.preamble = preamble;
});
});
}
if (v.events && v.events.indexOf('part') > -1) {
dicer.on('part', function(p) {
var part = {
body: undefined,
bodylen: 0,
error: undefined,
header: undefined
};
p.on('header', function(h) {
part.header = h;
}).on('data', function(data) {
// make a copy because we are using readSync which re-uses a buffer ...
var copy = Buffer.allocUnsafe(data.length);
data.copy(copy);
data = copy;
if (!part.body)
part.body = [ data ];
else
part.body.push(data);
part.bodylen += data.length;
}).on('error', function(err) {
part.error = err;
++partErrors;
}).on('end', function() {
if (part.body)
part.body = Buffer.concat(part.body, part.bodylen);
state.parts.push(part);
});
});
}
dicer.on('error', function(err) {
error = err;
}).on('finish', function() {
assert(finishes++ === 0, makeMsg(v.what, 'finish emitted multiple times'));
if (v.dicerError)
assert(error !== undefined, makeMsg(v.what, 'Expected error'));
else
assert(error === undefined, makeMsg(v.what, 'Unexpected error'));
if (v.events && v.events.indexOf('preamble') > -1) {
var preamble;
if (fs.existsSync(fixtureBase + '/preamble')) {
var prebody = fs.readFileSync(fixtureBase + '/preamble');
if (prebody.length) {
preamble = {
body: prebody,
bodylen: prebody.length,
error: undefined,
header: undefined
};
}
}
if (fs.existsSync(fixtureBase + '/preamble.header')) {
var prehead = JSON.parse(fs.readFileSync(fixtureBase
+ '/preamble.header', 'binary'));
if (!preamble) {
preamble = {
body: undefined,
bodylen: 0,
error: undefined,
header: prehead
};
} else
preamble.header = prehead;
}
if (fs.existsSync(fixtureBase + '/preamble.error')) {
var err = new Error(fs.readFileSync(fixtureBase
+ '/preamble.error', 'binary'));
if (!preamble) {
preamble = {
body: undefined,
bodylen: 0,
error: err,
header: undefined
};
} else
preamble.error = err;
}
assert.deepEqual(state.preamble,
preamble,
makeMsg(v.what,
'Preamble mismatch:\nActual:'
+ inspect(state.preamble)
+ '\nExpected: '
+ inspect(preamble)));
}
if (v.events && v.events.indexOf('part') > -1) {
assert.equal(state.parts.length,
v.nparts,
makeMsg(v.what,
'Part count mismatch:\nActual: '
+ state.parts.length
+ '\nExpected: '
+ v.nparts));
if (!v.npartErrors)
v.npartErrors = 0;
assert.equal(partErrors,
v.npartErrors,
makeMsg(v.what,
'Part errors mismatch:\nActual: '
+ partErrors
+ '\nExpected: '
+ v.npartErrors));
for (var i = 0, header, body; i < v.nparts; ++i) {
if (fs.existsSync(fixtureBase + '/part' + (i+1))) {
body = fs.readFileSync(fixtureBase + '/part' + (i+1));
if (body.length === 0)
body = undefined;
} else
body = undefined;
assert.deepEqual(state.parts[i].body,
body,
makeMsg(v.what,
'Part #' + (i+1) + ' body mismatch'));
if (fs.existsSync(fixtureBase + '/part' + (i+1) + '.header')) {
header = fs.readFileSync(fixtureBase
+ '/part' + (i+1) + '.header', 'binary');
header = JSON.parse(header);
} else
header = undefined;
assert.deepEqual(state.parts[i].header,
header,
makeMsg(v.what,
'Part #' + (i+1)
+ ' parsed header mismatch:\nActual: '
+ inspect(state.parts[i].header)
+ '\nExpected: '
+ inspect(header)));
}
}
++t;
next();
});
while (true) {
n = fs.readSync(fd, buffer, 0, buffer.length, null);
if (n === 0) {
dicer.end();
break;
}
dicer.write(n === buffer.length ? buffer : buffer.slice(0, n));
}
fs.closeSync(fd);
}
next();
function makeMsg(what, msg) {
return '[' + group + what + ']: ' + msg;
}
process.on('exit', function() {
assert(t === tests.length,
makeMsg('_exit', 'Only ran ' + t + '/' + tests.length + ' tests'));
});

240
node_modules/dicer/test/test-multipart.js

@ -0,0 +1,240 @@
var Dicer = require('..');
var assert = require('assert'),
fs = require('fs'),
path = require('path'),
inspect = require('util').inspect;
var FIXTURES_ROOT = __dirname + '/fixtures/';
var t = 0,
group = path.basename(__filename, '.js') + '/';
var tests = [
{ source: 'nested',
opts: { boundary: 'AaB03x' },
chsize: 32,
nparts: 2,
what: 'One nested multipart'
},
{ source: 'many',
opts: { boundary: '----WebKitFormBoundaryWLHCs9qmcJJoyjKR' },
chsize: 16,
nparts: 7,
what: 'Many parts'
},
{ source: 'many-wrongboundary',
opts: { boundary: 'LOLOLOL' },
chsize: 8,
nparts: 0,
dicerError: true,
what: 'Many parts, wrong boundary'
},
{ source: 'many-noend',
opts: { boundary: '----WebKitFormBoundaryWLHCs9qmcJJoyjKR' },
chsize: 16,
nparts: 7,
npartErrors: 1,
dicerError: true,
what: 'Many parts, end boundary missing, 1 file open'
},
{ source: 'nested-full',
opts: { boundary: 'AaB03x', headerFirst: true },
chsize: 32,
nparts: 2,
what: 'One nested multipart with preceding header'
},
{ source: 'nested-full',
opts: { headerFirst: true },
chsize: 32,
nparts: 2,
setBoundary: 'AaB03x',
what: 'One nested multipart with preceding header, using setBoundary'
},
];
function next() {
if (t === tests.length)
return;
var v = tests[t],
fixtureBase = FIXTURES_ROOT + v.source,
n = 0,
buffer = Buffer.allocUnsafe(v.chsize),
state = { parts: [], preamble: undefined };
var dicer = new Dicer(v.opts),
error,
partErrors = 0,
finishes = 0;
dicer.on('preamble', function(p) {
var preamble = {
body: undefined,
bodylen: 0,
error: undefined,
header: undefined
};
p.on('header', function(h) {
preamble.header = h;
if (v.setBoundary)
dicer.setBoundary(v.setBoundary);
}).on('data', function(data) {
// make a copy because we are using readSync which re-uses a buffer ...
var copy = Buffer.allocUnsafe(data.length);
data.copy(copy);
data = copy;
if (!preamble.body)
preamble.body = [ data ];
else
preamble.body.push(data);
preamble.bodylen += data.length;
}).on('error', function(err) {
preamble.error = err;
}).on('end', function() {
if (preamble.body)
preamble.body = Buffer.concat(preamble.body, preamble.bodylen);
if (preamble.body || preamble.header)
state.preamble = preamble;
});
});
dicer.on('part', function(p) {
var part = {
body: undefined,
bodylen: 0,
error: undefined,
header: undefined
};
p.on('header', function(h) {
part.header = h;
}).on('data', function(data) {
if (!part.body)
part.body = [ data ];
else
part.body.push(data);
part.bodylen += data.length;
}).on('error', function(err) {
part.error = err;
++partErrors;
}).on('end', function() {
if (part.body)
part.body = Buffer.concat(part.body, part.bodylen);
state.parts.push(part);
});
}).on('error', function(err) {
error = err;
}).on('finish', function() {
assert(finishes++ === 0, makeMsg(v.what, 'finish emitted multiple times'));
if (v.dicerError)
assert(error !== undefined, makeMsg(v.what, 'Expected error'));
else
assert(error === undefined, makeMsg(v.what, 'Unexpected error: ' + error));
var preamble;
if (fs.existsSync(fixtureBase + '/preamble')) {
var prebody = fs.readFileSync(fixtureBase + '/preamble');
if (prebody.length) {
preamble = {
body: prebody,
bodylen: prebody.length,
error: undefined,
header: undefined
};
}
}
if (fs.existsSync(fixtureBase + '/preamble.header')) {
var prehead = JSON.parse(fs.readFileSync(fixtureBase
+ '/preamble.header', 'binary'));
if (!preamble) {
preamble = {
body: undefined,
bodylen: 0,
error: undefined,
header: prehead
};
} else
preamble.header = prehead;
}
if (fs.existsSync(fixtureBase + '/preamble.error')) {
var err = new Error(fs.readFileSync(fixtureBase
+ '/preamble.error', 'binary'));
if (!preamble) {
preamble = {
body: undefined,
bodylen: 0,
error: err,
header: undefined
};
} else
preamble.error = err;
}
assert.deepEqual(state.preamble,
preamble,
makeMsg(v.what,
'Preamble mismatch:\nActual:'
+ inspect(state.preamble)
+ '\nExpected: '
+ inspect(preamble)));
assert.equal(state.parts.length,
v.nparts,
makeMsg(v.what,
'Part count mismatch:\nActual: '
+ state.parts.length
+ '\nExpected: '
+ v.nparts));
if (!v.npartErrors)
v.npartErrors = 0;
assert.equal(partErrors,
v.npartErrors,
makeMsg(v.what,
'Part errors mismatch:\nActual: '
+ partErrors
+ '\nExpected: '
+ v.npartErrors));
for (var i = 0, header, body; i < v.nparts; ++i) {
if (fs.existsSync(fixtureBase + '/part' + (i+1))) {
body = fs.readFileSync(fixtureBase + '/part' + (i+1));
if (body.length === 0)
body = undefined;
} else
body = undefined;
assert.deepEqual(state.parts[i].body,
body,
makeMsg(v.what,
'Part #' + (i+1) + ' body mismatch'));
if (fs.existsSync(fixtureBase + '/part' + (i+1) + '.header')) {
header = fs.readFileSync(fixtureBase
+ '/part' + (i+1) + '.header', 'binary');
header = JSON.parse(header);
} else
header = undefined;
assert.deepEqual(state.parts[i].header,
header,
makeMsg(v.what,
'Part #' + (i+1)
+ ' parsed header mismatch:\nActual: '
+ inspect(state.parts[i].header)
+ '\nExpected: '
+ inspect(header)));
}
++t;
next();
});
fs.createReadStream(fixtureBase + '/original').pipe(dicer);
}
next();
function makeMsg(what, msg) {
return '[' + group + what + ']: ' + msg;
}
process.on('exit', function() {
assert(t === tests.length,
makeMsg('_exit', 'Only ran ' + t + '/' + tests.length + ' tests'));
});

4
node_modules/dicer/test/test.js

@ -0,0 +1,4 @@
require('fs').readdirSync(__dirname).forEach(function(f) {
if (f.substr(0, 5) === 'test-')
require('./' + f);
});

32
node_modules/express-fileupload/.circleci/config.yml

@ -0,0 +1,32 @@
version: 2.1
orbs:
node: circleci/node@4.7
jobs:
lintandcoverage:
docker:
- image: cimg/node:16.13.2
steps:
- checkout
- run: npm install
- run: npm run lint
- run: npm run test
- run: npm run coveralls
workflows:
test:
jobs:
- lintandcoverage
- node/test:
version: '12.22.6'
pkg-manager: npm
- node/test:
version: '14.19.0'
pkg-manager: npm
- node/test:
version: '15.14.0'
pkg-manager: npm
- node/test:
version: '16.13.2'
pkg-manager: npm
- node/test:
version: '17.4.0'
pkg-manager: npm

1
node_modules/express-fileupload/.eslintignore

@ -0,0 +1 @@
coverage

23
node_modules/express-fileupload/.eslintrc

@ -0,0 +1,23 @@
{
"extends": [
"eslint:recommended"
],
"env": {
"node": true,
"mocha": true,
"es6": true
},
"parserOptions": {
"ecmaVersion": 6
},
"rules": {
"comma-dangle": [2, "never"],
"max-len": [2, {
"code": 100,
"tabWidth": 2
}],
"semi": 2,
"keyword-spacing": 2,
"indent": [2, 2, { "SwitchCase": 1 }]
}
}

1
node_modules/express-fileupload/.prettierrc

@ -0,0 +1 @@
{singleQuote: true}

22
node_modules/express-fileupload/LICENSE

@ -0,0 +1,22 @@
The MIT License (MIT)
Copyright (c) 2015 Richard Girges
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

125
node_modules/express-fileupload/README.md

@ -0,0 +1,125 @@
# express-fileupload
Simple express middleware for uploading files.
[![npm](https://img.shields.io/npm/v/express-fileupload.svg)](https://www.npmjs.org/package/express-fileupload)
[![downloads per month](http://img.shields.io/npm/dm/express-fileupload.svg)](https://www.npmjs.org/package/express-fileupload)
[![CircleCI](https://circleci.com/gh/richardgirges/express-fileupload/tree/master.svg?style=svg)](https://circleci.com/gh/richardgirges/express-fileupload/tree/master)
[![Coverage Status](https://img.shields.io/coveralls/richardgirges/express-fileupload.svg)](https://coveralls.io/r/richardgirges/express-fileupload)
# Security Notice
Please install version 1.1.10+ of this package to avoid a security vulnerability in Node/EJS related to JS prototype pollution. This vulnerability is only applicable if you have the `parseNested` option set to `true` (it is `false` by default).
# Install
```bash
# With NPM
npm i express-fileupload
# With Yarn
yarn add express-fileupload
```
# Usage
When you upload a file, the file will be accessible from `req.files`.
Example:
* You're uploading a file called **car.jpg**
* Your input's name field is **foo**: `<input name="foo" type="file" />`
* In your express server request, you can access your uploaded file from `req.files.foo`:
```javascript
app.post('/upload', function(req, res) {
console.log(req.files.foo); // the uploaded file object
});
```
The **req.files.foo** object will contain the following:
* `req.files.foo.name`: "car.jpg"
* `req.files.foo.mv`: A function to move the file elsewhere on your server. Can take a callback or return a promise.
* `req.files.foo.mimetype`: The mimetype of your file
* `req.files.foo.data`: A buffer representation of your file; an empty buffer when the useTempFiles option is set to true.
* `req.files.foo.tempFilePath`: A path to the temporary file when the useTempFiles option is set to true.
* `req.files.foo.truncated`: A boolean indicating whether the file exceeded the size limit
* `req.files.foo.size`: Uploaded size in bytes
* `req.files.foo.md5`: MD5 checksum of the uploaded file
**Notes about breaking changes with MD5 handling:**
* Before 1.0.0, `md5` is an MD5 checksum of the uploaded file.
* From 1.0.0 until 1.1.1, `md5` is a function to compute an MD5 hash ([Read about it here.](https://github.com/richardgirges/express-fileupload/releases/tag/v1.0.0-alpha.1)).
* From 1.1.1 onward, `md5` is once again the MD5 checksum of the uploaded file, with full MD5 support when temporary files are used.
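For illustration, a minimal sketch of a route that reads these properties and moves the upload (the route path and destination directory below are placeholders, not part of the library):
```javascript
app.post('/upload', function(req, res) {
  const file = req.files.foo; // uploaded via <input name="foo" type="file" />
  console.log(file.name, file.size, file.mimetype, file.md5);
  // mv() accepts a callback or returns a promise
  file.mv('/path/on/your/server/' + file.name, function(err) {
    if (err) return res.status(500).send(err);
    res.send('Uploaded ' + file.name);
  });
});
```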
### Examples
* [Example Project](https://github.com/richardgirges/express-fileupload/tree/master/example)
* [Basic File Upload](https://github.com/richardgirges/express-fileupload/tree/master/example#basic-file-upload)
* [Multi-File Upload](https://github.com/richardgirges/express-fileupload/tree/master/example#multi-file-upload)
### Using Busboy Options
Pass in Busboy options directly to the express-fileupload middleware. [Check out the Busboy documentation here](https://github.com/mscdex/busboy#api).
```javascript
app.use(fileUpload({
limits: { fileSize: 50 * 1024 * 1024 },
}));
```
### Using the useTempFiles Option
Use temp files instead of memory for managing the upload process.
```javascript
// Note that this option is available for versions 1.0.0 and newer.
app.use(fileUpload({
useTempFiles : true,
tempFileDir : '/tmp/'
}));
```
### Using debug option
You can set the `debug` option to `true` to see logging about the upload process.
In this case the middleware uses `console.log` and prefixes its output with `Express-file-upload`.
It will show you whether the request is invalid, as well as common events triggered during the upload.
This can be really useful for troubleshooting and ***we recommend attaching debug output to each issue on GitHub***.
***Output example:***
```
Express-file-upload: Temporary file path is /node/express-fileupload/test/temp/tmp-16-1570084843942
Express-file-upload: New upload started testFile->car.png, bytes:0
Express-file-upload: Uploading testFile->car.png, bytes:21232...
Express-file-upload: Uploading testFile->car.png, bytes:86768...
Express-file-upload: Upload timeout testFile->car.png, bytes:86768
Express-file-upload: Cleaning up temporary file /node/express-fileupload/test/temp/tmp-16-1570084843942...
```
***Description:***
* `Temporary file path is...` says that `useTempFiles` was set to `true` and shows the temp file name and path.
* `New upload started testFile->car.png` says that a new upload started with field `testFile` and file name `car.png`.
* `Uploading testFile->car.png, bytes:21232...` shows the current progress for each new data chunk.
* `Upload timeout` means that no data arrived within `uploadTimeout`.
* `Cleaning up temporary file` shows the temporary file being removed because the upload timeout was reached.
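Enabling it is just another middleware option; a minimal sketch:
```javascript
app.use(fileUpload({
  debug: true
}));
```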
### Available Options
Pass in non-Busboy options directly to the middleware. These are express-fileupload specific options.
Option | Acceptable&nbsp;Values | Details
--- | --- | ---
createParentPath | <ul><li><code>false</code>&nbsp;**(default)**</li><li><code>true</code></ul> | Automatically creates the directory path specified in `.mv(filePathName)`
uriDecodeFileNames | <ul><li><code>false</code>&nbsp;**(default)**</li><li><code>true</code></ul> | Applies URI decoding to file names if set to <code>true</code>.
safeFileNames | <ul><li><code>false</code>&nbsp;**(default)**</li><li><code>true</code></li><li>regex</li></ul> | Strips characters from the upload's filename. You can use custom regex to determine what to strip. If set to `true`, non-alphanumeric characters _except_ dashes and underscores will be stripped. This option is off by default.<br /><br />**Example #1 (strip slashes from file names):** `app.use(fileUpload({ safeFileNames: /\\/g }))`<br />**Example #2:** `app.use(fileUpload({ safeFileNames: true }))`
preserveExtension | <ul><li><code>false</code>&nbsp;**(default)**</li><li><code>true</code></li><li><code>*Number*</code></li></ul> | Preserves filename extension when using <code>safeFileNames</code> option. If set to <code>true</code>, will default to an extension length of 3. If set to <code>*Number*</code>, this will be the max allowable extension length. If an extension is smaller than the extension length, it remains untouched. If the extension is longer, it is shifted.<br /><br />**Example #1 (true):**<br /><code>app.use(fileUpload({ safeFileNames: true, preserveExtension: true }));</code><br />*myFileName.ext* --> *myFileName.ext*<br /><br />**Example #2 (max extension length 2, extension shifted):**<br /><code>app.use(fileUpload({ safeFileNames: true, preserveExtension: 2 }));</code><br />*myFileName.ext* --> *myFileNamee.xt*
abortOnLimit | <ul><li><code>false</code>&nbsp;**(default)**</li><li><code>true</code></ul> | If set to <code>true</code>, returns an HTTP 413 when the file is bigger than the size limit. Otherwise, it will add <code>truncated = true</code> to the resulting file structure.
responseOnLimit | <ul><li><code>'File size limit has been reached'</code>&nbsp;**(default)**</li><li><code>*String*</code></ul> | The response sent to the client when the file size limit is exceeded and <code>abortOnLimit</code> is set to <code>true</code>.
limitHandler | <ul><li><code>false</code>&nbsp;**(default)**</li><li><code>function(req, res, next)</code></li></ul> | A user-defined limit handler invoked when the file is bigger than the configured limits.
useTempFiles | <ul><li><code>false</code>&nbsp;**(default)**</li><li><code>true</code></ul> | By default this module uploads files into RAM. Setting this option to <code>true</code> uses temporary files instead, which avoids memory overflow when uploading large files or many files at the same time.
tempFileDir | <ul><li><code>String</code>&nbsp;**(path)**</li></ul> | Path to store temporary files.<br />Used along with the <code>useTempFiles</code> option. By default this module uses 'tmp' folder in the current working directory.<br />You can use trailing slash, but it is not necessary.
parseNested | <ul><li><code>false</code>&nbsp;**(default)**</li><li><code>true</code></li></ul> | By default, req.body and req.files are flattened like this: <code>{'name': 'John', 'hobbies[0]': 'Cinema', 'hobbies[1]': 'Bike'}</code><br /><br/>When this option is enabled they are parsed in order to be nested like this: <code>{'name': 'John', 'hobbies': ['Cinema', 'Bike']}</code>
debug | <ul><li><code>false</code>&nbsp;**(default)**</li><li><code>true</code></ul> | Turn on/off upload process logging. Can be useful for troubleshooting.
uploadTimeout | <ul><li><code>60000</code>&nbsp;**(default)**</li><li><code>Integer</code></ul> | This defines how long to wait for data before aborting. Set to 0 if you want to turn off timeout checks.
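As a sketch, several of these options can be combined with the Busboy limits shown earlier (the values below are illustrative, not recommendations):
```javascript
app.use(fileUpload({
  createParentPath: true,
  safeFileNames: true,
  preserveExtension: 4,
  abortOnLimit: true,
  responseOnLimit: 'File too large',
  useTempFiles: true,
  tempFileDir: '/tmp/uploads/',
  limits: { fileSize: 10 * 1024 * 1024 } // Busboy option
}));
```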
# Help Wanted
Looking for additional maintainers. Please contact `richardgirges [ at ] gmail.com` if you're interested. Pull Requests are welcome!
# Thanks & Credit
[Brian White](https://github.com/mscdex) for his stellar work on the [Busboy Package](https://github.com/mscdex/busboy) and the [connect-busboy Package](https://github.com/mscdex/connect-busboy)

70
node_modules/express-fileupload/example/README.md

@ -0,0 +1,70 @@
# express-fileupload Examples
## Basic File Upload
**Your node.js code:**
```javascript
const express = require('express');
const fileUpload = require('express-fileupload');
const app = express();
// default options
app.use(fileUpload());
app.post('/upload', function(req, res) {
let sampleFile;
let uploadPath;
if (!req.files || Object.keys(req.files).length === 0) {
return res.status(400).send('No files were uploaded.');
}
// The name of the input field (i.e. "sampleFile") is used to retrieve the uploaded file
sampleFile = req.files.sampleFile;
uploadPath = __dirname + '/somewhere/on/your/server/' + sampleFile.name;
// Use the mv() method to place the file somewhere on your server
sampleFile.mv(uploadPath, function(err) {
if (err)
return res.status(500).send(err);
res.send('File uploaded!');
});
});
```
**Your HTML file upload form:**
```html
<html>
<body>
<form ref='uploadForm'
id='uploadForm'
action='http://localhost:8000/upload'
method='post'
encType="multipart/form-data">
<input type="file" name="sampleFile" />
<input type='submit' value='Upload!' />
</form>
</body>
</html>
```
## Multi-File Upload
express-fileupload supports multiple file uploads at the same time.
Let's say you have three file inputs in your form, named `my_profile_pic`, `my_pet`, and `my_cover_photo`:
```html
<input type="file" name="my_profile_pic" />
<input type="file" name="my_pet" />
<input type="file" name="my_cover_photo" />
```
These uploaded files would be accessible like so:
```javascript
app.post('/upload', function(req, res) {
// Uploaded files:
console.log(req.files.my_profile_pic.name);
console.log(req.files.my_pet.name);
console.log(req.files.my_cover_photo.name);
});
```

12
node_modules/express-fileupload/example/index.html

@ -0,0 +1,12 @@
<html>
<body>
<form ref='uploadForm'
id='uploadForm'
action='/upload'
method='post'
encType="multipart/form-data">
<input type="file" name="sampleFile" />
<input type='submit' value='Upload!' />
</form>
</body>
</html>

41
node_modules/express-fileupload/example/server.js

@ -0,0 +1,41 @@
const express = require('express');
const fileUpload = require('../lib/index');
const app = express();
const PORT = 8000;
app.use('/form', express.static(__dirname + '/index.html'));
// default options
app.use(fileUpload());
app.get('/ping', function(req, res) {
res.send('pong');
});
app.post('/upload', function(req, res) {
let sampleFile;
let uploadPath;
if (!req.files || Object.keys(req.files).length === 0) {
res.status(400).send('No files were uploaded.');
return;
}
console.log('req.files >>>', req.files); // eslint-disable-line
sampleFile = req.files.sampleFile;
uploadPath = __dirname + '/uploads/' + sampleFile.name;
sampleFile.mv(uploadPath, function(err) {
if (err) {
return res.status(500).send(err);
}
res.send('File uploaded to ' + uploadPath);
});
});
app.listen(PORT, function() {
console.log('Express server listening on port ', PORT); // eslint-disable-line
});

1
node_modules/express-fileupload/example/uploads/placeholder.txt

@ -0,0 +1 @@
files are placed here when uploaded using the upload.test.js express server

65
node_modules/express-fileupload/lib/fileFactory.js

@ -0,0 +1,65 @@
'use strict';
const {
isFunc,
debugLog,
moveFile,
promiseCallback,
checkAndMakeDir,
saveBufferToFile
} = require('./utilities');
/**
* Returns Local function that moves the file to a different location on the filesystem
* which takes two function arguments to make it compatible w/ Promise or Callback APIs
* @param {String} filePath - destination file path.
* @param {Object} options - file factory options.
* @param {Object} fileUploadOptions - middleware options.
* @returns {Function}
*/
const moveFromTemp = (filePath, options, fileUploadOptions) => (resolve, reject) => {
debugLog(fileUploadOptions, `Moving temporary file ${options.tempFilePath} to ${filePath}`);
moveFile(options.tempFilePath, filePath, promiseCallback(resolve, reject));
};
/**
* Returns Local function that moves the file from buffer to a different location on the filesystem
* which takes two function arguments to make it compatible w/ Promise or Callback APIs
* @param {String} filePath - destination file path.
* @param {Object} options - file factory options.
* @param {Object} fileUploadOptions - middleware options.
* @returns {Function}
*/
const moveFromBuffer = (filePath, options, fileUploadOptions) => (resolve, reject) => {
debugLog(fileUploadOptions, `Moving uploaded buffer to ${filePath}`);
saveBufferToFile(options.buffer, filePath, promiseCallback(resolve, reject));
};
module.exports = (options, fileUploadOptions = {}) => {
// see: https://github.com/richardgirges/express-fileupload/issues/14
  // Firefox uploads an empty file on a cache miss when refreshing the page,
  // resulting in unexpected behavior. If there is no file data, the file is invalid.
// if (!fileUploadOptions.useTempFiles && !options.buffer.length) return;
// Create and return file object.
return {
name: options.name,
data: options.buffer,
size: options.size,
encoding: options.encoding,
tempFilePath: options.tempFilePath,
truncated: options.truncated,
mimetype: options.mimetype,
md5: options.hash,
mv: (filePath, callback) => {
      // Define a proper move function.
const moveFunc = fileUploadOptions.useTempFiles
? moveFromTemp(filePath, options, fileUploadOptions)
: moveFromBuffer(filePath, options, fileUploadOptions);
// Create a folder for a file.
checkAndMakeDir(fileUploadOptions, filePath);
// If callback is passed in, use the callback API, otherwise return a promise.
return isFunc(callback) ? moveFunc(callback) : new Promise(moveFunc);
}
};
};

39
node_modules/express-fileupload/lib/index.js

@ -0,0 +1,39 @@
'use strict';
const path = require('path');
const processMultipart = require('./processMultipart');
const isEligibleRequest = require('./isEligibleRequest');
const { buildOptions, debugLog } = require('./utilities');
const busboy = require('busboy'); // eslint-disable-line no-unused-vars
const DEFAULT_OPTIONS = {
debug: false,
uploadTimeout: 60000,
fileHandler: false,
uriDecodeFileNames: false,
safeFileNames: false,
preserveExtension: false,
abortOnLimit: false,
responseOnLimit: 'File size limit has been reached',
limitHandler: false,
createParentPath: false,
parseNested: false,
useTempFiles: false,
tempFileDir: path.join(process.cwd(), 'tmp')
};
/**
* Expose the file upload middleware
* @param {DEFAULT_OPTIONS & busboy.BusboyConfig} options - Middleware options.
* @returns {Function} - express-fileupload middleware.
*/
module.exports = (options) => {
const uploadOptions = buildOptions(DEFAULT_OPTIONS, options);
return (req, res, next) => {
if (!isEligibleRequest(req)) {
debugLog(uploadOptions, 'Request is not eligible for file upload!');
return next();
}
processMultipart(uploadOptions, req, res, next);
};
};

34
node_modules/express-fileupload/lib/isEligibleRequest.js

@ -0,0 +1,34 @@
const ACCEPTABLE_CONTENT_TYPE = /^(multipart\/.+);(.*)$/i;
const UNACCEPTABLE_METHODS = ['GET', 'HEAD'];
/**
* Ensures the request contains a content body
* @param {Object} req Express req object
* @returns {Boolean}
*/
const hasBody = (req) => {
return ('transfer-encoding' in req.headers) ||
('content-length' in req.headers && req.headers['content-length'] !== '0');
};
/**
* Ensures the request is not using a non-compliant multipart method
* such as GET or HEAD
* @param {Object} req Express req object
* @returns {Boolean}
*/
const hasAcceptableMethod = req => !UNACCEPTABLE_METHODS.includes(req.method);
/**
* Ensures that only multipart requests are processed by express-fileupload
* @param {Object} req Express req object
* @returns {Boolean}
*/
const hasAcceptableContentType = req => ACCEPTABLE_CONTENT_TYPE.test(req.headers['content-type']);
/**
* Ensures that the request in question is eligible for file uploads
* @param {Object} req Express req object
* @returns {Boolean}
*/
module.exports = req => hasBody(req) && hasAcceptableMethod(req) && hasAcceptableContentType(req);
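For reference, a hedged sketch of how this predicate behaves; the request objects below are hand-built stand-ins for Express requests, not real ones:
```javascript
const isEligibleRequest = require('./isEligibleRequest');

const eligible = {
  method: 'POST',
  headers: {
    'content-type': 'multipart/form-data; boundary=----XYZ',
    'content-length': '1024'
  }
};
const notEligible = { method: 'GET', headers: { 'content-type': 'text/plain' } };

console.log(isEligibleRequest(eligible));    // true: has a body, POST, multipart content type
console.log(isEligibleRequest(notEligible)); // false: GET with no body and a non-multipart type
```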

42
node_modules/express-fileupload/lib/memHandler.js

@ -0,0 +1,42 @@
const crypto = require('crypto');
const { debugLog } = require('./utilities');
/**
* memHandler - In memory upload handler
* @param {Object} options
* @param {String} fieldname
* @param {String} filename
* @returns {Object}
*/
module.exports = (options, fieldname, filename) => {
const buffers = [];
const hash = crypto.createHash('md5');
let fileSize = 0;
let completed = false;
const getBuffer = () => Buffer.concat(buffers, fileSize);
return {
dataHandler: (data) => {
if (completed === true) {
debugLog(options, `Error: got ${fieldname}->${filename} data chunk for completed upload!`);
return;
}
buffers.push(data);
hash.update(data);
fileSize += data.length;
debugLog(options, `Uploading ${fieldname}->${filename}, bytes:${fileSize}...`);
},
getBuffer: getBuffer,
getFilePath: () => '',
getFileSize: () => fileSize,
getHash: () => hash.digest('hex'),
complete: () => {
debugLog(options, `Upload ${fieldname}->${filename} completed, bytes:${fileSize}.`);
completed = true;
return getBuffer();
},
cleanup: () => { completed = true; },
getWritePromise: () => Promise.resolve()
};
};
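A hedged sketch of driving the in-memory handler directly (normally processMultipart does this); the field and file names are arbitrary:
```javascript
const memHandler = require('./memHandler');

const handler = memHandler({ debug: false }, 'avatar', 'photo.png');
handler.dataHandler(Buffer.from('hello '));
handler.dataHandler(Buffer.from('world'));

const buf = handler.complete();      // concatenated Buffer of all chunks
console.log(buf.toString());         // 'hello world'
console.log(handler.getFileSize());  // 11
console.log(handler.getHash());      // MD5 hex digest of the uploaded data
```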

167
node_modules/express-fileupload/lib/processMultipart.js

@ -0,0 +1,167 @@
const Busboy = require('busboy');
const UploadTimer = require('./uploadtimer');
const fileFactory = require('./fileFactory');
const memHandler = require('./memHandler');
const tempFileHandler = require('./tempFileHandler');
const processNested = require('./processNested');
const {
isFunc,
debugLog,
buildFields,
buildOptions,
parseFileName
} = require('./utilities');
const waitFlushProperty = Symbol('wait flush property symbol');
/**
* Processes multipart request
* Builds a req.body object for fields
* Builds a req.files object for files
* @param {Object} options expressFileupload and Busboy options
* @param {Object} req Express request object
* @param {Object} res Express response object
* @param {Function} next Express next method
* @return {void}
*/
module.exports = (options, req, res, next) => {
req.files = null;
// Build busboy options and init busboy instance.
const busboyOptions = buildOptions(options, { headers: req.headers });
const busboy = new Busboy(busboyOptions);
// Close connection with specified reason and http code, default: 400 Bad Request.
const closeConnection = (code, reason) => {
req.unpipe(busboy);
res.writeHead(code || 400, { Connection: 'close' });
res.end(reason || 'Bad Request');
};
// Express proxies sometimes attach multipart data to a buffer
if (req.body instanceof Buffer) {
req.body = Object.create(null);
}
// Build multipart req.body fields
busboy.on('field', (field, val) => req.body = buildFields(req.body, field, val));
// Build req.files fields
busboy.on('file', (field, file, name, encoding, mime) => {
    // Parse the file name (truncating long names, decoding, etc.).
const filename = parseFileName(options, name);
// Define methods and handlers for upload process.
const {
dataHandler,
getFilePath,
getFileSize,
getHash,
complete,
cleanup,
getWritePromise
} = options.useTempFiles
? tempFileHandler(options, field, filename) // Upload into temporary file.
: memHandler(options, field, filename); // Upload into RAM.
const writePromise = options.useTempFiles
? getWritePromise().catch(err => {
req.unpipe(busboy);
req.resume();
cleanup();
next(err);
}) : getWritePromise();
// Define upload timer.
const uploadTimer = new UploadTimer(options.uploadTimeout, () => {
file.removeAllListeners('data');
file.resume();
// After destroy an error event will be emitted and file clean up will be done.
file.destroy(new Error(`Upload timeout ${field}->${filename}, bytes:${getFileSize()}`));
});
file.on('limit', () => {
debugLog(options, `Size limit reached for ${field}->${filename}, bytes:${getFileSize()}`);
// Reset upload timer in case of file limit reached.
uploadTimer.clear();
// Run a user defined limit handler if it has been set.
if (isFunc(options.limitHandler)) return options.limitHandler(req, res, next);
// Close connection with 413 code and do cleanup if abortOnLimit set(default: false).
if (options.abortOnLimit) {
debugLog(options, `Aborting upload because of size limit ${field}->${filename}.`);
!isFunc(options.limitHandler) ? closeConnection(413, options.responseOnLimit) : '';
cleanup();
}
});
file.on('data', (data) => {
uploadTimer.set(); // Refresh upload timer each time new data chunk came.
dataHandler(data); // Handle new piece of data.
});
file.on('end', () => {
const size = getFileSize();
// Debug logging for file upload ending.
debugLog(options, `Upload finished ${field}->${filename}, bytes:${size}`);
// Reset upload timer in case of end event.
uploadTimer.clear();
// See https://github.com/richardgirges/express-fileupload/issues/191
// Do not add file instance to the req.files if original name and size are empty.
// Empty name and zero size indicates empty file field in the posted form.
if (!name && size === 0) {
if (options.useTempFiles) {
cleanup();
debugLog(options, `Removing the empty file ${field}->${filename}`);
}
return debugLog(options, `Don't add file instance if original name and size are empty`);
}
req.files = buildFields(req.files, field, fileFactory({
buffer: complete(),
name: filename,
tempFilePath: getFilePath(),
hash: getHash(),
size,
encoding,
truncated: file.truncated,
mimetype: mime
}, options));
if (!req[waitFlushProperty]) {
req[waitFlushProperty] = [];
}
req[waitFlushProperty].push(writePromise);
});
file.on('error', (err) => {
uploadTimer.clear(); // Reset upload timer in case of errors.
debugLog(options, err);
cleanup();
next();
});
// Debug logging for a new file upload.
debugLog(options, `New upload started ${field}->${filename}, bytes:${getFileSize()}`);
// Set new upload timeout for a new file.
uploadTimer.set();
});
busboy.on('finish', () => {
debugLog(options, `Busboy finished parsing request.`);
if (options.parseNested) {
req.body = processNested(req.body);
req.files = processNested(req.files);
}
if (!req[waitFlushProperty]) return next();
Promise.all(req[waitFlushProperty])
.then(() => {
delete req[waitFlushProperty];
next();
});
});
busboy.on('error', (err) => {
debugLog(options, `Busboy error`);
next(err);
});
req.pipe(busboy);
};

35
node_modules/express-fileupload/lib/processNested.js

@ -0,0 +1,35 @@
const { isSafeFromPollution } = require("./utilities");
module.exports = function(data){
if (!data || data.length < 1) return Object.create(null);
let d = Object.create(null),
keys = Object.keys(data);
for (let i = 0; i < keys.length; i++) {
let key = keys[i],
value = data[key],
current = d,
keyParts = key
.replace(new RegExp(/\[/g), '.')
.replace(new RegExp(/\]/g), '')
.split('.');
for (let index = 0; index < keyParts.length; index++){
let k = keyParts[index];
// Ensure we don't allow prototype pollution
if (!isSafeFromPollution(current, k)) {
continue;
}
if (index >= keyParts.length - 1){
current[k] = value;
} else {
if (!current[k]) current[k] = !isNaN(keyParts[index + 1]) ? [] : Object.create(null);
current = current[k];
}
}
}
return d;
};
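To make the transformation concrete, a hedged usage sketch (the flattened keys mirror the `parseNested` example from the README):
```javascript
const processNested = require('./processNested');

const flat = { 'name': 'John', 'hobbies[0]': 'Cinema', 'hobbies[1]': 'Bike' };
console.log(processNested(flat));
// -> { name: 'John', hobbies: [ 'Cinema', 'Bike' ] } (as a null-prototype object)
```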

64
node_modules/express-fileupload/lib/tempFileHandler.js

@ -0,0 +1,64 @@
const fs = require('fs');
const path = require('path');
const crypto = require('crypto');
const {
debugLog,
checkAndMakeDir,
getTempFilename,
deleteFile
} = require('./utilities');
module.exports = (options, fieldname, filename) => {
const dir = path.normalize(options.tempFileDir);
const tempFilePath = path.join(dir, getTempFilename());
checkAndMakeDir({ createParentPath: true }, tempFilePath);
debugLog(options, `Temporary file path is ${tempFilePath}`);
const hash = crypto.createHash('md5');
let fileSize = 0;
let completed = false;
debugLog(options, `Opening write stream for ${fieldname}->${filename}...`);
const writeStream = fs.createWriteStream(tempFilePath);
const writePromise = new Promise((resolve, reject) => {
writeStream.on('finish', () => resolve());
writeStream.on('error', (err) => {
debugLog(options, `Error write temp file: ${err}`);
reject(err);
});
});
return {
dataHandler: (data) => {
if (completed === true) {
debugLog(options, `Error: got ${fieldname}->${filename} data chunk for completed upload!`);
return;
}
writeStream.write(data);
hash.update(data);
fileSize += data.length;
debugLog(options, `Uploading ${fieldname}->${filename}, bytes:${fileSize}...`);
},
getFilePath: () => tempFilePath,
getFileSize: () => fileSize,
getHash: () => hash.digest('hex'),
complete: () => {
completed = true;
debugLog(options, `Upload ${fieldname}->${filename} completed, bytes:${fileSize}.`);
if (writeStream !== false) writeStream.end();
// Return empty buff since data was uploaded into a temp file.
return Buffer.concat([]);
},
cleanup: () => {
completed = true;
debugLog(options, `Cleaning up temporary file ${tempFilePath}...`);
writeStream.end();
deleteFile(tempFilePath, err => (err
? debugLog(options, `Cleaning up temporary file ${tempFilePath} failed: ${err}`)
: debugLog(options, `Cleaning up temporary file ${tempFilePath} done.`)
));
},
getWritePromise: () => writePromise
};
};

26
node_modules/express-fileupload/lib/uploadtimer.js

@ -0,0 +1,26 @@
class UploadTimer {
/**
* @constructor
* @param {number} timeout - timer timeout in msecs.
* @param {Function} callback - callback to run when timeout reached.
*/
constructor(timeout = 0, callback = () => {}) {
this.timeout = timeout;
this.callback = callback;
this.timer = null;
}
clear() {
clearTimeout(this.timer);
}
set() {
// Do not start a timer if zero timeout or it hasn't been set.
if (!this.timeout) return false;
this.clear();
this.timer = setTimeout(this.callback, this.timeout);
return true;
}
}
module.exports = UploadTimer;

311
node_modules/express-fileupload/lib/utilities.js

@ -0,0 +1,311 @@
'use strict';
const fs = require('fs');
const path = require('path');
const { Readable } = require('stream');
// Parameters for safe file name parsing.
const SAFE_FILE_NAME_REGEX = /[^\w-]/g;
const MAX_EXTENSION_LENGTH = 3;
// Parameters to generate unique temporary file names:
const TEMP_COUNTER_MAX = 65536;
const TEMP_PREFIX = 'tmp';
let tempCounter = 0;
/**
* Logs message to console if debug option set to true.
* @param {Object} options - options object.
* @param {string} msg - message to log.
* @returns {boolean} - false if debug is off.
*/
const debugLog = (options, msg) => {
const opts = options || {};
if (!opts.debug) return false;
console.log(`Express-file-upload: ${msg}`); // eslint-disable-line
return true;
};
/**
* Generates unique temporary file name. e.g. tmp-5000-156788789789.
* @param {string} prefix - a prefix for generated unique file name.
* @returns {string}
*/
const getTempFilename = (prefix = TEMP_PREFIX) => {
tempCounter = tempCounter >= TEMP_COUNTER_MAX ? 1 : tempCounter + 1;
return `${prefix}-${tempCounter}-${Date.now()}`;
};
/**
* isFunc: Checks if argument is a function.
* @returns {boolean} - Returns true if argument is a function.
*/
const isFunc = func => func && func.constructor && func.call && func.apply ? true: false;
/**
* Set errorFunc to the same value as successFunc for callback mode.
* @returns {Function}
*/
const errorFunc = (resolve, reject) => isFunc(reject) ? reject : resolve;
/**
 * Return a callback function for promise resolve/reject args.
* Ensures that callback is called only once.
* @returns {Function}
*/
const promiseCallback = (resolve, reject) => {
let hasFired = false;
return (err) => {
if (hasFired) {
return;
}
hasFired = true;
return err ? errorFunc(resolve, reject)(err) : resolve();
};
};
/**
 * Builds instance options from argument objects (can't be an arrow function).
* @returns {Object} - result options.
*/
const buildOptions = function() {
const result = {};
[...arguments].forEach(options => {
if (!options || typeof options !== 'object') return;
Object.keys(options).forEach(i => result[i] = options[i]);
});
return result;
};
// The default prototypes for both objects and arrays.
// Used by isSafeFromPollution
const OBJECT_PROTOTYPE_KEYS = Object.getOwnPropertyNames(Object.prototype);
const ARRAY_PROTOTYPE_KEYS = Object.getOwnPropertyNames(Array.prototype);
/**
* Determines whether a key insertion into an object could result in a prototype pollution
* @param {Object} base - The object whose insertion we are checking
* @param {string} key - The key that will be inserted
*/
const isSafeFromPollution = (base, key) => {
// We perform an instanceof check instead of Array.isArray as the former is more
  // permissive for cases in which the object has an Array prototype but was not constructed
// via an Array constructor or literal.
const TOUCHES_ARRAY_PROTOTYPE = (base instanceof Array) && ARRAY_PROTOTYPE_KEYS.includes(key);
const TOUCHES_OBJECT_PROTOTYPE = OBJECT_PROTOTYPE_KEYS.includes(key);
return !TOUCHES_ARRAY_PROTOTYPE && !TOUCHES_OBJECT_PROTOTYPE;
};
/**
 * Builds request fields (used to build req.body and req.files)
* @param {Object} instance - request object.
* @param {string} field - field name.
* @param {any} value - field value.
* @returns {Object}
*/
const buildFields = (instance, field, value) => {
// Do nothing if value is not set.
if (value === null || value === undefined) return instance;
instance = instance || Object.create(null);
if (!isSafeFromPollution(instance, field)) {
return instance;
}
// Non-array fields
if (!instance[field]) {
instance[field] = value;
return instance;
}
// Array fields
if (instance[field] instanceof Array) {
instance[field].push(value);
} else {
instance[field] = [instance[field], value];
}
return instance;
};
/**
* Creates a folder for file specified in the path variable
* @param {Object} fileUploadOptions
* @param {string} filePath
* @returns {boolean}
*/
const checkAndMakeDir = (fileUploadOptions, filePath) => {
// Check upload options were set.
if (!fileUploadOptions) return false;
if (!fileUploadOptions.createParentPath) return false;
// Check whether folder for the file exists.
if (!filePath) return false;
const parentPath = path.dirname(filePath);
// Create folder if it doesn't exist.
if (!fs.existsSync(parentPath)) fs.mkdirSync(parentPath, { recursive: true });
  // Check the folder again and return the result.
return fs.existsSync(parentPath);
};
/**
* Deletes a file.
* @param {string} file - Path to the file to delete.
* @param {Function} callback
*/
const deleteFile = (file, callback) => fs.unlink(file, callback);
/**
* Copy file via streams
* @param {string} src - Path to the source file
* @param {string} dst - Path to the destination file.
*/
const copyFile = (src, dst, callback) => {
// cbCalled flag and runCb helps to run cb only once.
let cbCalled = false;
let runCb = (err) => {
if (cbCalled) return;
cbCalled = true;
callback(err);
};
// Create read stream
let readable = fs.createReadStream(src);
readable.on('error', runCb);
// Create write stream
let writable = fs.createWriteStream(dst);
writable.on('error', (err)=>{
readable.destroy();
runCb(err);
});
writable.on('close', () => runCb());
// Copy file via piping streams.
readable.pipe(writable);
};
/**
* moveFile: moves the file from src to dst.
 * First tries to rename the file; if that fails, copies it to dst and then deletes src.
* @param {string} src - Path to the source file
* @param {string} dst - Path to the destination file.
* @param {Function} callback - A callback function.
*/
const moveFile = (src, dst, callback) => fs.rename(src, dst, err => (err
? copyFile(src, dst, err => err ? callback(err) : deleteFile(src, callback))
: callback()
));
/**
* Save buffer data to a file.
* @param {Buffer} buffer - buffer to save to a file.
* @param {string} filePath - path to a file.
*/
const saveBufferToFile = (buffer, filePath, callback) => {
if (!Buffer.isBuffer(buffer)) {
return callback(new Error('buffer variable should be type of Buffer!'));
}
// Setup readable stream from buffer.
let streamData = buffer;
let readStream = Readable();
readStream._read = () => {
readStream.push(streamData);
streamData = null;
};
// Setup file system writable stream.
let fstream = fs.createWriteStream(filePath);
// console.log("Calling saveBuffer");
fstream.on('error', err => {
// console.log("err cb")
callback(err);
});
fstream.on('close', () => {
// console.log("close cb");
callback();
});
// Copy file via piping streams.
readStream.pipe(fstream);
};
/**
* Decodes uriEncoded file names.
* @param fileName {String} - file name to decode.
* @returns {String}
*/
const uriDecodeFileName = (opts, fileName) => {
return opts.uriDecodeFileNames ? decodeURIComponent(fileName) : fileName;
};
/**
* Parses filename and extension and returns object {name, extension}.
* @param {boolean|integer} preserveExtension - true/false or number of characters for extension.
* @param {string} fileName - file name to parse.
* @returns {Object} - { name, extension }.
*/
const parseFileNameExtension = (preserveExtension, fileName) => {
const preserveExtensionLength = parseInt(preserveExtension);
const result = {name: fileName, extension: ''};
if (!preserveExtension && preserveExtensionLength !== 0) return result;
// Define maximum extension length
const maxExtLength = isNaN(preserveExtensionLength)
? MAX_EXTENSION_LENGTH
: Math.abs(preserveExtensionLength);
const nameParts = fileName.split('.');
if (nameParts.length < 2) return result;
let extension = nameParts.pop();
if (
extension.length > maxExtLength &&
maxExtLength > 0
) {
nameParts[nameParts.length - 1] +=
'.' +
extension.substr(0, extension.length - maxExtLength);
extension = extension.substr(-maxExtLength);
}
result.extension = maxExtLength ? extension : '';
result.name = nameParts.join('.');
return result;
};
/**
* Parse file name and extension.
* @param {Object} opts - middleware options.
* @param {string} fileName - Uploaded file name.
* @returns {string}
*/
const parseFileName = (opts, fileName) => {
// Check fileName argument
if (!fileName || typeof fileName !== 'string') return getTempFilename();
  // Cut off the file name if its length is more than 255.
let parsedName = fileName.length <= 255 ? fileName : fileName.substr(0, 255);
// Decode file name if uriDecodeFileNames option set true.
parsedName = uriDecodeFileName(opts, parsedName);
// Stop parsing file name if safeFileNames options hasn't been set.
if (!opts.safeFileNames) return parsedName;
// Set regular expression for the file name.
const nameRegex = typeof opts.safeFileNames === 'object' && opts.safeFileNames instanceof RegExp
? opts.safeFileNames
: SAFE_FILE_NAME_REGEX;
// Parse file name extension.
let {name, extension} = parseFileNameExtension(opts.preserveExtension, parsedName);
if (extension.length) extension = '.' + extension.replace(nameRegex, '');
return name.replace(nameRegex, '').concat(extension);
};
module.exports = {
isFunc,
debugLog,
copyFile, // For testing purpose.
moveFile,
errorFunc,
deleteFile, // For testing purpose.
buildFields,
buildOptions,
parseFileName,
getTempFilename,
promiseCallback,
checkAndMakeDir,
saveBufferToFile,
uriDecodeFileName,
isSafeFromPollution
};
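A hedged sketch of how `buildFields` aggregates repeated form fields and blocks prototype-polluting keys (mirroring how processMultipart builds req.body):
```javascript
const { buildFields } = require('./utilities');

let body = null;
body = buildFields(body, 'color', 'red');     // { color: 'red' }
body = buildFields(body, 'color', 'blue');    // { color: [ 'red', 'blue' ] }
body = buildFields(body, '__proto__', 'x');   // ignored: rejected by isSafeFromPollution
console.log(body);                            // null-prototype object: { color: [ 'red', 'blue' ] }
```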

42
node_modules/express-fileupload/package.json

@ -0,0 +1,42 @@
{
"name": "express-fileupload",
"version": "1.3.1",
"author": "Richard Girges <richardgirges@gmail.com>",
"description": "Simple express file upload middleware that wraps around Busboy",
"main": "./lib/index",
"scripts": {
"test": "nyc --reporter=html --reporter=text mocha -- -R spec",
"lint": "eslint ./",
"lint:fix": "eslint --fix ./",
"coveralls": "nyc report --reporter=text-lcov | coveralls"
},
"dependencies": {
"busboy": "^0.3.1"
},
"engines": {
"node": ">=12.0.0"
},
"keywords": [
"express",
"file-upload",
"upload",
"forms",
"multipart",
"files",
"busboy",
"middleware"
],
"license": "MIT",
"repository": "richardgirges/express-fileupload",
"devDependencies": {
"body-parser": "^1.19.0",
"coveralls": "^3.1.1",
"eslint": "^7.31.0",
"express": "^4.17.1",
"md5": "^2.3.0",
"mocha": "^9.2.0",
"nyc": "^15.1.0",
"rimraf": "^3.0.2",
"supertest": "^4.0.2"
}
}

78
node_modules/express-fileupload/test/fileFactory.spec.js

@ -0,0 +1,78 @@
'use strict';
const fs = require('fs');
const md5 = require('md5');
const path = require('path');
const assert = require('assert');
const server = require('./server');
const {isFunc} = require('../lib/utilities');
const fileFactory = require('../lib/fileFactory');
const mockFileName = 'basketball.png';
const mockFile = path.join(server.fileDir, mockFileName);
const mockBuffer = fs.readFileSync(mockFile);
const mockMd5 = md5(mockBuffer);
const mockFileOpts = {
name: mockFileName,
buffer: mockBuffer,
encoding: 'utf-8',
mimetype: 'image/png',
hash: mockMd5,
tempFilePath: mockFile
};
describe('Test of the fileFactory factory', function() {
beforeEach(() => server.clearUploadsDir());
it('return a file object', () => assert.ok(fileFactory(mockFileOpts)));
describe('Properties', function() {
it('contains the name property', () => {
assert.equal(fileFactory(mockFileOpts).name, mockFileName);
});
it('contains the data property', () => assert.ok(fileFactory(mockFileOpts).data));
it('contains the encoding property', () => {
assert.equal(fileFactory(mockFileOpts).encoding, 'utf-8');
});
it('contains the mimetype property', () => {
assert.equal(fileFactory(mockFileOpts).mimetype, 'image/png');
});
it('contains the md5 property', () => assert.equal(fileFactory(mockFileOpts).md5, mockMd5));
it('contains the mv method', () => assert.equal(isFunc(fileFactory(mockFileOpts).mv), true));
});
describe('File object behavior for in memory upload', function() {
const file = fileFactory(mockFileOpts);
it('move the file to the specified folder', (done) => {
file.mv(path.join(server.uploadDir, mockFileName), (err) => {
assert.ifError(err);
done();
});
});
it('reject the mv if the destination does not exists', (done) => {
file.mv(path.join(server.uploadDir, 'unknown', mockFileName), (err) => {
assert.ok(err);
done();
});
});
});
describe('File object behavior for upload into temporary file', function() {
const file = fileFactory(mockFileOpts, { useTempFiles: true });
it('move the file to the specified folder', (done) => {
file.mv(path.join(server.uploadDir, mockFileName), (err) => {
assert.ifError(err);
// Place back moved file.
fs.renameSync(path.join(server.uploadDir, mockFileName), mockFile);
done();
});
});
it('reject the mv if the destination does not exists', (done) => {
file.mv(path.join(server.uploadDir, 'unknown', mockFileName), (err) => {
assert.ok(err);
done();
});
});
});
});

Some files were not shown because too many files changed in this diff
