diff --git a/packages/csv-generate/lib/browser/index.js b/packages/csv-generate/lib/browser/index.js
index 60fc968be..2d8cc95ad 100644
--- a/packages/csv-generate/lib/browser/index.js
+++ b/packages/csv-generate/lib/browser/index.js
@@ -2,81 +2,26 @@
(function (Buffer){(function (){
"use strict";
-function _createForOfIteratorHelper(o, allowArrayLike) { var it = typeof Symbol !== "undefined" && o[Symbol.iterator] || o["@@iterator"]; if (!it) { if (Array.isArray(o) || (it = _unsupportedIterableToArray(o)) || allowArrayLike && o && typeof o.length === "number") { if (it) o = it; var i = 0; var F = function F() {}; return { s: F, n: function n() { if (i >= o.length) return { done: true }; return { done: false, value: o[i++] }; }, e: function e(_e) { throw _e; }, f: F }; } throw new TypeError("Invalid attempt to iterate non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method."); } var normalCompletion = true, didErr = false, err; return { s: function s() { it = it.call(o); }, n: function n() { var step = it.next(); normalCompletion = step.done; return step; }, e: function e(_e2) { didErr = true; err = _e2; }, f: function f() { try { if (!normalCompletion && it["return"] != null) it["return"](); } finally { if (didErr) throw err; } } }; }
-
-function _unsupportedIterableToArray(o, minLen) { if (!o) return; if (typeof o === "string") return _arrayLikeToArray(o, minLen); var n = Object.prototype.toString.call(o).slice(8, -1); if (n === "Object" && o.constructor) n = o.constructor.name; if (n === "Map" || n === "Set") return Array.from(o); if (n === "Arguments" || /^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)) return _arrayLikeToArray(o, minLen); }
-
-function _arrayLikeToArray(arr, len) { if (len == null || len > arr.length) len = arr.length; for (var i = 0, arr2 = new Array(len); i < len; i++) { arr2[i] = arr[i]; } return arr2; }
-
-function _typeof(obj) { "@babel/helpers - typeof"; if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); }
-
-/*
-CSV Generate - main module
-
-Please look at the [project documentation](https://csv.js.org/generate/) for
-additional information.
-*/
-var stream = require('stream');
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.Generator = exports.generate = exports["default"] = void 0;
-var util = require('util');
+var _stream = _interopRequireDefault(require("stream"));
-module.exports = function () {
- var options;
- var callback;
+var _util = _interopRequireDefault(require("util"));
- if (arguments.length === 2) {
- options = arguments[0];
- callback = arguments[1];
- } else if (arguments.length === 1) {
- if (typeof arguments[0] === 'function') {
- options = {};
- callback = arguments[0];
- } else {
- options = arguments[0];
- }
- } else if (arguments.length === 0) {
- options = {};
- }
+function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { "default": obj }; }
- var generator = new Generator(options);
-
- if (callback) {
- var data = [];
- generator.on('readable', function () {
- var d;
-
- while (d = generator.read()) {
- data.push(d);
- }
- });
- generator.on('error', callback);
- generator.on('end', function () {
- if (generator.options.objectMode) {
- callback(null, data);
- } else {
- if (generator.options.encoding) {
- callback(null, data.join(''));
- } else {
- callback(null, Buffer.concat(data));
- }
- }
- });
- }
+function _createForOfIteratorHelper(o, allowArrayLike) { var it = typeof Symbol !== "undefined" && o[Symbol.iterator] || o["@@iterator"]; if (!it) { if (Array.isArray(o) || (it = _unsupportedIterableToArray(o)) || allowArrayLike && o && typeof o.length === "number") { if (it) o = it; var i = 0; var F = function F() {}; return { s: F, n: function n() { if (i >= o.length) return { done: true }; return { done: false, value: o[i++] }; }, e: function e(_e) { throw _e; }, f: F }; } throw new TypeError("Invalid attempt to iterate non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method."); } var normalCompletion = true, didErr = false, err; return { s: function s() { it = it.call(o); }, n: function n() { var step = it.next(); normalCompletion = step.done; return step; }, e: function e(_e2) { didErr = true; err = _e2; }, f: function f() { try { if (!normalCompletion && it["return"] != null) it["return"](); } finally { if (didErr) throw err; } } }; }
- return generator;
-};
+function _unsupportedIterableToArray(o, minLen) { if (!o) return; if (typeof o === "string") return _arrayLikeToArray(o, minLen); var n = Object.prototype.toString.call(o).slice(8, -1); if (n === "Object" && o.constructor) n = o.constructor.name; if (n === "Map" || n === "Set") return Array.from(o); if (n === "Arguments" || /^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)) return _arrayLikeToArray(o, minLen); }
-Generator = function (_Generator) {
- function Generator() {
- return _Generator.apply(this, arguments);
- }
+function _arrayLikeToArray(arr, len) { if (len == null || len > arr.length) len = arr.length; for (var i = 0, arr2 = new Array(len); i < len; i++) { arr2[i] = arr[i]; } return arr2; }
- Generator.toString = function () {
- return _Generator.toString();
- };
+function _typeof(obj) { "@babel/helpers - typeof"; if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); }
- return Generator;
-}(function () {
+var Generator = function Generator() {
var options = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};
// Convert Stream Readable options if underscored
@@ -89,7 +34,8 @@ Generator = function (_Generator) {
} // Call parent constructor
- stream.Readable.call(this, options); // Clone and camelize options
+ _stream["default"].Readable.call(this, options); // Clone and camelize options
+
this.options = {};
@@ -153,11 +99,12 @@ Generator = function (_Generator) {
}
return this;
-});
+};
-util.inherits(Generator, stream.Readable); // Export the class
+exports.Generator = Generator;
+
+_util["default"].inherits(Generator, _stream["default"].Readable); // Generate a random number between 0 and 1 with 2 decimals. The function is idempotent if it detect the "seed" option.
-module.exports.Generator = Generator; // Generate a random number between 0 and 1 with 2 decimals. The function is idempotent if it detect the "seed" option.
Generator.prototype.random = function () {
if (this.options.seed) {
@@ -344,6 +291,56 @@ Generator.camelize = function (str) {
});
};
+var generate = function generate() {
+ var options;
+ var callback;
+
+ if (arguments.length === 2) {
+ options = arguments[0];
+ callback = arguments[1];
+ } else if (arguments.length === 1) {
+ if (typeof arguments[0] === 'function') {
+ options = {};
+ callback = arguments[0];
+ } else {
+ options = arguments[0];
+ }
+ } else if (arguments.length === 0) {
+ options = {};
+ }
+
+ var generator = new Generator(options);
+
+ if (callback) {
+ var data = [];
+ generator.on('readable', function () {
+ var d;
+
+ while (d = generator.read()) {
+ data.push(d);
+ }
+ });
+ generator.on('error', callback);
+ generator.on('end', function () {
+ if (generator.options.objectMode) {
+ callback(null, data);
+ } else {
+ if (generator.options.encoding) {
+ callback(null, data.join(''));
+ } else {
+ callback(null, Buffer.concat(data));
+ }
+ }
+ });
+ }
+
+ return generator;
+};
+
+exports.generate = generate;
+var _default = generate;
+exports["default"] = _default;
+
}).call(this)}).call(this,require("buffer").Buffer)
},{"buffer":5,"stream":24,"util":43}],2:[function(require,module,exports){
(function (global){(function (){
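Note on the hunk above: the browser bundle for index.js now marks itself as a transpiled ES module (exports.__esModule) and publishes generate, Generator, and a default export instead of assigning a single function to module.exports. A minimal consumer sketch, assuming the bundle is loaded through a CommonJS-aware bundler and that the entry path below is the published one:

const bundled = require('csv-generate/lib/browser/index.js') // path is an assumption
const generate = bundled.default || bundled.generate         // both point at the same function

generate({ length: 2, objectMode: true }, (err, records) => {
  if (err) throw err
  console.log(records) // array of generated records, per the callback branch above
})
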
diff --git a/packages/csv-generate/lib/browser/sync.js b/packages/csv-generate/lib/browser/sync.js
index 67815ed9d..dc4722105 100644
--- a/packages/csv-generate/lib/browser/sync.js
+++ b/packages/csv-generate/lib/browser/sync.js
@@ -2,81 +2,26 @@
(function (Buffer){(function (){
"use strict";
-function _createForOfIteratorHelper(o, allowArrayLike) { var it = typeof Symbol !== "undefined" && o[Symbol.iterator] || o["@@iterator"]; if (!it) { if (Array.isArray(o) || (it = _unsupportedIterableToArray(o)) || allowArrayLike && o && typeof o.length === "number") { if (it) o = it; var i = 0; var F = function F() {}; return { s: F, n: function n() { if (i >= o.length) return { done: true }; return { done: false, value: o[i++] }; }, e: function e(_e) { throw _e; }, f: F }; } throw new TypeError("Invalid attempt to iterate non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method."); } var normalCompletion = true, didErr = false, err; return { s: function s() { it = it.call(o); }, n: function n() { var step = it.next(); normalCompletion = step.done; return step; }, e: function e(_e2) { didErr = true; err = _e2; }, f: function f() { try { if (!normalCompletion && it["return"] != null) it["return"](); } finally { if (didErr) throw err; } } }; }
-
-function _unsupportedIterableToArray(o, minLen) { if (!o) return; if (typeof o === "string") return _arrayLikeToArray(o, minLen); var n = Object.prototype.toString.call(o).slice(8, -1); if (n === "Object" && o.constructor) n = o.constructor.name; if (n === "Map" || n === "Set") return Array.from(o); if (n === "Arguments" || /^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)) return _arrayLikeToArray(o, minLen); }
-
-function _arrayLikeToArray(arr, len) { if (len == null || len > arr.length) len = arr.length; for (var i = 0, arr2 = new Array(len); i < len; i++) { arr2[i] = arr[i]; } return arr2; }
-
-function _typeof(obj) { "@babel/helpers - typeof"; if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); }
-
-/*
-CSV Generate - main module
-
-Please look at the [project documentation](https://csv.js.org/generate/) for
-additional information.
-*/
-var stream = require('stream');
-
-var util = require('util');
-
-module.exports = function () {
- var options;
- var callback;
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.Generator = exports.generate = exports["default"] = void 0;
- if (arguments.length === 2) {
- options = arguments[0];
- callback = arguments[1];
- } else if (arguments.length === 1) {
- if (typeof arguments[0] === 'function') {
- options = {};
- callback = arguments[0];
- } else {
- options = arguments[0];
- }
- } else if (arguments.length === 0) {
- options = {};
- }
+var _stream = _interopRequireDefault(require("stream"));
- var generator = new Generator(options);
+var _util = _interopRequireDefault(require("util"));
- if (callback) {
- var data = [];
- generator.on('readable', function () {
- var d;
+function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { "default": obj }; }
- while (d = generator.read()) {
- data.push(d);
- }
- });
- generator.on('error', callback);
- generator.on('end', function () {
- if (generator.options.objectMode) {
- callback(null, data);
- } else {
- if (generator.options.encoding) {
- callback(null, data.join(''));
- } else {
- callback(null, Buffer.concat(data));
- }
- }
- });
- }
+function _createForOfIteratorHelper(o, allowArrayLike) { var it = typeof Symbol !== "undefined" && o[Symbol.iterator] || o["@@iterator"]; if (!it) { if (Array.isArray(o) || (it = _unsupportedIterableToArray(o)) || allowArrayLike && o && typeof o.length === "number") { if (it) o = it; var i = 0; var F = function F() {}; return { s: F, n: function n() { if (i >= o.length) return { done: true }; return { done: false, value: o[i++] }; }, e: function e(_e) { throw _e; }, f: F }; } throw new TypeError("Invalid attempt to iterate non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method."); } var normalCompletion = true, didErr = false, err; return { s: function s() { it = it.call(o); }, n: function n() { var step = it.next(); normalCompletion = step.done; return step; }, e: function e(_e2) { didErr = true; err = _e2; }, f: function f() { try { if (!normalCompletion && it["return"] != null) it["return"](); } finally { if (didErr) throw err; } } }; }
- return generator;
-};
+function _unsupportedIterableToArray(o, minLen) { if (!o) return; if (typeof o === "string") return _arrayLikeToArray(o, minLen); var n = Object.prototype.toString.call(o).slice(8, -1); if (n === "Object" && o.constructor) n = o.constructor.name; if (n === "Map" || n === "Set") return Array.from(o); if (n === "Arguments" || /^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)) return _arrayLikeToArray(o, minLen); }
-Generator = function (_Generator) {
- function Generator() {
- return _Generator.apply(this, arguments);
- }
+function _arrayLikeToArray(arr, len) { if (len == null || len > arr.length) len = arr.length; for (var i = 0, arr2 = new Array(len); i < len; i++) { arr2[i] = arr[i]; } return arr2; }
- Generator.toString = function () {
- return _Generator.toString();
- };
+function _typeof(obj) { "@babel/helpers - typeof"; if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); }
- return Generator;
-}(function () {
+var Generator = function Generator() {
var options = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};
// Convert Stream Readable options if underscored
@@ -89,7 +34,8 @@ Generator = function (_Generator) {
} // Call parent constructor
- stream.Readable.call(this, options); // Clone and camelize options
+ _stream["default"].Readable.call(this, options); // Clone and camelize options
+
this.options = {};
@@ -153,11 +99,12 @@ Generator = function (_Generator) {
}
return this;
-});
+};
-util.inherits(Generator, stream.Readable); // Export the class
+exports.Generator = Generator;
+
+_util["default"].inherits(Generator, _stream["default"].Readable); // Generate a random number between 0 and 1 with 2 decimals. The function is idempotent if it detect the "seed" option.
-module.exports.Generator = Generator; // Generate a random number between 0 and 1 with 2 decimals. The function is idempotent if it detect the "seed" option.
Generator.prototype.random = function () {
if (this.options.seed) {
@@ -344,19 +291,72 @@ Generator.camelize = function (str) {
});
};
+var generate = function generate() {
+ var options;
+ var callback;
+
+ if (arguments.length === 2) {
+ options = arguments[0];
+ callback = arguments[1];
+ } else if (arguments.length === 1) {
+ if (typeof arguments[0] === 'function') {
+ options = {};
+ callback = arguments[0];
+ } else {
+ options = arguments[0];
+ }
+ } else if (arguments.length === 0) {
+ options = {};
+ }
+
+ var generator = new Generator(options);
+
+ if (callback) {
+ var data = [];
+ generator.on('readable', function () {
+ var d;
+
+ while (d = generator.read()) {
+ data.push(d);
+ }
+ });
+ generator.on('error', callback);
+ generator.on('end', function () {
+ if (generator.options.objectMode) {
+ callback(null, data);
+ } else {
+ if (generator.options.encoding) {
+ callback(null, data.join(''));
+ } else {
+ callback(null, Buffer.concat(data));
+ }
+ }
+ });
+ }
+
+ return generator;
+};
+
+exports.generate = generate;
+var _default = generate;
+exports["default"] = _default;
+
}).call(this)}).call(this,require("buffer").Buffer)
},{"buffer":6,"stream":25,"util":44}],2:[function(require,module,exports){
"use strict";
-/*
-CSV Generate - sync module
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports["default"] = _default;
-Please look at the [project documentation](https://csv.js.org/generate/) for
-additional information.
-*/
-var generate = require('.');
+var _index = _interopRequireDefault(require("./index.js"));
-module.exports = function (options) {
+function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { "default": obj }; }
+
+function _typeof(obj) { "@babel/helpers - typeof"; if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); }
+
+function _default(options) {
if (typeof options === 'string' && /\d+/.test(options)) {
options = parseInt(options);
}
@@ -365,6 +365,8 @@ module.exports = function (options) {
options = {
length: options
};
+ } else if (_typeof(options) !== 'object' || options === null) {
+ throw Error('Invalid Argument: options must be an object or an integer');
}
if (!Number.isInteger(options.length)) {
@@ -375,7 +377,7 @@ module.exports = function (options) {
var work = true; // See https://nodejs.org/api/stream.html#stream_new_stream_readable_options
options.highWaterMark = options.objectMode ? 16 : 16384;
- var generator = new generate.Generator(options);
+ var generator = new _index["default"](options);
generator.push = function (chunk) {
if (chunk === null) {
@@ -398,9 +400,9 @@ module.exports = function (options) {
} else {
return chunks;
}
-};
+}
-},{".":1}],3:[function(require,module,exports){
+},{"./index.js":1}],3:[function(require,module,exports){
(function (global){(function (){
'use strict';
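Note on the hunk above: the sync wrapper in this bundle now rejects anything that is neither an options object nor an integer. A minimal sketch of the resulting behaviour, assuming the bundle is consumed as CommonJS and that the entry path is correct:

const generateSync = require('csv-generate/lib/browser/sync.js').default // path is an assumption

const csv = generateSync(2)                 // integer shorthand for { length: 2 }, returns CSV text
const alsoCsv = generateSync({ length: 2 }) // plain options object
try {
  generateSync(3.14)                        // neither an integer nor an object
} catch (err) {
  console.log(err.message)                  // 'Invalid Argument: options must be an object or an integer'
}
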
diff --git a/packages/csv-generate/lib/es5/index.d.ts b/packages/csv-generate/lib/es5/index.d.ts
deleted file mode 100644
index befb97335..000000000
--- a/packages/csv-generate/lib/es5/index.d.ts
+++ /dev/null
@@ -1,98 +0,0 @@
-
-/// <reference types="node" />
-
-import * as stream from "stream";
-
-export = generate;
-
-declare function generate(options?: generate.Options, callback?: generate.Callback): generate.Generator;
-declare function generate(callback?: generate.Callback): generate.Generator;
-declare namespace generate {
-
- type Callback = (err?: Error, records?: any) => void;
-
- type MatcherFunc = (value: any) => boolean;
-
- class Generator extends stream.Readable {
- constructor(options?: Options);
-
- readonly options: Options;
- }
-
- interface Options {
-
- /**
- * Define the number of generated fields and the generation method.
- */
- columns?: number | string[];
-
- /**
- * Set the field delimiter.
- */
- delimiter?: string;
-
- /**
- * Period to run in milliseconds.
- */
- duration?: number;
-
- /**
- * If specified, then buffers will be decoded to strings using the specified encoding.
- */
- encoding?: string;
-
- /**
- * When to stop the generation.
- */
- end?: number | Date;
-
- /**
- * One or multiple characters to print at the end of the file; only apply when objectMode is disabled.
- */
- eof?: boolean | string;
-
- /**
- * Generate buffers equals length as defined by the `highWaterMark` option.
- */
- fixed_size?: boolean;
- fixedSize?: boolean;
-
- /**
- * The maximum number of bytes to store in the internal buffer before ceasing to read from the underlying resource.
- */
- high_water_mark?: number;
- highWaterMark?: number;
-
- /**
- * Number of lines or records to generate.
- */
- length?: number;
-
- /**
- * Maximum number of characters per word.
- */
- max_word_length?: number;
- maxWordLength?: number;
-
- /**
- * Whether this stream should behave as a stream of objects.
- */
- object_mode?: boolean
- objectMode?: boolean;
-
- /**
- * One or multiple characters used to delimit records.
- */
- row_delimiter?: string;
-
- /**
- * Generate idempotent random characters if a number provided.
- */
- seed?: boolean | number;
-
- /**
- * The time to wait between the generation of each records
- */
- sleep?: number;
- }
-}
diff --git a/packages/csv-generate/lib/es5/index.js b/packages/csv-generate/lib/es5/index.js
index a612975c9..775c0bae3 100644
--- a/packages/csv-generate/lib/es5/index.js
+++ b/packages/csv-generate/lib/es5/index.js
@@ -1,80 +1,25 @@
"use strict";
-function _createForOfIteratorHelper(o, allowArrayLike) { var it = typeof Symbol !== "undefined" && o[Symbol.iterator] || o["@@iterator"]; if (!it) { if (Array.isArray(o) || (it = _unsupportedIterableToArray(o)) || allowArrayLike && o && typeof o.length === "number") { if (it) o = it; var i = 0; var F = function F() {}; return { s: F, n: function n() { if (i >= o.length) return { done: true }; return { done: false, value: o[i++] }; }, e: function e(_e) { throw _e; }, f: F }; } throw new TypeError("Invalid attempt to iterate non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method."); } var normalCompletion = true, didErr = false, err; return { s: function s() { it = it.call(o); }, n: function n() { var step = it.next(); normalCompletion = step.done; return step; }, e: function e(_e2) { didErr = true; err = _e2; }, f: function f() { try { if (!normalCompletion && it["return"] != null) it["return"](); } finally { if (didErr) throw err; } } }; }
-
-function _unsupportedIterableToArray(o, minLen) { if (!o) return; if (typeof o === "string") return _arrayLikeToArray(o, minLen); var n = Object.prototype.toString.call(o).slice(8, -1); if (n === "Object" && o.constructor) n = o.constructor.name; if (n === "Map" || n === "Set") return Array.from(o); if (n === "Arguments" || /^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)) return _arrayLikeToArray(o, minLen); }
-
-function _arrayLikeToArray(arr, len) { if (len == null || len > arr.length) len = arr.length; for (var i = 0, arr2 = new Array(len); i < len; i++) { arr2[i] = arr[i]; } return arr2; }
-
-function _typeof(obj) { "@babel/helpers - typeof"; if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); }
-
-/*
-CSV Generate - main module
-
-Please look at the [project documentation](https://csv.js.org/generate/) for
-additional information.
-*/
-var stream = require('stream');
-
-var util = require('util');
-
-module.exports = function () {
- var options;
- var callback;
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.Generator = exports.generate = exports["default"] = void 0;
- if (arguments.length === 2) {
- options = arguments[0];
- callback = arguments[1];
- } else if (arguments.length === 1) {
- if (typeof arguments[0] === 'function') {
- options = {};
- callback = arguments[0];
- } else {
- options = arguments[0];
- }
- } else if (arguments.length === 0) {
- options = {};
- }
+var _stream = _interopRequireDefault(require("stream"));
- var generator = new Generator(options);
+var _util = _interopRequireDefault(require("util"));
- if (callback) {
- var data = [];
- generator.on('readable', function () {
- var d;
+function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { "default": obj }; }
- while (d = generator.read()) {
- data.push(d);
- }
- });
- generator.on('error', callback);
- generator.on('end', function () {
- if (generator.options.objectMode) {
- callback(null, data);
- } else {
- if (generator.options.encoding) {
- callback(null, data.join(''));
- } else {
- callback(null, Buffer.concat(data));
- }
- }
- });
- }
+function _createForOfIteratorHelper(o, allowArrayLike) { var it = typeof Symbol !== "undefined" && o[Symbol.iterator] || o["@@iterator"]; if (!it) { if (Array.isArray(o) || (it = _unsupportedIterableToArray(o)) || allowArrayLike && o && typeof o.length === "number") { if (it) o = it; var i = 0; var F = function F() {}; return { s: F, n: function n() { if (i >= o.length) return { done: true }; return { done: false, value: o[i++] }; }, e: function e(_e) { throw _e; }, f: F }; } throw new TypeError("Invalid attempt to iterate non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method."); } var normalCompletion = true, didErr = false, err; return { s: function s() { it = it.call(o); }, n: function n() { var step = it.next(); normalCompletion = step.done; return step; }, e: function e(_e2) { didErr = true; err = _e2; }, f: function f() { try { if (!normalCompletion && it["return"] != null) it["return"](); } finally { if (didErr) throw err; } } }; }
- return generator;
-};
+function _unsupportedIterableToArray(o, minLen) { if (!o) return; if (typeof o === "string") return _arrayLikeToArray(o, minLen); var n = Object.prototype.toString.call(o).slice(8, -1); if (n === "Object" && o.constructor) n = o.constructor.name; if (n === "Map" || n === "Set") return Array.from(o); if (n === "Arguments" || /^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)) return _arrayLikeToArray(o, minLen); }
-Generator = function (_Generator) {
- function Generator() {
- return _Generator.apply(this, arguments);
- }
+function _arrayLikeToArray(arr, len) { if (len == null || len > arr.length) len = arr.length; for (var i = 0, arr2 = new Array(len); i < len; i++) { arr2[i] = arr[i]; } return arr2; }
- Generator.toString = function () {
- return _Generator.toString();
- };
+function _typeof(obj) { "@babel/helpers - typeof"; if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); }
- return Generator;
-}(function () {
+var Generator = function Generator() {
var options = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};
// Convert Stream Readable options if underscored
@@ -87,7 +32,8 @@ Generator = function (_Generator) {
} // Call parent constructor
- stream.Readable.call(this, options); // Clone and camelize options
+ _stream["default"].Readable.call(this, options); // Clone and camelize options
+
this.options = {};
@@ -151,11 +97,12 @@ Generator = function (_Generator) {
}
return this;
-});
+};
+
+exports.Generator = Generator;
-util.inherits(Generator, stream.Readable); // Export the class
+_util["default"].inherits(Generator, _stream["default"].Readable); // Generate a random number between 0 and 1 with 2 decimals. The function is idempotent if it detect the "seed" option.
-module.exports.Generator = Generator; // Generate a random number between 0 and 1 with 2 decimals. The function is idempotent if it detect the "seed" option.
Generator.prototype.random = function () {
if (this.options.seed) {
@@ -340,4 +287,54 @@ Generator.camelize = function (str) {
return str.replace(/_([a-z])/gi, function (_, match, index) {
return match.toUpperCase();
});
-};
\ No newline at end of file
+};
+
+var generate = function generate() {
+ var options;
+ var callback;
+
+ if (arguments.length === 2) {
+ options = arguments[0];
+ callback = arguments[1];
+ } else if (arguments.length === 1) {
+ if (typeof arguments[0] === 'function') {
+ options = {};
+ callback = arguments[0];
+ } else {
+ options = arguments[0];
+ }
+ } else if (arguments.length === 0) {
+ options = {};
+ }
+
+ var generator = new Generator(options);
+
+ if (callback) {
+ var data = [];
+ generator.on('readable', function () {
+ var d;
+
+ while (d = generator.read()) {
+ data.push(d);
+ }
+ });
+ generator.on('error', callback);
+ generator.on('end', function () {
+ if (generator.options.objectMode) {
+ callback(null, data);
+ } else {
+ if (generator.options.encoding) {
+ callback(null, data.join(''));
+ } else {
+ callback(null, Buffer.concat(data));
+ }
+ }
+ });
+ }
+
+ return generator;
+};
+
+exports.generate = generate;
+var _default = generate;
+exports["default"] = _default;
\ No newline at end of file
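
Note on the hunk above: the es5 build exposes the same export surface under CommonJS, with default, generate, and Generator all attached to exports behind the __esModule marker. A minimal sketch of a CommonJS consumer, where the require path is an assumption:

const mod = require('csv-generate/lib/es5/index.js')         // path is an assumption
const generate = mod.default                                  // identical to mod.generate
const generator = new mod.Generator({ seed: 1, length: 3 })   // the class remains directly constructible

generate({ length: 1 }, (err, output) => {
  if (err) throw err
  console.log(output.toString())                              // Buffer.concat(...) of the emitted chunks
})
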
diff --git a/packages/csv-generate/lib/es5/sync.js b/packages/csv-generate/lib/es5/sync.js
index 468e1ead6..dd423e1d0 100644
--- a/packages/csv-generate/lib/es5/sync.js
+++ b/packages/csv-generate/lib/es5/sync.js
@@ -1,14 +1,17 @@
"use strict";
-/*
-CSV Generate - sync module
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports["default"] = _default;
-Please look at the [project documentation](https://csv.js.org/generate/) for
-additional information.
-*/
-var generate = require('.');
+var _index = _interopRequireDefault(require("./index.js"));
-module.exports = function (options) {
+function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { "default": obj }; }
+
+function _typeof(obj) { "@babel/helpers - typeof"; if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); }
+
+function _default(options) {
if (typeof options === 'string' && /\d+/.test(options)) {
options = parseInt(options);
}
@@ -17,6 +20,8 @@ module.exports = function (options) {
options = {
length: options
};
+ } else if (_typeof(options) !== 'object' || options === null) {
+ throw Error('Invalid Argument: options must be an object or an integer');
}
if (!Number.isInteger(options.length)) {
@@ -27,7 +32,7 @@ module.exports = function (options) {
var work = true; // See https://nodejs.org/api/stream.html#stream_new_stream_readable_options
options.highWaterMark = options.objectMode ? 16 : 16384;
- var generator = new generate.Generator(options);
+ var generator = new _index["default"](options);
generator.push = function (chunk) {
if (chunk === null) {
@@ -50,4 +55,4 @@ module.exports = function (options) {
} else {
return chunks;
}
-};
\ No newline at end of file
+}
\ No newline at end of file
diff --git a/packages/csv-generate/lib/index.d.ts b/packages/csv-generate/lib/index.d.ts
index befb97335..c7b924de2 100644
--- a/packages/csv-generate/lib/index.d.ts
+++ b/packages/csv-generate/lib/index.d.ts
@@ -3,96 +3,93 @@
import * as stream from "stream";
-export = generate;
-
-declare function generate(options?: generate.Options, callback?: generate.Callback): generate.Generator;
-declare function generate(callback?: generate.Callback): generate.Generator;
-declare namespace generate {
-
- type Callback = (err?: Error, records?: any) => void;
-
- type MatcherFunc = (value: any) => boolean;
-
- class Generator extends stream.Readable {
- constructor(options?: Options);
-
- readonly options: Options;
- }
-
- interface Options {
-
- /**
- * Define the number of generated fields and the generation method.
- */
- columns?: number | string[];
-
- /**
- * Set the field delimiter.
- */
- delimiter?: string;
-
- /**
- * Period to run in milliseconds.
- */
- duration?: number;
-
- /**
- * If specified, then buffers will be decoded to strings using the specified encoding.
- */
- encoding?: string;
-
- /**
- * When to stop the generation.
- */
- end?: number | Date;
-
- /**
- * One or multiple characters to print at the end of the file; only apply when objectMode is disabled.
- */
- eof?: boolean | string;
-
- /**
- * Generate buffers equals length as defined by the `highWaterMark` option.
- */
- fixed_size?: boolean;
- fixedSize?: boolean;
-
- /**
- * The maximum number of bytes to store in the internal buffer before ceasing to read from the underlying resource.
- */
- high_water_mark?: number;
- highWaterMark?: number;
-
- /**
- * Number of lines or records to generate.
- */
- length?: number;
-
- /**
- * Maximum number of characters per word.
- */
- max_word_length?: number;
- maxWordLength?: number;
-
- /**
- * Whether this stream should behave as a stream of objects.
- */
- object_mode?: boolean
- objectMode?: boolean;
-
- /**
- * One or multiple characters used to delimit records.
- */
- row_delimiter?: string;
-
- /**
- * Generate idempotent random characters if a number provided.
- */
- seed?: boolean | number;
-
- /**
- * The time to wait between the generation of each records
- */
- sleep?: number;
- }
+type Callback = (err?: Error, records?: any) => void;
+
+export class Generator extends stream.Readable {
+ constructor(options?: Options);
+
+ readonly options: Options;
+}
+
+export interface Options {
+
+ /**
+ * Define the number of generated fields and the generation method.
+ */
+ columns?: number | string[];
+
+ /**
+ * Set the field delimiter.
+ */
+ delimiter?: string;
+
+ /**
+ * Period to run in milliseconds.
+ */
+ duration?: number;
+
+ /**
+ * If specified, then buffers will be decoded to strings using the specified encoding.
+ */
+ encoding?: string;
+
+ /**
+ * When to stop the generation.
+ */
+ end?: number | Date;
+
+ /**
+ * One or multiple characters to print at the end of the file; only apply when objectMode is disabled.
+ */
+ eof?: boolean | string;
+
+ /**
+ * Generate buffers equals length as defined by the `highWaterMark` option.
+ */
+ fixed_size?: boolean;
+ fixedSize?: boolean;
+
+ /**
+ * The maximum number of bytes to store in the internal buffer before ceasing to read from the underlying resource.
+ */
+ high_water_mark?: number;
+ highWaterMark?: number;
+
+ /**
+ * Number of lines or records to generate.
+ */
+ length?: number;
+
+ /**
+ * Maximum number of characters per word.
+ */
+ max_word_length?: number;
+ maxWordLength?: number;
+
+ /**
+ * Whether this stream should behave as a stream of objects.
+ */
+ object_mode?: boolean
+ objectMode?: boolean;
+
+ /**
+ * One or multiple characters used to delimit records.
+ */
+ row_delimiter?: string;
+
+ /**
+ * Generate idempotent random characters if a number provided.
+ */
+ seed?: boolean | number;
+
+ /**
+ * The time to wait between the generation of each records
+ */
+ sleep?: number;
}
+
+declare function generate(options?: Options, callback?: Callback): Generator;
+declare function generate(callback?: Callback): Generator;
+
+export default generate;
+export {generate};
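
Note on the hunk above: the declaration file drops the export-assignment namespace in favour of a default export plus named generate, Generator, and Options exports. A minimal sketch of the import shapes it now describes, using the same relative path as the samples; TypeScript users can additionally import the Options interface:

import generate, { generate as namedGenerate, Generator } from '../lib/index.js'

const generator = generate({ length: 1 })     // a Generator, i.e. a stream.Readable
console.log(generator instanceof Generator)   // true
console.log(namedGenerate === generate)       // true: default and named export are the same binding
generator.destroy()
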
diff --git a/packages/csv-generate/lib/index.js b/packages/csv-generate/lib/index.js
index c3e5fba44..622f1bc26 100644
--- a/packages/csv-generate/lib/index.js
+++ b/packages/csv-generate/lib/index.js
@@ -6,50 +6,10 @@ Please look at the [project documentation](https://csv.js.org/generate/) for
additional information.
*/
-const stream = require('stream')
-const util = require('util')
+import stream from 'stream'
+import util from 'util'
-module.exports = function(){
- let options
- let callback
- if(arguments.length === 2){
- options = arguments[0]
- callback = arguments[1]
- }else if(arguments.length === 1){
- if(typeof arguments[0] === 'function'){
- options = {}
- callback = arguments[0]
- }else{
- options = arguments[0]
- }
- }else if(arguments.length === 0){
- options = {}
- }
- const generator = new Generator(options)
- if(callback){
- const data = []
- generator.on('readable', function(){
- let d; while(d = generator.read()){
- data.push(d)
- }
- })
- generator.on('error', callback)
- generator.on('end', function(){
- if(generator.options.objectMode){
- callback(null, data)
- }else{
- if(generator.options.encoding){
- callback(null, data.join(''))
- }else{
- callback(null, Buffer.concat(data))
- }
- }
- })
- }
- return generator
-}
-
-Generator = function(options = {}){
+const Generator = function(options = {}){
// Convert Stream Readable options if underscored
if(options.high_water_mark){
options.highWaterMark = options.high_water_mark
@@ -111,8 +71,7 @@ Generator = function(options = {}){
return this
}
util.inherits(Generator, stream.Readable)
-// Export the class
-module.exports.Generator = Generator
+
// Generate a random number between 0 and 1 with 2 decimals. The function is idempotent if it detect the "seed" option.
Generator.prototype.random = function(){
if(this.options.seed){
@@ -227,3 +186,46 @@ Generator.camelize = function(str){
return match.toUpperCase()
})
}
+
+const generate = function(){
+ let options
+ let callback
+ if(arguments.length === 2){
+ options = arguments[0]
+ callback = arguments[1]
+ }else if(arguments.length === 1){
+ if(typeof arguments[0] === 'function'){
+ options = {}
+ callback = arguments[0]
+ }else{
+ options = arguments[0]
+ }
+ }else if(arguments.length === 0){
+ options = {}
+ }
+ const generator = new Generator(options)
+ if(callback){
+ const data = []
+ generator.on('readable', function(){
+ let d; while(d = generator.read()){
+ data.push(d)
+ }
+ })
+ generator.on('error', callback)
+ generator.on('end', function(){
+ if(generator.options.objectMode){
+ callback(null, data)
+ }else{
+ if(generator.options.encoding){
+ callback(null, data.join(''))
+ }else{
+ callback(null, Buffer.concat(data))
+ }
+ }
+ })
+ }
+ return generator
+}
+
+export default generate
+export {generate, Generator}
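
Note on the hunk above: the relocated generate wrapper keeps the three callback result shapes intact: raw records in objectMode, a joined string when an encoding is set, and a concatenated Buffer otherwise. A minimal sketch exercising all three, using the samples' relative import path:

import generate from '../lib/index.js'

generate({ length: 2, objectMode: true }, (err, records) => {
  if (err) throw err
  console.log(Array.isArray(records))   // true: records are collected as-is
})
generate({ length: 2, encoding: 'utf8' }, (err, csv) => {
  if (err) throw err
  console.log(typeof csv)               // 'string': string chunks are joined
})
generate({ length: 2 }, (err, csv) => {
  if (err) throw err
  console.log(Buffer.isBuffer(csv))     // true: buffer chunks are concatenated
})
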
diff --git a/packages/csv-generate/lib/sync.d.ts b/packages/csv-generate/lib/sync.d.ts
new file mode 100644
index 000000000..4a9c6dca7
--- /dev/null
+++ b/packages/csv-generate/lib/sync.d.ts
@@ -0,0 +1,6 @@
+
+import {Options} from './index';
+
+declare function generate(options: number | Options): string & Array<any>;
+
+export default generate;
diff --git a/packages/csv-generate/lib/sync.js b/packages/csv-generate/lib/sync.js
index cd90a0a0a..c836d93da 100644
--- a/packages/csv-generate/lib/sync.js
+++ b/packages/csv-generate/lib/sync.js
@@ -6,16 +6,16 @@ Please look at the [project documentation](https://csv.js.org/generate/) for
additional information.
*/
-const generate = require('.')
+import Generator from './index.js'
-module.exports = function(options){
+export default function(options){
if(typeof options === 'string' && /\d+/.test(options)){
options = parseInt(options)
}
if(Number.isInteger(options)){
options = {length: options}
}else if(typeof options !== 'object' || options === null){
- throw Error('Invalid Argument: options must be an o object or a integer')
+ throw Error('Invalid Argument: options must be an object or an integer')
}
if(!Number.isInteger(options.length)){
throw Error('Invalid Argument: length is not defined')
@@ -24,7 +24,7 @@ module.exports = function(options){
let work = true
// See https://nodejs.org/api/stream.html#stream_new_stream_readable_options
options.highWaterMark = options.objectMode ? 16 : 16384
- const generator = new generate.Generator(options)
+ const generator = new Generator(options)
generator.push = function(chunk){
if(chunk === null){
return work = false
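Note on the hunk above: the sync entry now builds its generator through the default export of index.js and carries the corrected error message. A minimal usage sketch, mirroring the relative path used by the tests:

import generateSync from '../lib/sync.js'

const csv = generateSync(3)                                    // integer shorthand for { length: 3 }
const records = generateSync({ length: 3, objectMode: true })  // array of records

try {
  generateSync({})                                             // length missing
} catch (err) {
  console.log(err.message)   // 'Invalid Argument: length is not defined'
}
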
diff --git a/packages/csv-generate/package.json b/packages/csv-generate/package.json
index bc9d02b18..cfc075632 100644
--- a/packages/csv-generate/package.json
+++ b/packages/csv-generate/package.json
@@ -50,16 +50,15 @@
],
"main": "./lib",
"mocha": {
- "throw-deprecation": true,
- "require": [
- "should",
- "coffeescript/register",
- "ts-node/register"
- ],
"inline-diffs": true,
- "timeout": 40000,
+ "loader": "./test/loaders/all.mjs",
+ "recursive": true,
"reporter": "spec",
- "recursive": true
+ "require": [
+ "should"
+ ],
+ "throw-deprecation": true,
+ "timeout": 40000
},
"scripts": {
"build:babel": "cd lib && babel *.js -d es5 && cd ..",
@@ -69,5 +68,6 @@
"pretest": "npm run build",
"test": "mocha test/**/*.{coffee,ts}"
},
+ "type": "module",
"types": "./lib/index.d.ts"
}
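
Note on the hunk above: declaring "type": "module" makes the package's .js sources load as ES modules, which is what the rewritten lib, samples, and tests rely on. A minimal consumer sketch, assuming a published build that resolves from the bare csv-generate specifier:

import assert from 'assert'
import generate from 'csv-generate'   // bare specifier is an assumption about the published entry

generate({ seed: 1, objectMode: true, length: 1 }, (err, records) => {
  if (err) throw err
  assert.equal(records.length, 1)
})
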
diff --git a/packages/csv-generate/samples/api.callback.js b/packages/csv-generate/samples/api.callback.js
index abf087365..3df2587ea 100644
--- a/packages/csv-generate/samples/api.callback.js
+++ b/packages/csv-generate/samples/api.callback.js
@@ -1,6 +1,7 @@
-const generate = require('../lib')
-const assert = require('assert')
+import generate from '../lib/index.js'
+import assert from 'assert'
+
generate({
seed: 1,
objectMode: true,
diff --git a/packages/csv-generate/samples/api.stream.js b/packages/csv-generate/samples/api.stream.js
index 8fcd1f836..9de0d8b22 100644
--- a/packages/csv-generate/samples/api.stream.js
+++ b/packages/csv-generate/samples/api.stream.js
@@ -1,6 +1,7 @@
-const generate = require('../lib')
-const assert = require('assert')
+import generate from '../lib/index.js'
+import assert from 'assert'
+
const records = []
generate({
seed: 1,
diff --git a/packages/csv-generate/samples/api.sync.js b/packages/csv-generate/samples/api.sync.js
index e52fc7382..95e5b28c6 100644
--- a/packages/csv-generate/samples/api.sync.js
+++ b/packages/csv-generate/samples/api.sync.js
@@ -1,6 +1,6 @@
-const generate = require('../lib/sync')
-const assert = require('assert')
+import generate from '../lib/index.js'
+import assert from 'assert'
const records = generate({
seed: 1,
diff --git a/packages/csv-generate/samples/options.objectmode.callback.js b/packages/csv-generate/samples/options.objectmode.callback.js
index cceff173a..3df2587ea 100644
--- a/packages/csv-generate/samples/options.objectmode.callback.js
+++ b/packages/csv-generate/samples/options.objectmode.callback.js
@@ -1,6 +1,7 @@
-const generate = require('..')
-const assert = require('assert')
+import generate from '../lib/index.js'
+import assert from 'assert'
+
generate({
seed: 1,
objectMode: true,
diff --git a/packages/csv-generate/samples/options.objectmode.stream.js b/packages/csv-generate/samples/options.objectmode.stream.js
index 231a86be6..c411c539f 100644
--- a/packages/csv-generate/samples/options.objectmode.stream.js
+++ b/packages/csv-generate/samples/options.objectmode.stream.js
@@ -1,6 +1,7 @@
-const generate = require('..')
-const assert = require('assert')
+import generate from '../lib/index.js'
+import assert from 'assert'
+
generate({
seed: 1,
objectMode: true,
diff --git a/packages/csv-generate/samples/pipe.js b/packages/csv-generate/samples/pipe.js
index a01618fee..8aa8fda81 100644
--- a/packages/csv-generate/samples/pipe.js
+++ b/packages/csv-generate/samples/pipe.js
@@ -1,5 +1,5 @@
-const generate = require('../lib')
+import generate from '../lib/index.js'
generate({
columns: ['int', 'bool'],
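
Note on the hunk above: the pipe sample switches to an ESM import as well. A hedged completion of it, where the length option is an addition of this sketch (it is not shown in the excerpt) so the generated stream terminates:

import generate from '../lib/index.js'

generate({
  columns: ['int', 'bool'],
  length: 2               // assumed here so the stream ends instead of running indefinitely
}).pipe(process.stdout)
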
diff --git a/packages/csv-generate/test/api.callback.coffee b/packages/csv-generate/test/api.callback.coffee
index e86d85294..60574dd3a 100644
--- a/packages/csv-generate/test/api.callback.coffee
+++ b/packages/csv-generate/test/api.callback.coffee
@@ -1,5 +1,5 @@
-generate = require '../lib'
+import generate from '../lib/index.js'
describe 'api callback', ->
diff --git a/packages/csv-generate/test/api.end.coffee b/packages/csv-generate/test/api.end.coffee
index 73655481f..2061027a8 100644
--- a/packages/csv-generate/test/api.end.coffee
+++ b/packages/csv-generate/test/api.end.coffee
@@ -1,5 +1,5 @@
-generate = require '../lib'
+import generate from '../lib/index.js'
describe 'api end', ->
diff --git a/packages/csv-generate/test/api.highWaterMark.coffee b/packages/csv-generate/test/api.highWaterMark.coffee
index 03705547f..a205ad21e 100644
--- a/packages/csv-generate/test/api.highWaterMark.coffee
+++ b/packages/csv-generate/test/api.highWaterMark.coffee
@@ -1,5 +1,5 @@
-generate = require '../lib'
+import generate from '../lib/index.js'
describe 'api highWaterMark', ->
diff --git a/packages/csv-generate/test/api.pipe.coffee b/packages/csv-generate/test/api.pipe.coffee
index 647b517d9..a02e55e97 100644
--- a/packages/csv-generate/test/api.pipe.coffee
+++ b/packages/csv-generate/test/api.pipe.coffee
@@ -1,7 +1,7 @@
-stream = require 'stream'
-util = require 'util'
-generate = require '../lib'
+import stream from 'stream'
+import util from 'util'
+import generate from '../lib/index.js'
describe 'api pipe', ->
diff --git a/packages/csv-generate/test/api.read.coffee b/packages/csv-generate/test/api.read.coffee
index 8b480ea89..33780cc90 100644
--- a/packages/csv-generate/test/api.read.coffee
+++ b/packages/csv-generate/test/api.read.coffee
@@ -1,5 +1,5 @@
-generate = require '../lib'
+import generate from '../lib/index.js'
describe 'api read', ->
diff --git a/packages/csv-generate/test/api.sync.coffee b/packages/csv-generate/test/api.sync.coffee
index b04ff13f3..1f4449d49 100644
--- a/packages/csv-generate/test/api.sync.coffee
+++ b/packages/csv-generate/test/api.sync.coffee
@@ -1,13 +1,13 @@
-generate = require '../lib/sync'
+import generate from '../lib/sync.js'
describe 'api sync', ->
it 'throw error if options isnt provided', ->
- (-> generate()).should.throw 'Invalid Argument: options must be an o object or a integer'
- (-> generate 3.14).should.throw 'Invalid Argument: options must be an o object or a integer'
+ (-> generate()).should.throw 'Invalid Argument: options must be an object or an integer'
+ (-> generate 3.14).should.throw 'Invalid Argument: options must be an object or an integer'
- it.only 'throw error if length isnt provided', ->
+ it 'throw error if length isnt provided', ->
(-> generate({})).should.throw 'Invalid Argument: length is not defined'
it 'accept length as an integer', ->
diff --git a/packages/csv-generate/test/api.types.ts b/packages/csv-generate/test/api.types.ts
index 7ff865a12..9339ff575 100644
--- a/packages/csv-generate/test/api.types.ts
+++ b/packages/csv-generate/test/api.types.ts
@@ -1,10 +1,40 @@
import 'should'
-import * as generate from '../lib/index'
-import {Options, Generator} from '../lib/index'
+import generate, {Options, Generator} from '../lib/index.js'
+import generateSync from '../lib/sync.js'
describe('API Types', () => {
+ describe('Initialisation', () => {
+
+ it('stream', () => {
+ // With callback
+ const generator: Generator = generate()
+ generator.destroy()
+ generator.should.be.an.Object()
+ // With callback
+ generate( (err, records) => err || records ).destroy()
+ // With options + callback
+ generate( {length: 1}, (err, records) => err || records )
+ })
+
+ it('sync with options as number', () => {
+ const generator: string = generateSync(1)
+ generator.should.be.a.String()
+ })
+
+ it('sync with options in string mode', () => {
+ const generator: string = generateSync({length: 1})
+ generator.should.be.a.String()
+ })
+
+ it('sync with options in object mode', () => {
+    const generator: Array<Array<any>> = generateSync({length: 1, objectMode: true})
+ generator.should.be.an.Array()
+ })
+
+ })
+
describe('Generator', () => {
it('Expose options', () => {
diff --git a/packages/csv-generate/test/loaders/all.mjs b/packages/csv-generate/test/loaders/all.mjs
new file mode 100644
index 000000000..4a3828589
--- /dev/null
+++ b/packages/csv-generate/test/loaders/all.mjs
@@ -0,0 +1,37 @@
+
+import * as coffee from './coffee.mjs'
+import * as ts from 'ts-node/esm'
+
+const coffeeRegex = /\.coffee$|\.litcoffee$|\.coffee\.md$/;
+const tsRegex = /\.ts$/;
+
+export function resolve(specifier, context, defaultResolve) {
+ if (coffeeRegex.test(specifier)) {
+ return coffee.resolve.apply(this, arguments)
+ }
+ if (tsRegex.test(specifier)) {
+ return ts.resolve.apply(this, arguments)
+ }
+ return ts.resolve.apply(this, arguments);
+}
+
+export function getFormat(url, context, defaultGetFormat) {
+ if (coffeeRegex.test(url)) {
+ return coffee.getFormat.apply(this, arguments)
+ }
+ if (tsRegex.test(url)) {
+ return ts.getFormat.apply(this, arguments)
+ }
+ return ts.getFormat.apply(this, arguments);
+}
+
+export function transformSource(source, context, defaultTransformSource) {
+ const { url } = context;
+ if (coffeeRegex.test(url)) {
+ return coffee.transformSource.apply(this, arguments)
+ }
+ if (tsRegex.test(url)) {
+ return ts.transformSource.apply(this, arguments)
+ }
+ return ts.transformSource.apply(this, arguments);
+}
diff --git a/packages/csv-generate/test/loaders/coffee.mjs b/packages/csv-generate/test/loaders/coffee.mjs
new file mode 100644
index 000000000..f4945adb7
--- /dev/null
+++ b/packages/csv-generate/test/loaders/coffee.mjs
@@ -0,0 +1,50 @@
+// coffeescript-loader.mjs
+import { URL, pathToFileURL } from 'url';
+import CoffeeScript from 'coffeescript';
+import { cwd } from 'process';
+
+const baseURL = pathToFileURL(`${cwd()}/`).href;
+
+// CoffeeScript files end in .coffee, .litcoffee or .coffee.md.
+const extensionsRegex = /\.coffee$|\.litcoffee$|\.coffee\.md$/;
+
+export function resolve(specifier, context, defaultResolve) {
+ const { parentURL = baseURL } = context;
+ // Node.js normally errors on unknown file extensions, so return a URL for
+ // specifiers ending in the CoffeeScript file extensions.
+ if (extensionsRegex.test(specifier)) {
+ return {
+ url: new URL(specifier, parentURL).href,
+ stop: true
+ };
+ }
+ // Let Node.js handle all other specifiers.
+ return defaultResolve(specifier, context, defaultResolve);
+}
+
+export function getFormat(url, context, defaultGetFormat) {
+ // Now that we patched resolve to let CoffeeScript URLs through, we need to
+ // tell Node.js what format such URLs should be interpreted as. For the
+ // purposes of this loader, all CoffeeScript URLs are ES modules.
+ if (extensionsRegex.test(url)) {
+ return {
+ format: 'module',
+ stop: true
+ };
+ }
+ // Let Node.js handle all other URLs.
+ return defaultGetFormat(url, context, defaultGetFormat);
+}
+
+export function transformSource(source, context, defaultTransformSource) {
+ const { url, format } = context;
+
+ if (extensionsRegex.test(url)) {
+ return {
+ source: CoffeeScript.compile(String(source), { bare: true })
+ };
+ }
+
+ // Let Node.js handle all other sources.
+ return defaultTransformSource(source, context, defaultTransformSource);
+}
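
Note on the new loaders above: these hooks follow Node's experimental loader API of that era (resolve, getFormat, transformSource). A minimal sketch, with hypothetical paths, showing how the CoffeeScript branch resolves and classifies a specifier without falling back to the default hooks:

import * as coffee from './test/loaders/coffee.mjs'   // path relative to the package root, assumed

const notReached = () => { throw new Error('default hook should not be called for .coffee files') }
const { url } = coffee.resolve('./test/api.sync.coffee', { parentURL: import.meta.url }, notReached)
const { format } = coffee.getFormat(url, {}, notReached)
console.log(url)      // file:// URL of the .coffee file
console.log(format)   // 'module': the compiled CoffeeScript is treated as an ES module
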
diff --git a/packages/csv-generate/test/options.coffee b/packages/csv-generate/test/options.coffee
index a7796357c..5cbc05662 100644
--- a/packages/csv-generate/test/options.coffee
+++ b/packages/csv-generate/test/options.coffee
@@ -1,5 +1,5 @@
-generate = require '../lib'
+import generate from '../lib/index.js'
describe 'options', ->
diff --git a/packages/csv-generate/test/options.columns.coffee b/packages/csv-generate/test/options.columns.coffee
index 7ce581e48..c5e81dff7 100644
--- a/packages/csv-generate/test/options.columns.coffee
+++ b/packages/csv-generate/test/options.columns.coffee
@@ -1,5 +1,5 @@
-generate = require '../lib'
+import generate from '../lib/index.js'
describe 'option columns', ->
diff --git a/packages/csv-generate/test/options.delimiter.coffee b/packages/csv-generate/test/options.delimiter.coffee
index 8aa071898..1fdb30bc9 100644
--- a/packages/csv-generate/test/options.delimiter.coffee
+++ b/packages/csv-generate/test/options.delimiter.coffee
@@ -1,5 +1,5 @@
-generate = require '../lib'
+import generate from '../lib/index.js'
describe 'option delimiter', ->
diff --git a/packages/csv-generate/test/options.duration.coffee b/packages/csv-generate/test/options.duration.coffee
index 006ce129c..4e9438d3a 100644
--- a/packages/csv-generate/test/options.duration.coffee
+++ b/packages/csv-generate/test/options.duration.coffee
@@ -1,5 +1,5 @@
-generate = require '../lib'
+import generate from '../lib/index.js'
describe 'option duration', ->
diff --git a/packages/csv-generate/test/options.encoding.coffee b/packages/csv-generate/test/options.encoding.coffee
index 52f923eaf..586b0cb4a 100644
--- a/packages/csv-generate/test/options.encoding.coffee
+++ b/packages/csv-generate/test/options.encoding.coffee
@@ -1,5 +1,5 @@
-generate = require '../lib'
+import generate from '../lib/index.js'
describe 'Option encoding', ->
diff --git a/packages/csv-generate/test/options.end.coffee b/packages/csv-generate/test/options.end.coffee
index 08cbc7968..908927597 100644
--- a/packages/csv-generate/test/options.end.coffee
+++ b/packages/csv-generate/test/options.end.coffee
@@ -1,5 +1,5 @@
-generate = require '../lib'
+import generate from '../lib/index.js'
describe 'option end', ->
diff --git a/packages/csv-generate/test/options.eof.coffee b/packages/csv-generate/test/options.eof.coffee
index 820646c99..f00c48e0b 100644
--- a/packages/csv-generate/test/options.eof.coffee
+++ b/packages/csv-generate/test/options.eof.coffee
@@ -1,5 +1,5 @@
-generate = require '../lib'
+import generate from '../lib/index.js'
describe 'option eof', ->
diff --git a/packages/csv-generate/test/options.fixed_size.coffee b/packages/csv-generate/test/options.fixed_size.coffee
index 2e921d36a..cc765d454 100644
--- a/packages/csv-generate/test/options.fixed_size.coffee
+++ b/packages/csv-generate/test/options.fixed_size.coffee
@@ -1,5 +1,5 @@
-generate = require '../lib'
+import generate from '../lib/index.js'
describe 'option fixed_size', ->
diff --git a/packages/csv-generate/test/options.high_water_mark.coffee b/packages/csv-generate/test/options.high_water_mark.coffee
index ce84587bd..ad96b4fd9 100644
--- a/packages/csv-generate/test/options.high_water_mark.coffee
+++ b/packages/csv-generate/test/options.high_water_mark.coffee
@@ -1,5 +1,5 @@
-generate = require '../lib'
+import generate from '../lib/index.js'
describe 'option high_water_mark', ->
diff --git a/packages/csv-generate/test/options.length.coffee b/packages/csv-generate/test/options.length.coffee
index 66e12a26e..7fd74cbda 100644
--- a/packages/csv-generate/test/options.length.coffee
+++ b/packages/csv-generate/test/options.length.coffee
@@ -1,5 +1,5 @@
-generate = require '../lib'
+import generate from '../lib/index.js'
describe 'option length', ->
diff --git a/packages/csv-generate/test/options.max_word_length.coffee b/packages/csv-generate/test/options.max_word_length.coffee
index 3af74d3d9..fa282f17a 100644
--- a/packages/csv-generate/test/options.max_word_length.coffee
+++ b/packages/csv-generate/test/options.max_word_length.coffee
@@ -1,5 +1,5 @@
-generate = require '../lib'
+import generate from '../lib/index.js'
describe 'option max_word_length', ->
diff --git a/packages/csv-generate/test/options.object_mode.coffee b/packages/csv-generate/test/options.object_mode.coffee
index d1ac51082..7447a5235 100644
--- a/packages/csv-generate/test/options.object_mode.coffee
+++ b/packages/csv-generate/test/options.object_mode.coffee
@@ -1,5 +1,5 @@
-generate = require '../lib'
+import generate from '../lib/index.js'
describe 'option objectMode', ->
diff --git a/packages/csv-generate/test/options.row_delimiter.coffee b/packages/csv-generate/test/options.row_delimiter.coffee
index 55c7b060f..f72f6eaec 100644
--- a/packages/csv-generate/test/options.row_delimiter.coffee
+++ b/packages/csv-generate/test/options.row_delimiter.coffee
@@ -1,5 +1,5 @@
-generate = require '../lib'
+import generate from '../lib/index.js'
describe 'option row_delimiter', ->
diff --git a/packages/csv-generate/test/options.seed.coffee b/packages/csv-generate/test/options.seed.coffee
index 77133cea5..5f9290ea5 100644
--- a/packages/csv-generate/test/options.seed.coffee
+++ b/packages/csv-generate/test/options.seed.coffee
@@ -1,5 +1,5 @@
-generate = require '../lib'
+import generate from '../lib/index.js'
describe 'option seed', ->
diff --git a/packages/csv-generate/test/options.sleep.coffee b/packages/csv-generate/test/options.sleep.coffee
index a7f6fbb14..27488b7c2 100644
--- a/packages/csv-generate/test/options.sleep.coffee
+++ b/packages/csv-generate/test/options.sleep.coffee
@@ -1,5 +1,5 @@
-generate = require '../lib'
+import generate from '../lib/index.js'
describe 'option sleep', ->
diff --git a/packages/csv-generate/test/samples.coffee b/packages/csv-generate/test/samples.coffee
index 7d2906b71..e85d0ac1e 100644
--- a/packages/csv-generate/test/samples.coffee
+++ b/packages/csv-generate/test/samples.coffee
@@ -1,19 +1,17 @@
-fs = require('fs').promises
-util = require 'util'
-path = require 'path'
-{exec} = require 'child_process'
-each = require 'each'
-
-it 'samples', ->
- dir = path.resolve __dirname, '../samples'
- samples = await fs.readdir dir
- each samples.filter( (sample) -> /\.js/.test.sample)
- .call (sample, callback) ->
- exec "node #{path.resolve dir, sample}", (err) ->
- callback err
- .promise()
-
-
-
-
+import fs from 'fs'
+import path from 'path'
+import {exec} from 'child_process'
+
+import { fileURLToPath } from 'url';
+__dirname = path.dirname fileURLToPath `import.meta.url`
+dir = path.resolve __dirname, '../samples'
+samples = fs.readdirSync dir
+
+describe 'Samples', ->
+
+ for sample in samples
+ continue unless /\.js$/.test sample
+ it "Sample #{sample}", (callback) ->
+ exec "node #{path.resolve dir, sample}", (err) ->
+ callback err
diff --git a/packages/csv-generate/tsconfig.json b/packages/csv-generate/tsconfig.json
index 9f40782ea..4db508a7c 100644
--- a/packages/csv-generate/tsconfig.json
+++ b/packages/csv-generate/tsconfig.json
@@ -1,7 +1,8 @@
{
"compileOnSave": false,
"compilerOptions": {
- "target": "es6",
+ "esModuleInterop": true,
+ "module": "ES2020",
"moduleResolution": "node",
"strict": true,
}
diff --git a/packages/csv-parse/lib/ResizeableBuffer.js b/packages/csv-parse/lib/ResizeableBuffer.js
index 467422c12..9d992defa 100644
--- a/packages/csv-parse/lib/ResizeableBuffer.js
+++ b/packages/csv-parse/lib/ResizeableBuffer.js
@@ -62,4 +62,4 @@ class ResizeableBuffer{
}
}
-module.exports = ResizeableBuffer
+export default ResizeableBuffer
diff --git a/packages/csv-parse/lib/browser/index.js b/packages/csv-parse/lib/browser/index.js
index 14610cf07..089a12ffc 100644
--- a/packages/csv-parse/lib/browser/index.js
+++ b/packages/csv-parse/lib/browser/index.js
@@ -2,6 +2,11 @@
(function (Buffer){(function (){
"use strict";
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports["default"] = void 0;
+
function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
@@ -101,13 +106,25 @@ var ResizeableBuffer = /*#__PURE__*/function () {
return ResizeableBuffer;
}();
-module.exports = ResizeableBuffer;
+var _default = ResizeableBuffer;
+exports["default"] = _default;
}).call(this)}).call(this,require("buffer").Buffer)
},{"buffer":5}],2:[function(require,module,exports){
(function (Buffer,setImmediate){(function (){
"use strict";
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.CsvError = exports.Parser = exports.parse = exports["default"] = void 0;
+
+var _stream = require("stream");
+
+var _ResizeableBuffer = _interopRequireDefault(require("./ResizeableBuffer.js"));
+
+function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { "default": obj }; }
+
function _wrapNativeSuper(Class) { var _cache = typeof Map === "function" ? new Map() : undefined; _wrapNativeSuper = function _wrapNativeSuper(Class) { if (Class === null || !_isNativeFunction(Class)) return Class; if (typeof Class !== "function") { throw new TypeError("Super expression must either be null or a function"); } if (typeof _cache !== "undefined") { if (_cache.has(Class)) return _cache.get(Class); _cache.set(Class, Wrapper); } function Wrapper() { return _construct(Class, arguments, _getPrototypeOf(this).constructor); } Wrapper.prototype = Object.create(Class.prototype, { constructor: { value: Wrapper, enumerable: false, writable: true, configurable: true } }); return _setPrototypeOf(Wrapper, Class); }; return _wrapNativeSuper(Class); }
function _construct(Parent, args, Class) { if (_isNativeReflectConstruct()) { _construct = Reflect.construct; } else { _construct = function _construct(Parent, args, Class) { var a = [null]; a.push.apply(a, args); var Constructor = Function.bind.apply(Parent, a); var instance = new Constructor(); if (Class) _setPrototypeOf(instance, Class.prototype); return instance; }; } return _construct.apply(null, arguments); }
@@ -162,21 +179,10 @@ function _isNativeReflectConstruct() { if (typeof Reflect === "undefined" || !Re
function _getPrototypeOf(o) { _getPrototypeOf = Object.setPrototypeOf ? Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return _getPrototypeOf(o); }
-/*
-CSV Parse
-
-Please look at the [project documentation](https://csv.js.org/parse/) for
-additional information.
-*/
-var _require = require('stream'),
- Transform = _require.Transform;
-
-var ResizeableBuffer = require('./ResizeableBuffer'); // white space characters
+// white space characters
// https://en.wikipedia.org/wiki/Whitespace_character
// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions/Character_Classes#Types
// \f\n\r\t\v\u00a0\u1680\u2000-\u200a\u2028\u2029\u202f\u205f\u3000\ufeff
-
-
var tab = 9;
var nl = 10; // \n, 0x0A in hexadecimal, 10 in decimal
@@ -624,7 +630,7 @@ var Parser = /*#__PURE__*/function (_Transform) {
escapeIsQuote: Buffer.isBuffer(options.escape) && Buffer.isBuffer(options.quote) && Buffer.compare(options.escape, options.quote) === 0,
// columns can be `false`, `true`, `Array`
expectedRecordLength: Array.isArray(options.columns) ? options.columns.length : undefined,
- field: new ResizeableBuffer(20),
+ field: new _ResizeableBuffer["default"](20),
firstLineToHeaders: fnFirstLineToHeaders,
needMoreDataSize: Math.max.apply(Math, [// Skip if the remaining buffer smaller than comment
options.comment !== null ? options.comment.length : 0].concat(_toConsumableArray(options.delimiter.map(function (delimiter) {
@@ -634,7 +640,7 @@ var Parser = /*#__PURE__*/function (_Transform) {
previousBuf: undefined,
quoting: false,
stop: false,
- rawBuffer: new ResizeableBuffer(100),
+ rawBuffer: new _ResizeableBuffer["default"](100),
record: [],
recordHasError: false,
record_length: 0,
@@ -1491,7 +1497,9 @@ var Parser = /*#__PURE__*/function (_Transform) {
}]);
return Parser;
-}(Transform);
+}(_stream.Transform);
+
+exports.Parser = Parser;
var parse = function parse() {
var data, options, callback;
@@ -1551,6 +1559,8 @@ var parse = function parse() {
return parser;
};
+exports.parse = parse;
+
var CsvError = /*#__PURE__*/function (_Error) {
_inherits(CsvError, _Error);
@@ -1589,9 +1599,9 @@ var CsvError = /*#__PURE__*/function (_Error) {
return CsvError;
}( /*#__PURE__*/_wrapNativeSuper(Error));
-parse.Parser = Parser;
-parse.CsvError = CsvError;
-module.exports = parse;
+exports.CsvError = CsvError;
+var _default = parse;
+exports["default"] = _default;
var underscore = function underscore(str) {
return str.replace(/([A-Z])/g, function (_, match) {
@@ -1638,7 +1648,7 @@ var normalizeColumnsArray = function normalizeColumnsArray(columns) {
};
}).call(this)}).call(this,require("buffer").Buffer,require("timers").setImmediate)
-},{"./ResizeableBuffer":1,"buffer":5,"stream":11,"timers":27}],3:[function(require,module,exports){
+},{"./ResizeableBuffer.js":1,"buffer":5,"stream":11,"timers":27}],3:[function(require,module,exports){
'use strict'
exports.byteLength = byteLength
diff --git a/packages/csv-parse/lib/browser/sync.js b/packages/csv-parse/lib/browser/sync.js
index 9fa8e1cf6..d0d2b94d4 100644
--- a/packages/csv-parse/lib/browser/sync.js
+++ b/packages/csv-parse/lib/browser/sync.js
@@ -2,6 +2,11 @@
(function (Buffer){(function (){
"use strict";
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports["default"] = void 0;
+
function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
@@ -101,13 +106,25 @@ var ResizeableBuffer = /*#__PURE__*/function () {
return ResizeableBuffer;
}();
-module.exports = ResizeableBuffer;
+var _default = ResizeableBuffer;
+exports["default"] = _default;
}).call(this)}).call(this,require("buffer").Buffer)
},{"buffer":6}],2:[function(require,module,exports){
(function (Buffer,setImmediate){(function (){
"use strict";
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.CsvError = exports.Parser = exports.parse = exports["default"] = void 0;
+
+var _stream = require("stream");
+
+var _ResizeableBuffer = _interopRequireDefault(require("./ResizeableBuffer.js"));
+
+function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { "default": obj }; }
+
function _wrapNativeSuper(Class) { var _cache = typeof Map === "function" ? new Map() : undefined; _wrapNativeSuper = function _wrapNativeSuper(Class) { if (Class === null || !_isNativeFunction(Class)) return Class; if (typeof Class !== "function") { throw new TypeError("Super expression must either be null or a function"); } if (typeof _cache !== "undefined") { if (_cache.has(Class)) return _cache.get(Class); _cache.set(Class, Wrapper); } function Wrapper() { return _construct(Class, arguments, _getPrototypeOf(this).constructor); } Wrapper.prototype = Object.create(Class.prototype, { constructor: { value: Wrapper, enumerable: false, writable: true, configurable: true } }); return _setPrototypeOf(Wrapper, Class); }; return _wrapNativeSuper(Class); }
function _construct(Parent, args, Class) { if (_isNativeReflectConstruct()) { _construct = Reflect.construct; } else { _construct = function _construct(Parent, args, Class) { var a = [null]; a.push.apply(a, args); var Constructor = Function.bind.apply(Parent, a); var instance = new Constructor(); if (Class) _setPrototypeOf(instance, Class.prototype); return instance; }; } return _construct.apply(null, arguments); }
@@ -162,21 +179,10 @@ function _isNativeReflectConstruct() { if (typeof Reflect === "undefined" || !Re
function _getPrototypeOf(o) { _getPrototypeOf = Object.setPrototypeOf ? Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return _getPrototypeOf(o); }
-/*
-CSV Parse
-
-Please look at the [project documentation](https://csv.js.org/parse/) for
-additional information.
-*/
-var _require = require('stream'),
- Transform = _require.Transform;
-
-var ResizeableBuffer = require('./ResizeableBuffer'); // white space characters
+// white space characters
// https://en.wikipedia.org/wiki/Whitespace_character
// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions/Character_Classes#Types
// \f\n\r\t\v\u00a0\u1680\u2000-\u200a\u2028\u2029\u202f\u205f\u3000\ufeff
-
-
var tab = 9;
var nl = 10; // \n, 0x0A in hexadecimal, 10 in decimal
@@ -624,7 +630,7 @@ var Parser = /*#__PURE__*/function (_Transform) {
escapeIsQuote: Buffer.isBuffer(options.escape) && Buffer.isBuffer(options.quote) && Buffer.compare(options.escape, options.quote) === 0,
// columns can be `false`, `true`, `Array`
expectedRecordLength: Array.isArray(options.columns) ? options.columns.length : undefined,
- field: new ResizeableBuffer(20),
+ field: new _ResizeableBuffer["default"](20),
firstLineToHeaders: fnFirstLineToHeaders,
needMoreDataSize: Math.max.apply(Math, [// Skip if the remaining buffer smaller than comment
options.comment !== null ? options.comment.length : 0].concat(_toConsumableArray(options.delimiter.map(function (delimiter) {
@@ -634,7 +640,7 @@ var Parser = /*#__PURE__*/function (_Transform) {
previousBuf: undefined,
quoting: false,
stop: false,
- rawBuffer: new ResizeableBuffer(100),
+ rawBuffer: new _ResizeableBuffer["default"](100),
record: [],
recordHasError: false,
record_length: 0,
@@ -1491,7 +1497,9 @@ var Parser = /*#__PURE__*/function (_Transform) {
}]);
return Parser;
-}(Transform);
+}(_stream.Transform);
+
+exports.Parser = Parser;
var parse = function parse() {
var data, options, callback;
@@ -1551,6 +1559,8 @@ var parse = function parse() {
return parser;
};
+exports.parse = parse;
+
var CsvError = /*#__PURE__*/function (_Error) {
_inherits(CsvError, _Error);
@@ -1589,9 +1599,9 @@ var CsvError = /*#__PURE__*/function (_Error) {
return CsvError;
}( /*#__PURE__*/_wrapNativeSuper(Error));
-parse.Parser = Parser;
-parse.CsvError = CsvError;
-module.exports = parse;
+exports.CsvError = CsvError;
+var _default = parse;
+exports["default"] = _default;
var underscore = function underscore(str) {
return str.replace(/([A-Z])/g, function (_, match) {
@@ -1638,13 +1648,18 @@ var normalizeColumnsArray = function normalizeColumnsArray(columns) {
};
}).call(this)}).call(this,require("buffer").Buffer,require("timers").setImmediate)
-},{"./ResizeableBuffer":1,"buffer":6,"stream":12,"timers":28}],3:[function(require,module,exports){
+},{"./ResizeableBuffer.js":1,"buffer":6,"stream":12,"timers":28}],3:[function(require,module,exports){
(function (Buffer){(function (){
"use strict";
-var parse = require('.');
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports["default"] = _default;
+
+var _index = require("./index.js");
-module.exports = function (data) {
+function _default(data) {
var options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
if (typeof data === 'string') {
@@ -1652,7 +1667,7 @@ module.exports = function (data) {
}
var records = options && options.objname ? {} : [];
- var parser = new parse.Parser(options);
+ var parser = new _index.Parser(options);
parser.push = function (record) {
if (record === null) {
@@ -1672,10 +1687,10 @@ module.exports = function (data) {
if (err2 !== undefined) throw err2;
return records;
-};
+}
}).call(this)}).call(this,require("buffer").Buffer)
-},{".":2,"buffer":6}],4:[function(require,module,exports){
+},{"./index.js":2,"buffer":6}],4:[function(require,module,exports){
'use strict'
exports.byteLength = byteLength
diff --git a/packages/csv-parse/lib/es5/ResizeableBuffer.js b/packages/csv-parse/lib/es5/ResizeableBuffer.js
index 8df1db336..b7a36ca2b 100644
--- a/packages/csv-parse/lib/es5/ResizeableBuffer.js
+++ b/packages/csv-parse/lib/es5/ResizeableBuffer.js
@@ -1,5 +1,10 @@
"use strict";
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports["default"] = void 0;
+
function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
@@ -99,4 +104,5 @@ var ResizeableBuffer = /*#__PURE__*/function () {
return ResizeableBuffer;
}();
-module.exports = ResizeableBuffer;
\ No newline at end of file
+var _default = ResizeableBuffer;
+exports["default"] = _default;
\ No newline at end of file
diff --git a/packages/csv-parse/lib/es5/index.d.ts b/packages/csv-parse/lib/es5/index.d.ts
deleted file mode 100644
index 60d6b3c0b..000000000
--- a/packages/csv-parse/lib/es5/index.d.ts
+++ /dev/null
@@ -1,268 +0,0 @@
-// Original definitions in https://github.com/DefinitelyTyped/DefinitelyTyped by: David Muller
-
-///
-
-import * as stream from "stream";
-
-export = parse;
-
-declare function parse(input: Buffer | string, options?: parse.Options, callback?: parse.Callback): parse.Parser;
-declare function parse(input: Buffer | string, callback?: parse.Callback): parse.Parser;
-declare function parse(options?: parse.Options, callback?: parse.Callback): parse.Parser;
-declare function parse(callback?: parse.Callback): parse.Parser;
-declare namespace parse {
-
- type Callback = (err: Error | undefined, records: any | undefined, info: Info) => void;
-
- interface Parser extends stream.Transform {}
-
- class Parser {
- constructor(options: Options);
-
- __push(line: any): any;
-
- __write(chars: any, end: any, callback: any): any;
-
- readonly options: Options
-
- readonly info: Info;
- }
-
- interface CastingContext {
- readonly column: number | string;
- readonly empty_lines: number;
- readonly error: CsvError;
- readonly header: boolean;
- readonly index: number;
- readonly quoting: boolean;
- readonly lines: number;
- readonly records: number;
- readonly invalid_field_length: number;
- }
-
- type CastingFunction = (value: string, context: CastingContext) => any;
-
- type CastingDateFunction = (value: string, context: CastingContext) => Date;
-
- type ColumnOption = string | undefined | null | false | { name: string };
-
- interface Options {
- /**
- * If true, the parser will attempt to convert read data types to native types.
- * @deprecated Use {@link cast}
- */
- auto_parse?: boolean | CastingFunction;
- autoParse?: boolean | CastingFunction;
- /**
- * If true, the parser will attempt to convert read data types to dates. It requires the "auto_parse" option.
- * @deprecated Use {@link cast_date}
- */
- auto_parse_date?: boolean | CastingDateFunction;
- autoParseDate?: boolean | CastingDateFunction;
- /**
- * If true, detect and exclude the byte order mark (BOM) from the CSV input if present.
- */
- bom?: boolean;
- /**
- * If true, the parser will attempt to convert input string to native types.
- * If a function, receive the value as first argument, a context as second argument and return a new value. More information about the context properties is available below.
- */
- cast?: boolean | CastingFunction;
- /**
- * If true, the parser will attempt to convert input string to dates.
- * If a function, receive the value as argument and return a new value. It requires the "auto_parse" option. Be careful, it relies on Date.parse.
- */
- cast_date?: boolean | CastingDateFunction;
- castDate?: boolean | CastingDateFunction;
- /**
- * List of fields as an array,
- * a user defined callback accepting the first line and returning the column names or true if autodiscovered in the first CSV line,
- * default to null,
- * affect the result data set in the sense that records will be objects instead of arrays.
- */
- columns?: ColumnOption[] | boolean | ((record: any) => ColumnOption[]);
- /**
- * Convert values into an array of values when columns are activated and
- * when multiple columns of the same name are found.
- */
- columns_duplicates_to_array?: boolean;
- columnsDuplicatesToArray?: boolean;
- /**
- * Treat all the characters after this one as a comment, default to '' (disabled).
- */
- comment?: string;
- /**
- * Set the field delimiter. One character only, defaults to comma.
- */
- delimiter?: string | string[] | Buffer;
- /**
- * Set the source and destination encoding, a value of `null` returns buffer instead of strings.
- */
- encoding?: string | null;
- /**
- * Set the escape character, one character only, defaults to double quotes.
- */
- escape?: string | null | false | Buffer;
- /**
- * Start handling records from the requested number of records.
- */
- from?: number;
- /**
- * Start handling records from the requested line number.
- */
- from_line?: number;
- fromLine?: number;
- /**
- * Don't interpret delimiters as such in the last field according to the number of fields calculated from the number of columns, the option require the presence of the `column` option when `true`.
- */
- ignore_last_delimiters?: boolean | number;
- /**
- * Generate two properties `info` and `record` where `info` is a snapshot of the info object at the time the record was created and `record` is the parsed array or object.
- */
- info?: boolean;
- /**
- * If true, ignore whitespace immediately following the delimiter (i.e. left-trim all fields), defaults to false.
- * Does not remove whitespace in a quoted field.
- */
- ltrim?: boolean;
- /**
- * Maximum numer of characters to be contained in the field and line buffers before an exception is raised,
- * used to guard against a wrong delimiter or record_delimiter,
- * default to 128000 characters.
- */
- max_record_size?: number;
- maxRecordSize?: number;
- /**
- * Name of header-record title to name objects by.
- */
- objname?: string;
- /**
- * Alter and filter records by executing a user defined function.
- */
- on_record?: (record: any, context: CastingContext) => any;
- onRecord?: (record: any, context: CastingContext) => any;
- /**
- * Optional character surrounding a field, one character only, defaults to double quotes.
- */
- quote?: string | boolean | Buffer | null;
- /**
- * Generate two properties raw and row where raw is the original CSV row content and row is the parsed array or object.
- */
- raw?: boolean;
- /**
- * Preserve quotes inside unquoted field.
- */
- relax?: boolean;
- /**
- * Discard inconsistent columns count, default to false.
- */
- relax_column_count?: boolean;
- relaxColumnCount?: boolean;
- /**
- * Discard inconsistent columns count when the record contains less fields than expected, default to false.
- */
- relax_column_count_less?: boolean;
- relaxColumnCountLess?: boolean;
- /**
- * Discard inconsistent columns count when the record contains more fields than expected, default to false.
- */
- relax_column_count_more?: boolean;
- relaxColumnCountMore?: boolean;
- /**
- * One or multiple characters used to delimit record rows; defaults to auto discovery if not provided.
- * Supported auto discovery method are Linux ("\n"), Apple ("\r") and Windows ("\r\n") row delimiters.
- */
- record_delimiter?: string | string[] | Buffer | Buffer[];
- recordDelimiter?: string | string[] | Buffer | Buffer[];
- /**
- * If true, ignore whitespace immediately preceding the delimiter (i.e. right-trim all fields), defaults to false.
- * Does not remove whitespace in a quoted field.
- */
- rtrim?: boolean;
- /**
- * Dont generate empty values for empty lines.
- * Defaults to false
- */
- skip_empty_lines?: boolean;
- skipEmptyLines?: boolean;
- /**
- * Skip a line with error found inside and directly go process the next line.
- */
- skip_lines_with_error?: boolean;
- skipLinesWithError?: boolean;
- /**
- * Don't generate records for lines containing empty column values (column matching /\s*\/), defaults to false.
- */
- skip_lines_with_empty_values?: boolean;
- skipLinesWithEmptyValues?: boolean;
- /**
- * Stop handling records after the requested number of records.
- */
- to?: number;
- /**
- * Stop handling records after the requested line number.
- */
- to_line?: number;
- toLine?: number;
- /**
- * If true, ignore whitespace immediately around the delimiter, defaults to false.
- * Does not remove whitespace in a quoted field.
- */
- trim?: boolean;
- }
-
- interface Info {
- /**
- * Count the number of lines being fully commented.
- */
- readonly comment_lines: number;
- /**
- * Count the number of processed empty lines.
- */
- readonly empty_lines: number;
- /**
- * The number of lines encountered in the source dataset, start at 1 for the first line.
- */
- readonly lines: number;
- /**
- * Count the number of processed records.
- */
- readonly records: number;
- /**
- * Count of the number of processed bytes.
- */
- readonly bytes: number;
- /**
- * Number of non uniform records when `relax_column_count` is true.
- */
- readonly invalid_field_length: number;
- }
-
- class CsvError extends Error {
- readonly code: CsvErrorCode;
- [key: string]: any;
-
- constructor(code: CsvErrorCode, message: string | string[], options?: Options, ...contexts: any[]);
- }
-
- type CsvErrorCode =
- 'CSV_INVALID_OPTION_BOM'
- | 'CSV_INVALID_OPTION_CAST'
- | 'CSV_INVALID_OPTION_CAST_DATE'
- | 'CSV_INVALID_OPTION_COLUMNS'
- | 'CSV_INVALID_OPTION_COLUMNS_DUPLICATES_TO_ARRAY'
- | 'CSV_INVALID_OPTION_COMMENT'
- | 'CSV_INVALID_OPTION_DELIMITER'
- | 'CSV_INVALID_OPTION_ON_RECORD'
- | 'CSV_INVALID_CLOSING_QUOTE'
- | 'INVALID_OPENING_QUOTE'
- | 'CSV_INVALID_COLUMN_MAPPING'
- | 'CSV_INVALID_ARGUMENT'
- | 'CSV_INVALID_COLUMN_DEFINITION'
- | 'CSV_MAX_RECORD_SIZE'
- | 'CSV_NON_TRIMABLE_CHAR_AFTER_CLOSING_QUOTE'
- | 'CSV_QUOTE_NOT_CLOSED'
- | 'CSV_INCONSISTENT_RECORD_LENGTH'
- | 'CSV_RECORD_DONT_MATCH_COLUMNS_LENGTH'
- | 'CSV_OPTION_COLUMNS_MISSING_NAME'
-}
diff --git a/packages/csv-parse/lib/es5/index.js b/packages/csv-parse/lib/es5/index.js
index 503120077..c091e912b 100644
--- a/packages/csv-parse/lib/es5/index.js
+++ b/packages/csv-parse/lib/es5/index.js
@@ -1,5 +1,16 @@
"use strict";
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.CsvError = exports.Parser = exports.parse = exports["default"] = void 0;
+
+var _stream = require("stream");
+
+var _ResizeableBuffer = _interopRequireDefault(require("./ResizeableBuffer.js"));
+
+function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { "default": obj }; }
+
function _wrapNativeSuper(Class) { var _cache = typeof Map === "function" ? new Map() : undefined; _wrapNativeSuper = function _wrapNativeSuper(Class) { if (Class === null || !_isNativeFunction(Class)) return Class; if (typeof Class !== "function") { throw new TypeError("Super expression must either be null or a function"); } if (typeof _cache !== "undefined") { if (_cache.has(Class)) return _cache.get(Class); _cache.set(Class, Wrapper); } function Wrapper() { return _construct(Class, arguments, _getPrototypeOf(this).constructor); } Wrapper.prototype = Object.create(Class.prototype, { constructor: { value: Wrapper, enumerable: false, writable: true, configurable: true } }); return _setPrototypeOf(Wrapper, Class); }; return _wrapNativeSuper(Class); }
function _construct(Parent, args, Class) { if (_isNativeReflectConstruct()) { _construct = Reflect.construct; } else { _construct = function _construct(Parent, args, Class) { var a = [null]; a.push.apply(a, args); var Constructor = Function.bind.apply(Parent, a); var instance = new Constructor(); if (Class) _setPrototypeOf(instance, Class.prototype); return instance; }; } return _construct.apply(null, arguments); }
@@ -54,21 +65,10 @@ function _isNativeReflectConstruct() { if (typeof Reflect === "undefined" || !Re
function _getPrototypeOf(o) { _getPrototypeOf = Object.setPrototypeOf ? Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return _getPrototypeOf(o); }
-/*
-CSV Parse
-
-Please look at the [project documentation](https://csv.js.org/parse/) for
-additional information.
-*/
-var _require = require('stream'),
- Transform = _require.Transform;
-
-var ResizeableBuffer = require('./ResizeableBuffer'); // white space characters
+// white space characters
// https://en.wikipedia.org/wiki/Whitespace_character
// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions/Character_Classes#Types
// \f\n\r\t\v\u00a0\u1680\u2000-\u200a\u2028\u2029\u202f\u205f\u3000\ufeff
-
-
var tab = 9;
var nl = 10; // \n, 0x0A in hexadecimal, 10 in decimal
@@ -516,7 +516,7 @@ var Parser = /*#__PURE__*/function (_Transform) {
escapeIsQuote: Buffer.isBuffer(options.escape) && Buffer.isBuffer(options.quote) && Buffer.compare(options.escape, options.quote) === 0,
// columns can be `false`, `true`, `Array`
expectedRecordLength: Array.isArray(options.columns) ? options.columns.length : undefined,
- field: new ResizeableBuffer(20),
+ field: new _ResizeableBuffer["default"](20),
firstLineToHeaders: fnFirstLineToHeaders,
needMoreDataSize: Math.max.apply(Math, [// Skip if the remaining buffer smaller than comment
options.comment !== null ? options.comment.length : 0].concat(_toConsumableArray(options.delimiter.map(function (delimiter) {
@@ -526,7 +526,7 @@ var Parser = /*#__PURE__*/function (_Transform) {
previousBuf: undefined,
quoting: false,
stop: false,
- rawBuffer: new ResizeableBuffer(100),
+ rawBuffer: new _ResizeableBuffer["default"](100),
record: [],
recordHasError: false,
record_length: 0,
@@ -1383,7 +1383,9 @@ var Parser = /*#__PURE__*/function (_Transform) {
}]);
return Parser;
-}(Transform);
+}(_stream.Transform);
+
+exports.Parser = Parser;
var parse = function parse() {
var data, options, callback;
@@ -1443,6 +1445,8 @@ var parse = function parse() {
return parser;
};
+exports.parse = parse;
+
var CsvError = /*#__PURE__*/function (_Error) {
_inherits(CsvError, _Error);
@@ -1481,9 +1485,9 @@ var CsvError = /*#__PURE__*/function (_Error) {
return CsvError;
}( /*#__PURE__*/_wrapNativeSuper(Error));
-parse.Parser = Parser;
-parse.CsvError = CsvError;
-module.exports = parse;
+exports.CsvError = CsvError;
+var _default = parse;
+exports["default"] = _default;
var underscore = function underscore(str) {
return str.replace(/([A-Z])/g, function (_, match) {
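Note: the recompiled es5 build replaces `module.exports = parse` with Babel's `__esModule` interop, so the module now carries named exports plus a `default`. A hedged sketch of the resulting shape, assuming a loader that still treats `lib/es5/index.js` as CommonJS (as the Browserify-built browser bundles above do); the relative path is illustrative:

const es5 = require('./lib/es5/index.js')

es5.parse     // the stream/callback API, formerly `module.exports`
es5.Parser    // formerly `parse.Parser`
es5.CsvError  // formerly `parse.CsvError`
es5.default   // same function as `es5.parse`, added for default-import interop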
diff --git a/packages/csv-parse/lib/es5/sync.d.ts b/packages/csv-parse/lib/es5/sync.d.ts
deleted file mode 100644
index 7886698e3..000000000
--- a/packages/csv-parse/lib/es5/sync.d.ts
+++ /dev/null
@@ -1,6 +0,0 @@
-import * as csvParse from './index';
-
-export = parse;
-
-declare function parse(input: Buffer | string, options?: csvParse.Options): any;
-declare namespace parse {}
\ No newline at end of file
diff --git a/packages/csv-parse/lib/es5/sync.js b/packages/csv-parse/lib/es5/sync.js
index 00106a09c..73916cbfc 100644
--- a/packages/csv-parse/lib/es5/sync.js
+++ b/packages/csv-parse/lib/es5/sync.js
@@ -1,8 +1,13 @@
"use strict";
-var parse = require('.');
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports["default"] = _default;
-module.exports = function (data) {
+var _index = require("./index.js");
+
+function _default(data) {
var options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
if (typeof data === 'string') {
@@ -10,7 +15,7 @@ module.exports = function (data) {
}
var records = options && options.objname ? {} : [];
- var parser = new parse.Parser(options);
+ var parser = new _index.Parser(options);
parser.push = function (record) {
if (record === null) {
@@ -30,4 +35,4 @@ module.exports = function (data) {
if (err2 !== undefined) throw err2;
return records;
-};
\ No newline at end of file
+}
\ No newline at end of file
diff --git a/packages/csv-parse/lib/index.d.ts b/packages/csv-parse/lib/index.d.ts
index 60d6b3c0b..ed99b1679 100644
--- a/packages/csv-parse/lib/index.d.ts
+++ b/packages/csv-parse/lib/index.d.ts
@@ -4,265 +4,266 @@
import * as stream from "stream";
-export = parse;
+export type Callback = (err: Error | undefined, records: any | undefined, info: Info) => void;
-declare function parse(input: Buffer | string, options?: parse.Options, callback?: parse.Callback): parse.Parser;
-declare function parse(input: Buffer | string, callback?: parse.Callback): parse.Parser;
-declare function parse(options?: parse.Options, callback?: parse.Callback): parse.Parser;
-declare function parse(callback?: parse.Callback): parse.Parser;
-declare namespace parse {
+export interface Parser extends stream.Transform {}
- type Callback = (err: Error | undefined, records: any | undefined, info: Info) => void;
+export class Parser {
+ constructor(options: Options);
+
+ __push(line: any): any;
+
+ __write(chars: any, end: any, callback: any): any;
+
+ readonly options: Options
+
+ readonly info: Info;
+}
- interface Parser extends stream.Transform {}
+export interface CastingContext {
+ readonly column: number | string;
+ readonly empty_lines: number;
+ readonly error: CsvError;
+ readonly header: boolean;
+ readonly index: number;
+ readonly quoting: boolean;
+ readonly lines: number;
+ readonly records: number;
+ readonly invalid_field_length: number;
+}
- class Parser {
- constructor(options: Options);
-
- __push(line: any): any;
-
- __write(chars: any, end: any, callback: any): any;
-
- readonly options: Options
-
- readonly info: Info;
- }
+export type CastingFunction = (value: string, context: CastingContext) => any;
- interface CastingContext {
- readonly column: number | string;
- readonly empty_lines: number;
- readonly error: CsvError;
- readonly header: boolean;
- readonly index: number;
- readonly quoting: boolean;
- readonly lines: number;
- readonly records: number;
- readonly invalid_field_length: number;
- }
+export type CastingDateFunction = (value: string, context: CastingContext) => Date;
- type CastingFunction = (value: string, context: CastingContext) => any;
+export type ColumnOption = string | undefined | null | false | { name: string };
- type CastingDateFunction = (value: string, context: CastingContext) => Date;
+export interface Options {
+ /**
+ * If true, the parser will attempt to convert read data types to native types.
+ * @deprecated Use {@link cast}
+ */
+ auto_parse?: boolean | CastingFunction;
+ autoParse?: boolean | CastingFunction;
+ /**
+ * If true, the parser will attempt to convert read data types to dates. It requires the "auto_parse" option.
+ * @deprecated Use {@link cast_date}
+ */
+ auto_parse_date?: boolean | CastingDateFunction;
+ autoParseDate?: boolean | CastingDateFunction;
+ /**
+ * If true, detect and exclude the byte order mark (BOM) from the CSV input if present.
+ */
+ bom?: boolean;
+ /**
+ * If true, the parser will attempt to convert input string to native types.
+ * If a function, receive the value as first argument, a context as second argument and return a new value. More information about the context properties is available below.
+ */
+ cast?: boolean | CastingFunction;
+ /**
+ * If true, the parser will attempt to convert input string to dates.
+ * If a function, receive the value as argument and return a new value. It requires the "auto_parse" option. Be careful, it relies on Date.parse.
+ */
+ cast_date?: boolean | CastingDateFunction;
+ castDate?: boolean | CastingDateFunction;
+ /**
+ * List of fields as an array,
+ * a user defined callback accepting the first line and returning the column names or true if autodiscovered in the first CSV line,
+ * default to null,
+ * affect the result data set in the sense that records will be objects instead of arrays.
+ */
+ columns?: ColumnOption[] | boolean | ((record: any) => ColumnOption[]);
+ /**
+ * Convert values into an array of values when columns are activated and
+ * when multiple columns of the same name are found.
+ */
+ columns_duplicates_to_array?: boolean;
+ columnsDuplicatesToArray?: boolean;
+ /**
+ * Treat all the characters after this one as a comment, default to '' (disabled).
+ */
+ comment?: string;
+ /**
+ * Set the field delimiter. One character only, defaults to comma.
+ */
+ delimiter?: string | string[] | Buffer;
+ /**
+ * Set the source and destination encoding, a value of `null` returns buffer instead of strings.
+ */
+ encoding?: string | null;
+ /**
+ * Set the escape character, one character only, defaults to double quotes.
+ */
+ escape?: string | null | false | Buffer;
+ /**
+ * Start handling records from the requested number of records.
+ */
+ from?: number;
+ /**
+ * Start handling records from the requested line number.
+ */
+ from_line?: number;
+ fromLine?: number;
+ /**
+   * Don't interpret delimiters as such in the last field according to the number of fields calculated from the number of columns; the option requires the presence of the `column` option when `true`.
+ */
+ ignore_last_delimiters?: boolean | number;
+ /**
+ * Generate two properties `info` and `record` where `info` is a snapshot of the info object at the time the record was created and `record` is the parsed array or object.
+ */
+ info?: boolean;
+ /**
+ * If true, ignore whitespace immediately following the delimiter (i.e. left-trim all fields), defaults to false.
+ * Does not remove whitespace in a quoted field.
+ */
+ ltrim?: boolean;
+ /**
+   * Maximum number of characters to be contained in the field and line buffers before an exception is raised,
+ * used to guard against a wrong delimiter or record_delimiter,
+ * default to 128000 characters.
+ */
+ max_record_size?: number;
+ maxRecordSize?: number;
+ /**
+ * Name of header-record title to name objects by.
+ */
+ objname?: string;
+ /**
+ * Alter and filter records by executing a user defined function.
+ */
+ on_record?: (record: any, context: CastingContext) => any;
+ onRecord?: (record: any, context: CastingContext) => any;
+ /**
+ * Optional character surrounding a field, one character only, defaults to double quotes.
+ */
+ quote?: string | boolean | Buffer | null;
+ /**
+ * Generate two properties raw and row where raw is the original CSV row content and row is the parsed array or object.
+ */
+ raw?: boolean;
+ /**
+ * Preserve quotes inside unquoted field.
+ */
+ relax?: boolean;
+ /**
+ * Discard inconsistent columns count, default to false.
+ */
+ relax_column_count?: boolean;
+ relaxColumnCount?: boolean;
+ /**
+ * Discard inconsistent columns count when the record contains less fields than expected, default to false.
+ */
+ relax_column_count_less?: boolean;
+ relaxColumnCountLess?: boolean;
+ /**
+ * Discard inconsistent columns count when the record contains more fields than expected, default to false.
+ */
+ relax_column_count_more?: boolean;
+ relaxColumnCountMore?: boolean;
+ /**
+ * One or multiple characters used to delimit record rows; defaults to auto discovery if not provided.
+   * Supported auto-discovery methods are Linux ("\n"), Apple ("\r") and Windows ("\r\n") row delimiters.
+ */
+ record_delimiter?: string | string[] | Buffer | Buffer[];
+ recordDelimiter?: string | string[] | Buffer | Buffer[];
+ /**
+ * If true, ignore whitespace immediately preceding the delimiter (i.e. right-trim all fields), defaults to false.
+ * Does not remove whitespace in a quoted field.
+ */
+ rtrim?: boolean;
+ /**
+   * Don't generate empty values for empty lines.
+   * Defaults to false.
+ */
+ skip_empty_lines?: boolean;
+ skipEmptyLines?: boolean;
+ /**
+ * Skip a line with error found inside and directly go process the next line.
+ */
+ skip_lines_with_error?: boolean;
+ skipLinesWithError?: boolean;
+ /**
+ * Don't generate records for lines containing empty column values (column matching /\s*\/), defaults to false.
+ */
+ skip_lines_with_empty_values?: boolean;
+ skipLinesWithEmptyValues?: boolean;
+ /**
+ * Stop handling records after the requested number of records.
+ */
+ to?: number;
+ /**
+ * Stop handling records after the requested line number.
+ */
+ to_line?: number;
+ toLine?: number;
+ /**
+ * If true, ignore whitespace immediately around the delimiter, defaults to false.
+ * Does not remove whitespace in a quoted field.
+ */
+ trim?: boolean;
+}
- type ColumnOption = string | undefined | null | false | { name: string };
+export interface Info {
+ /**
+ * Count the number of lines being fully commented.
+ */
+ readonly comment_lines: number;
+ /**
+ * Count the number of processed empty lines.
+ */
+ readonly empty_lines: number;
+ /**
+ * The number of lines encountered in the source dataset, start at 1 for the first line.
+ */
+ readonly lines: number;
+ /**
+ * Count the number of processed records.
+ */
+ readonly records: number;
+ /**
+ * Count of the number of processed bytes.
+ */
+ readonly bytes: number;
+ /**
+ * Number of non uniform records when `relax_column_count` is true.
+ */
+ readonly invalid_field_length: number;
+}
- interface Options {
- /**
- * If true, the parser will attempt to convert read data types to native types.
- * @deprecated Use {@link cast}
- */
- auto_parse?: boolean | CastingFunction;
- autoParse?: boolean | CastingFunction;
- /**
- * If true, the parser will attempt to convert read data types to dates. It requires the "auto_parse" option.
- * @deprecated Use {@link cast_date}
- */
- auto_parse_date?: boolean | CastingDateFunction;
- autoParseDate?: boolean | CastingDateFunction;
- /**
- * If true, detect and exclude the byte order mark (BOM) from the CSV input if present.
- */
- bom?: boolean;
- /**
- * If true, the parser will attempt to convert input string to native types.
- * If a function, receive the value as first argument, a context as second argument and return a new value. More information about the context properties is available below.
- */
- cast?: boolean | CastingFunction;
- /**
- * If true, the parser will attempt to convert input string to dates.
- * If a function, receive the value as argument and return a new value. It requires the "auto_parse" option. Be careful, it relies on Date.parse.
- */
- cast_date?: boolean | CastingDateFunction;
- castDate?: boolean | CastingDateFunction;
- /**
- * List of fields as an array,
- * a user defined callback accepting the first line and returning the column names or true if autodiscovered in the first CSV line,
- * default to null,
- * affect the result data set in the sense that records will be objects instead of arrays.
- */
- columns?: ColumnOption[] | boolean | ((record: any) => ColumnOption[]);
- /**
- * Convert values into an array of values when columns are activated and
- * when multiple columns of the same name are found.
- */
- columns_duplicates_to_array?: boolean;
- columnsDuplicatesToArray?: boolean;
- /**
- * Treat all the characters after this one as a comment, default to '' (disabled).
- */
- comment?: string;
- /**
- * Set the field delimiter. One character only, defaults to comma.
- */
- delimiter?: string | string[] | Buffer;
- /**
- * Set the source and destination encoding, a value of `null` returns buffer instead of strings.
- */
- encoding?: string | null;
- /**
- * Set the escape character, one character only, defaults to double quotes.
- */
- escape?: string | null | false | Buffer;
- /**
- * Start handling records from the requested number of records.
- */
- from?: number;
- /**
- * Start handling records from the requested line number.
- */
- from_line?: number;
- fromLine?: number;
- /**
- * Don't interpret delimiters as such in the last field according to the number of fields calculated from the number of columns, the option require the presence of the `column` option when `true`.
- */
- ignore_last_delimiters?: boolean | number;
- /**
- * Generate two properties `info` and `record` where `info` is a snapshot of the info object at the time the record was created and `record` is the parsed array or object.
- */
- info?: boolean;
- /**
- * If true, ignore whitespace immediately following the delimiter (i.e. left-trim all fields), defaults to false.
- * Does not remove whitespace in a quoted field.
- */
- ltrim?: boolean;
- /**
- * Maximum numer of characters to be contained in the field and line buffers before an exception is raised,
- * used to guard against a wrong delimiter or record_delimiter,
- * default to 128000 characters.
- */
- max_record_size?: number;
- maxRecordSize?: number;
- /**
- * Name of header-record title to name objects by.
- */
- objname?: string;
- /**
- * Alter and filter records by executing a user defined function.
- */
- on_record?: (record: any, context: CastingContext) => any;
- onRecord?: (record: any, context: CastingContext) => any;
- /**
- * Optional character surrounding a field, one character only, defaults to double quotes.
- */
- quote?: string | boolean | Buffer | null;
- /**
- * Generate two properties raw and row where raw is the original CSV row content and row is the parsed array or object.
- */
- raw?: boolean;
- /**
- * Preserve quotes inside unquoted field.
- */
- relax?: boolean;
- /**
- * Discard inconsistent columns count, default to false.
- */
- relax_column_count?: boolean;
- relaxColumnCount?: boolean;
- /**
- * Discard inconsistent columns count when the record contains less fields than expected, default to false.
- */
- relax_column_count_less?: boolean;
- relaxColumnCountLess?: boolean;
- /**
- * Discard inconsistent columns count when the record contains more fields than expected, default to false.
- */
- relax_column_count_more?: boolean;
- relaxColumnCountMore?: boolean;
- /**
- * One or multiple characters used to delimit record rows; defaults to auto discovery if not provided.
- * Supported auto discovery method are Linux ("\n"), Apple ("\r") and Windows ("\r\n") row delimiters.
- */
- record_delimiter?: string | string[] | Buffer | Buffer[];
- recordDelimiter?: string | string[] | Buffer | Buffer[];
- /**
- * If true, ignore whitespace immediately preceding the delimiter (i.e. right-trim all fields), defaults to false.
- * Does not remove whitespace in a quoted field.
- */
- rtrim?: boolean;
- /**
- * Dont generate empty values for empty lines.
- * Defaults to false
- */
- skip_empty_lines?: boolean;
- skipEmptyLines?: boolean;
- /**
- * Skip a line with error found inside and directly go process the next line.
- */
- skip_lines_with_error?: boolean;
- skipLinesWithError?: boolean;
- /**
- * Don't generate records for lines containing empty column values (column matching /\s*\/), defaults to false.
- */
- skip_lines_with_empty_values?: boolean;
- skipLinesWithEmptyValues?: boolean;
- /**
- * Stop handling records after the requested number of records.
- */
- to?: number;
- /**
- * Stop handling records after the requested line number.
- */
- to_line?: number;
- toLine?: number;
- /**
- * If true, ignore whitespace immediately around the delimiter, defaults to false.
- * Does not remove whitespace in a quoted field.
- */
- trim?: boolean;
- }
- interface Info {
- /**
- * Count the number of lines being fully commented.
- */
- readonly comment_lines: number;
- /**
- * Count the number of processed empty lines.
- */
- readonly empty_lines: number;
- /**
- * The number of lines encountered in the source dataset, start at 1 for the first line.
- */
- readonly lines: number;
- /**
- * Count the number of processed records.
- */
- readonly records: number;
- /**
- * Count of the number of processed bytes.
- */
- readonly bytes: number;
- /**
- * Number of non uniform records when `relax_column_count` is true.
- */
- readonly invalid_field_length: number;
- }
-
- class CsvError extends Error {
- readonly code: CsvErrorCode;
- [key: string]: any;
-
- constructor(code: CsvErrorCode, message: string | string[], options?: Options, ...contexts: any[]);
- }
-
- type CsvErrorCode =
- 'CSV_INVALID_OPTION_BOM'
- | 'CSV_INVALID_OPTION_CAST'
- | 'CSV_INVALID_OPTION_CAST_DATE'
- | 'CSV_INVALID_OPTION_COLUMNS'
- | 'CSV_INVALID_OPTION_COLUMNS_DUPLICATES_TO_ARRAY'
- | 'CSV_INVALID_OPTION_COMMENT'
- | 'CSV_INVALID_OPTION_DELIMITER'
- | 'CSV_INVALID_OPTION_ON_RECORD'
- | 'CSV_INVALID_CLOSING_QUOTE'
- | 'INVALID_OPENING_QUOTE'
- | 'CSV_INVALID_COLUMN_MAPPING'
- | 'CSV_INVALID_ARGUMENT'
- | 'CSV_INVALID_COLUMN_DEFINITION'
- | 'CSV_MAX_RECORD_SIZE'
- | 'CSV_NON_TRIMABLE_CHAR_AFTER_CLOSING_QUOTE'
- | 'CSV_QUOTE_NOT_CLOSED'
- | 'CSV_INCONSISTENT_RECORD_LENGTH'
- | 'CSV_RECORD_DONT_MATCH_COLUMNS_LENGTH'
- | 'CSV_OPTION_COLUMNS_MISSING_NAME'
+export type CsvErrorCode =
+ 'CSV_INVALID_OPTION_BOM'
+ | 'CSV_INVALID_OPTION_CAST'
+ | 'CSV_INVALID_OPTION_CAST_DATE'
+ | 'CSV_INVALID_OPTION_COLUMNS'
+ | 'CSV_INVALID_OPTION_COLUMNS_DUPLICATES_TO_ARRAY'
+ | 'CSV_INVALID_OPTION_COMMENT'
+ | 'CSV_INVALID_OPTION_DELIMITER'
+ | 'CSV_INVALID_OPTION_ON_RECORD'
+ | 'CSV_INVALID_CLOSING_QUOTE'
+ | 'INVALID_OPENING_QUOTE'
+ | 'CSV_INVALID_COLUMN_MAPPING'
+ | 'CSV_INVALID_ARGUMENT'
+ | 'CSV_INVALID_COLUMN_DEFINITION'
+ | 'CSV_MAX_RECORD_SIZE'
+ | 'CSV_NON_TRIMABLE_CHAR_AFTER_CLOSING_QUOTE'
+ | 'CSV_QUOTE_NOT_CLOSED'
+ | 'CSV_INCONSISTENT_RECORD_LENGTH'
+ | 'CSV_RECORD_DONT_MATCH_COLUMNS_LENGTH'
+ | 'CSV_OPTION_COLUMNS_MISSING_NAME'
+
+export class CsvError extends Error {
+ readonly code: CsvErrorCode;
+ [key: string]: any;
+
+ constructor(code: CsvErrorCode, message: string | string[], options?: Options, ...contexts: any[]);
}
+
+declare function parse(input: Buffer | string, options?: Options, callback?: Callback): Parser;
+declare function parse(input: Buffer | string, callback?: Callback): Parser;
+declare function parse(options?: Options, callback?: Callback): Parser;
+declare function parse(callback?: Callback): Parser;
+
+export default parse;
+export {parse}
+// export = parse;
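Note: the type definitions move from `export =` to a default export plus named exports, so both import styles become valid. A hedged usage sketch, assuming the `csv-parse` specifier resolves to this build:

import parse, { Parser, CsvError } from 'csv-parse'

// Options, Info, CastingContext, etc. are now importable as named types as well.
const parser = parse({ delimiter: ',' }, (err, records, info) => {
  if (err instanceof CsvError) console.error(err.code)
  else console.log(records, info.records)
})
parser.write('a,b,c\n1,2,3\n')
parser.end()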
diff --git a/packages/csv-parse/lib/index.js b/packages/csv-parse/lib/index.js
index 164c1710e..fa6db8b75 100644
--- a/packages/csv-parse/lib/index.js
+++ b/packages/csv-parse/lib/index.js
@@ -6,8 +6,8 @@ Please look at the [project documentation](https://csv.js.org/parse/) for
additional information.
*/
-const { Transform } = require('stream')
-const ResizeableBuffer = require('./ResizeableBuffer')
+import { Transform } from 'stream'
+import ResizeableBuffer from './ResizeableBuffer.js'
// white space characters
// https://en.wikipedia.org/wiki/Whitespace_character
@@ -786,7 +786,7 @@ class Parser extends Transform {
], this.options, this.__infoField(), {
record: record,
})
- :
+ :
// Todo: rename CSV_RECORD_DONT_MATCH_COLUMNS_LENGTH to
// CSV_RECORD_INCONSISTENT_COLUMNS
new CsvError('CSV_RECORD_DONT_MATCH_COLUMNS_LENGTH', [
@@ -1219,11 +1219,8 @@ class CsvError extends Error {
}
}
-parse.Parser = Parser
-
-parse.CsvError = CsvError
-
-module.exports = parse
+export default parse
+export {parse, Parser, CsvError}
const underscore = function(str){
return str.replace(/([A-Z])/g, function(_, match){
diff --git a/packages/csv-parse/lib/sync.d.ts b/packages/csv-parse/lib/sync.d.ts
index 7886698e3..4fc0f2295 100644
--- a/packages/csv-parse/lib/sync.d.ts
+++ b/packages/csv-parse/lib/sync.d.ts
@@ -1,6 +1,6 @@
-import * as csvParse from './index';
-export = parse;
+import {Options} from './index';
-declare function parse(input: Buffer | string, options?: csvParse.Options): any;
-declare namespace parse {}
\ No newline at end of file
+declare function parse(input: Buffer | string, options?: Options): any;
+
+export default parse;
diff --git a/packages/csv-parse/lib/sync.js b/packages/csv-parse/lib/sync.js
index 3f592de78..d48fc13d6 100644
--- a/packages/csv-parse/lib/sync.js
+++ b/packages/csv-parse/lib/sync.js
@@ -1,12 +1,12 @@
-const parse = require('.')
+import {Parser} from './index.js'
-module.exports = function(data, options={}){
+export default function(data, options={}){
if(typeof data === 'string'){
data = Buffer.from(data)
}
const records = options && options.objname ? {} : []
- const parser = new parse.Parser(options)
+ const parser = new Parser(options)
parser.push = function(record){
if(record === null){
return
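Note: `lib/sync.js` now imports the `Parser` class directly and exposes itself as a default-exported ES module. A minimal usage sketch (the relative path is illustrative):

import parseSync from './lib/sync.js'

const records = parseSync('a,b\n1,2', { columns: true })
// -> [ { a: '1', b: '2' } ]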
diff --git a/packages/csv-parse/package.json b/packages/csv-parse/package.json
index f9b60c5d0..87c2bfcce 100644
--- a/packages/csv-parse/package.json
+++ b/packages/csv-parse/package.json
@@ -74,16 +74,15 @@
],
"main": "./lib",
"mocha": {
- "throw-deprecation": true,
- "require": [
- "should",
- "coffeescript/register",
- "ts-node/register"
- ],
"inline-diffs": true,
- "timeout": 40000,
+ "loader": "./test/loaders/all.mjs",
+ "recursive": true,
"reporter": "spec",
- "recursive": true
+ "require": [
+ "should"
+ ],
+ "throw-deprecation": true,
+ "timeout": 40000
},
"scripts": {
"build:babel": "cd lib && babel *.js -d es5 && cd ..",
@@ -92,7 +91,8 @@
"preversion": "cp lib/*.ts lib/es5 && git add lib/es5/*.ts",
"pretest": "npm run build",
"lint": "eslint lib/*.js",
- "test": "npm run lint && TS_NODE_COMPILER_OPTIONS='{\"strictNullChecks\":true}' mocha test/**/*.{coffee,ts}"
+ "test": "TS_NODE_COMPILER_OPTIONS='{\"strictNullChecks\":true}' mocha test/**/*.{coffee,ts}"
},
+ "type": "module",
"types": "./lib/index.d.ts"
}
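Note: with `"type": "module"`, Node treats the package's own `.js` sources as ES modules, which is why the samples and tests below switch from `require()` to `import`. A hedged sketch of the resulting deep import (explicit file path shown purely for illustration):

import parse from 'csv-parse/lib/index.js'

parse('a,b\n1,2\n', (err, records) => {
  if (!err) console.log(records) // [ [ 'a', 'b' ], [ '1', '2' ] ]
})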
diff --git a/packages/csv-parse/samples/api.callback.js b/packages/csv-parse/samples/api.callback.js
index 99ad1cfcc..958ea889a 100644
--- a/packages/csv-parse/samples/api.callback.js
+++ b/packages/csv-parse/samples/api.callback.js
@@ -1,6 +1,6 @@
-const parse = require('../lib')
-const assert = require('assert')
+import assert from 'assert'
+import parse from '../lib/index.js'
const input = '#Welcome\n"1","2","3","4"\n"a","b","c","d"'
parse(input, {
diff --git a/packages/csv-parse/samples/api.stream.js b/packages/csv-parse/samples/api.stream.js
index edd4aa065..177fb9acc 100644
--- a/packages/csv-parse/samples/api.stream.js
+++ b/packages/csv-parse/samples/api.stream.js
@@ -1,6 +1,6 @@
-const parse = require('../lib')
-const assert = require('assert')
+import assert from 'assert'
+import parse from '../lib/index.js'
const output = []
// Create the parser
diff --git a/packages/csv-parse/samples/async.iterator.coffee b/packages/csv-parse/samples/async.iterator.coffee
index ef8538246..517c74465 100644
--- a/packages/csv-parse/samples/async.iterator.coffee
+++ b/packages/csv-parse/samples/async.iterator.coffee
@@ -1,7 +1,7 @@
-assert = require 'assert'
-generate = require 'csv-generate'
-parse = require '..'
+import assert from 'assert'
+import generate from 'csv-generate'
+import parse from '../lib/index.js'
(() ->
# Initialise the parser by generating random records
diff --git a/packages/csv-parse/samples/async.iterator.js b/packages/csv-parse/samples/async.iterator.js
index eb7a37ba8..5f49e903a 100644
--- a/packages/csv-parse/samples/async.iterator.js
+++ b/packages/csv-parse/samples/async.iterator.js
@@ -1,7 +1,7 @@
-const assert = require('assert');
-const generate = require('csv-generate');
-const parse = require('..');
+import assert from 'assert'
+import generate from 'csv-generate'
+import parse from '../lib/index.js'
(async () => {
// Initialise the parser by generating random records
diff --git a/packages/csv-parse/samples/columns-discovery.js b/packages/csv-parse/samples/columns-discovery.js
index 6cd547394..d8c222337 100644
--- a/packages/csv-parse/samples/columns-discovery.js
+++ b/packages/csv-parse/samples/columns-discovery.js
@@ -1,13 +1,14 @@
-// The package "should" must be installed:
-// `npm install should`
+import fs from 'fs'
+import parse from '../lib/index.js'
-fs = require('fs');
-parse = require('..');
+import { dirname } from 'path'
+import { fileURLToPath } from 'url';
+const __dirname = dirname(fileURLToPath(import.meta.url))
// Using the first line of the CSV data to discover the column names
-rs = fs.createReadStream(__dirname+'/columns-discovery.in');
-parser = parse({columns: true}, function(err, data){
+const rs = fs.createReadStream(__dirname+'/columns-discovery.in');
+const parser = parse({columns: true}, function(err, data){
console.log(data);
})
rs.pipe(parser);
@@ -19,4 +20,4 @@ rs.pipe(parser);
[ { Foo: 'first', Bar: 'row', Baz: 'items' },
{ Foo: 'second', Bar: 'row', Baz: 'items' } ]
-*/
\ No newline at end of file
+*/
diff --git a/packages/csv-parse/samples/comment.js b/packages/csv-parse/samples/comment.js
index 33512f2f4..7f42a3c33 100644
--- a/packages/csv-parse/samples/comment.js
+++ b/packages/csv-parse/samples/comment.js
@@ -1,15 +1,14 @@
-// The package "should" must be installed:
-// `npm install should`
-
-var parse = require('..');
-should = require('should')
+import assert from 'assert'
+import parse from '../lib/index.js'
parse(
'#Welcome\n"1","2","3","4"\n"a","b","c","d"',
{comment: '#'},
function(err, data){
- data.should.eql([ [ '1', '2', '3', '4' ], [ 'a', 'b', 'c', 'd' ] ]);
+ assert.deepStrictEqual(
+ data,
+ [ [ '1', '2', '3', '4' ], [ 'a', 'b', 'c', 'd' ] ]
+ )
}
);
-
diff --git a/packages/csv-parse/samples/fs_read.js b/packages/csv-parse/samples/fs_read.js
index 164b195ca..7beb9c90d 100644
--- a/packages/csv-parse/samples/fs_read.js
+++ b/packages/csv-parse/samples/fs_read.js
@@ -1,6 +1,10 @@
-var fs = require('fs');
-var parse = require('..');
+import fs from 'fs'
+import parse from '../lib/index.js'
+
+import { dirname } from 'path'
+import { fileURLToPath } from 'url';
+const __dirname = dirname(fileURLToPath(import.meta.url))
var parser = parse({delimiter: ';'}, function(err, data){
console.log(data);
diff --git a/packages/csv-parse/samples/mixed.input_stream.js b/packages/csv-parse/samples/mixed.input_stream.js
index 4862d809e..a5373011f 100644
--- a/packages/csv-parse/samples/mixed.input_stream.js
+++ b/packages/csv-parse/samples/mixed.input_stream.js
@@ -1,6 +1,7 @@
-const parse = require('../lib')
-const assert = require('assert')
+import assert from 'assert'
+import parse from '../lib/index.js'
+
// Create the parser
const parser = parse({
delimiter: ':'
diff --git a/packages/csv-parse/samples/mixed.output_stream.js b/packages/csv-parse/samples/mixed.output_stream.js
index 82a2bc1b3..e848a7664 100644
--- a/packages/csv-parse/samples/mixed.output_stream.js
+++ b/packages/csv-parse/samples/mixed.output_stream.js
@@ -1,6 +1,6 @@
-const parse = require('..')
-const assert = require('assert')
+import assert from 'assert'
+import parse from '../lib/index.js'
const output = []
parse(`
diff --git a/packages/csv-parse/samples/module.sync.js b/packages/csv-parse/samples/module.sync.js
index f7f5ffc2e..f11429486 100644
--- a/packages/csv-parse/samples/module.sync.js
+++ b/packages/csv-parse/samples/module.sync.js
@@ -1,9 +1,6 @@
-// The package "should" must be installed:
-// `npm install should`
-
-const parse = require('../lib/sync')
-const assert = require('assert')
+import assert from 'assert'
+import parse from '../lib/sync.js'
const input = `
"key_1","key_2"
@@ -13,4 +10,7 @@ const records = parse(input, {
columns: true,
skip_empty_lines: true
})
-assert.deepStrictEqual(records, [{ key_1: 'value 1', key_2: 'value 2' }])
+assert.deepStrictEqual(
+ records,
+ [{ key_1: 'value 1', key_2: 'value 2' }]
+)
diff --git a/packages/csv-parse/samples/option.bom.hidden.js b/packages/csv-parse/samples/option.bom.hidden.js
index f0fcb2d95..d37cf4e0e 100644
--- a/packages/csv-parse/samples/option.bom.hidden.js
+++ b/packages/csv-parse/samples/option.bom.hidden.js
@@ -1,6 +1,6 @@
-const parse = require('../lib/sync')
-const assert = require('assert')
+import assert from 'assert'
+import parse from '../lib/sync.js'
const data = "\ufeffkey\nvalue"
const records = parse(data, {
diff --git a/packages/csv-parse/samples/option.bom.js b/packages/csv-parse/samples/option.bom.js
index 685c3281b..29460340b 100644
--- a/packages/csv-parse/samples/option.bom.js
+++ b/packages/csv-parse/samples/option.bom.js
@@ -1,6 +1,6 @@
-const parse = require('../lib/sync')
-const assert = require('assert')
+import assert from 'assert'
+import parse from '../lib/index.js'
const data = "\ufeffa,b,c\n"
const records = parse(data, {
diff --git a/packages/csv-parse/samples/option.cast.js b/packages/csv-parse/samples/option.cast.js
index 4cac62c6e..561196420 100644
--- a/packages/csv-parse/samples/option.cast.js
+++ b/packages/csv-parse/samples/option.cast.js
@@ -1,6 +1,6 @@
-const parse = require('../lib/sync')
-const assert = require('assert')
+import assert from 'assert'
+import parse from '../lib/index.js'
const data = `
2000-01-01,date1
diff --git a/packages/csv-parse/samples/option.cast_date.js b/packages/csv-parse/samples/option.cast_date.js
index 304ea7636..95412226b 100644
--- a/packages/csv-parse/samples/option.cast_date.js
+++ b/packages/csv-parse/samples/option.cast_date.js
@@ -1,6 +1,6 @@
-const parse = require('../lib/sync')
-const assert = require('assert')
+import assert from 'assert'
+import parse from '../lib/index.js'
const data = `
2000-01-01,date1
diff --git a/packages/csv-parse/samples/option.columns.array.js b/packages/csv-parse/samples/option.columns.array.js
index f677e91d9..ceff79a48 100644
--- a/packages/csv-parse/samples/option.columns.array.js
+++ b/packages/csv-parse/samples/option.columns.array.js
@@ -1,6 +1,6 @@
-const parse = require('../lib')
-const assert = require('assert')
+import assert from 'assert'
+import parse from '../lib/index.js'
parse(`
"value 1","value 2"
diff --git a/packages/csv-parse/samples/option.columns.function.js b/packages/csv-parse/samples/option.columns.function.js
index a864c3f94..443967411 100644
--- a/packages/csv-parse/samples/option.columns.function.js
+++ b/packages/csv-parse/samples/option.columns.function.js
@@ -1,6 +1,6 @@
-const parse = require('../lib')
-const assert = require('assert')
+import assert from 'assert'
+import parse from '../lib/index.js'
parse(`
"key_1","key_2"
diff --git a/packages/csv-parse/samples/option.columns.true.js b/packages/csv-parse/samples/option.columns.true.js
index 63849652e..95cda07d6 100644
--- a/packages/csv-parse/samples/option.columns.true.js
+++ b/packages/csv-parse/samples/option.columns.true.js
@@ -1,6 +1,6 @@
-const parse = require('../lib')
-const assert = require('assert')
+import assert from 'assert'
+import parse from '../lib/index.js'
parse(`
"key_1","key_2"
diff --git a/packages/csv-parse/samples/option.columns_duplicates_to_array.js b/packages/csv-parse/samples/option.columns_duplicates_to_array.js
index 3e867aeb7..8474dc49d 100644
--- a/packages/csv-parse/samples/option.columns_duplicates_to_array.js
+++ b/packages/csv-parse/samples/option.columns_duplicates_to_array.js
@@ -1,6 +1,6 @@
-const parse = require('..')
-const assert = require('assert')
+import assert from 'assert'
+import parse from '../lib/index.js'
parse(`
friend,username,friend
diff --git a/packages/csv-parse/samples/option.comment.js b/packages/csv-parse/samples/option.comment.js
index 97803c200..176f876c7 100644
--- a/packages/csv-parse/samples/option.comment.js
+++ b/packages/csv-parse/samples/option.comment.js
@@ -1,6 +1,6 @@
-const parse = require('../lib/sync')
-const assert = require('assert')
+import assert from 'assert'
+import parse from '../lib/index.js'
const data = `
# At the beginning of a record
diff --git a/packages/csv-parse/samples/option.delimiter.js b/packages/csv-parse/samples/option.delimiter.js
index a8708f3fd..5158b723a 100644
--- a/packages/csv-parse/samples/option.delimiter.js
+++ b/packages/csv-parse/samples/option.delimiter.js
@@ -1,6 +1,6 @@
-const parse = require('../lib/sync')
-const assert = require('assert')
+import assert from 'assert'
+import parse from '../lib/index.js'
const data = 'a key => a value'
const records = parse(data, {
diff --git a/packages/csv-parse/samples/option.encoding.buffer.js b/packages/csv-parse/samples/option.encoding.buffer.js
index 68764b8b6..857a11824 100644
--- a/packages/csv-parse/samples/option.encoding.buffer.js
+++ b/packages/csv-parse/samples/option.encoding.buffer.js
@@ -1,6 +1,6 @@
-const parse = require('../lib/sync')
-const assert = require('assert')
+import assert from 'assert'
+import parse from '../lib/index.js'
const data = Buffer.from(`a,b\n1,2`)
const records = parse(data, {
diff --git a/packages/csv-parse/samples/option.encoding.detection.js b/packages/csv-parse/samples/option.encoding.detection.js
index ec9388625..150ffffc8 100644
--- a/packages/csv-parse/samples/option.encoding.detection.js
+++ b/packages/csv-parse/samples/option.encoding.detection.js
@@ -1,6 +1,6 @@
-const parse = require('../lib/sync')
-const assert = require('assert')
+import assert from 'assert'
+import parse from '../lib/index.js'
const data = Buffer.from(`\uFEFFa,b,c\n1,2,3`, 'utf16le')
const records = parse(data, {
diff --git a/packages/csv-parse/samples/option.encoding.options.js b/packages/csv-parse/samples/option.encoding.options.js
index c14092d9a..a77aa4529 100644
--- a/packages/csv-parse/samples/option.encoding.options.js
+++ b/packages/csv-parse/samples/option.encoding.options.js
@@ -1,6 +1,6 @@
-const parse = require('../lib/sync')
-const assert = require('assert')
+import assert from 'assert'
+import parse from '../lib/index.js'
const data = Buffer.from(`a:b\n1:2`, 'utf16le')
const records = parse(data, {
diff --git a/packages/csv-parse/samples/option.escape.custom.js b/packages/csv-parse/samples/option.escape.custom.js
index 17081d6cb..73f238d5c 100644
--- a/packages/csv-parse/samples/option.escape.custom.js
+++ b/packages/csv-parse/samples/option.escape.custom.js
@@ -1,5 +1,6 @@
-const parse = require('../lib/sync')
-const assert = require('assert')
+
+import assert from 'assert'
+import parse from '../lib/index.js'
const data = `a,"b\\"c",d`
const records = parse(data, { escape: '\\' })
diff --git a/packages/csv-parse/samples/option.escape.default.js b/packages/csv-parse/samples/option.escape.default.js
index 67c85a1fc..96418c8fc 100644
--- a/packages/csv-parse/samples/option.escape.default.js
+++ b/packages/csv-parse/samples/option.escape.default.js
@@ -1,5 +1,6 @@
-const parse = require('../lib/sync')
-const assert = require('assert')
+
+import assert from 'assert'
+import parse from '../lib/index.js'
const data = `a,"b""c",d`
const records = parse(data)
diff --git a/packages/csv-parse/samples/option.from.js b/packages/csv-parse/samples/option.from.js
index 376be0137..ef2bb7ff3 100644
--- a/packages/csv-parse/samples/option.from.js
+++ b/packages/csv-parse/samples/option.from.js
@@ -1,5 +1,6 @@
-const parse = require('..')
-const assert = require('assert')
+
+import assert from 'assert'
+import parse from '../lib/index.js'
parse(`
a,b|1,2|3,4
diff --git a/packages/csv-parse/samples/option.from_line.js b/packages/csv-parse/samples/option.from_line.js
index 38eaf3bd5..419882634 100644
--- a/packages/csv-parse/samples/option.from_line.js
+++ b/packages/csv-parse/samples/option.from_line.js
@@ -1,6 +1,6 @@
-const parse = require('../lib')
-const assert = require('assert')
+import assert from 'assert'
+import parse from '../lib/index.js'
parse(`
x,x
diff --git a/packages/csv-parse/samples/option.ignore_last_delimiters.js b/packages/csv-parse/samples/option.ignore_last_delimiters.js
index 09777f464..d7a025358 100644
--- a/packages/csv-parse/samples/option.ignore_last_delimiters.js
+++ b/packages/csv-parse/samples/option.ignore_last_delimiters.js
@@ -1,6 +1,6 @@
-const parse = require('..')
-const assert = require('assert')
+import assert from 'assert'
+import parse from '../lib/index.js'
parse(`
format;description
diff --git a/packages/csv-parse/samples/option.info.js b/packages/csv-parse/samples/option.info.js
index 28e9df064..0262caaee 100644
--- a/packages/csv-parse/samples/option.info.js
+++ b/packages/csv-parse/samples/option.info.js
@@ -1,6 +1,6 @@
-const parse = require('../lib/sync')
-const assert = require('assert')
+import assert from 'assert'
+import parse from '../lib/index.js'
const data = "a,b,c"
const records = parse(data, {
diff --git a/packages/csv-parse/samples/option.ltim.js b/packages/csv-parse/samples/option.ltim.js
index ce2b13623..ead2bb04f 100644
--- a/packages/csv-parse/samples/option.ltim.js
+++ b/packages/csv-parse/samples/option.ltim.js
@@ -1,6 +1,6 @@
-const parse = require('../lib/sync')
-const assert = require('assert')
+import assert from 'assert'
+import parse from '../lib/index.js'
const data = [
'a ,1',
diff --git a/packages/csv-parse/samples/option.max_record_size.js b/packages/csv-parse/samples/option.max_record_size.js
index 6b3209f7e..6ebe8bc74 100644
--- a/packages/csv-parse/samples/option.max_record_size.js
+++ b/packages/csv-parse/samples/option.max_record_size.js
@@ -1,6 +1,6 @@
-const parse = require('..')
-const assert = require('assert')
+import assert from 'assert'
+import parse from '../lib/index.js'
parse(`
"first","last"
diff --git a/packages/csv-parse/samples/objname.js b/packages/csv-parse/samples/option.objname.js
similarity index 72%
rename from packages/csv-parse/samples/objname.js
rename to packages/csv-parse/samples/option.objname.js
index 117ad8f89..65c605e5f 100644
--- a/packages/csv-parse/samples/objname.js
+++ b/packages/csv-parse/samples/option.objname.js
@@ -1,15 +1,15 @@
-// The package "should" must be installed:
-// `npm install should`
-
-var parse = require('..');
-require('should');
+import assert from 'assert'
+import parse from '../lib/index.js'
parse(
'ColumnOne,ColumnTwo\nfirst,Data\nsecond,MoreData',
{'columns':true, 'objname': "ColumnOne"},
function(err, data){
if(err) throw err;
+ assert.deepStrictEqual({
+
+ })
data.should.eql({
first: { ColumnOne: 'first', ColumnTwo: 'Data' },
second: { ColumnOne: 'second', ColumnTwo: 'MoreData' }
diff --git a/packages/csv-parse/samples/option.on_record.alter.js b/packages/csv-parse/samples/option.on_record.alter.js
index 05eaae940..87a54c28a 100644
--- a/packages/csv-parse/samples/option.on_record.alter.js
+++ b/packages/csv-parse/samples/option.on_record.alter.js
@@ -1,6 +1,6 @@
-const parse = require('../lib')
-const assert = require('assert')
+import assert from 'assert'
+import parse from '../lib/index.js'
parse(`
a.1,a.2,a.3
diff --git a/packages/csv-parse/samples/option.on_record.filter.js b/packages/csv-parse/samples/option.on_record.filter.js
index 2e9751a7c..98d3c402d 100644
--- a/packages/csv-parse/samples/option.on_record.filter.js
+++ b/packages/csv-parse/samples/option.on_record.filter.js
@@ -1,6 +1,6 @@
-const parse = require('..')
-const assert = require('assert')
+import assert from 'assert'
+import parse from '../lib/index.js'
parse(`
line 1
diff --git a/packages/csv-parse/samples/option.relax_column_count.columns.js b/packages/csv-parse/samples/option.relax_column_count.columns.js
index f74786a89..fc012743b 100644
--- a/packages/csv-parse/samples/option.relax_column_count.columns.js
+++ b/packages/csv-parse/samples/option.relax_column_count.columns.js
@@ -1,6 +1,6 @@
-const parse = require('../lib')
-const assert = require('assert')
+import assert from 'assert'
+import parse from '../lib/index.js'
parse(`
lastname,firstname,fullname
diff --git a/packages/csv-parse/samples/option.relax_column_count.js b/packages/csv-parse/samples/option.relax_column_count.js
index 92dd9382f..bc65e63c6 100644
--- a/packages/csv-parse/samples/option.relax_column_count.js
+++ b/packages/csv-parse/samples/option.relax_column_count.js
@@ -1,6 +1,6 @@
-const parse = require('../lib')
-const assert = require('assert')
+import assert from 'assert'
+import parse from '../lib/index.js'
parse(`
"a 1","a 2"
diff --git a/packages/csv-parse/samples/option.relax_column_count.record_inconsistent_columns.js b/packages/csv-parse/samples/option.relax_column_count.record_inconsistent_columns.js
index de9327eb9..d52b34357 100644
--- a/packages/csv-parse/samples/option.relax_column_count.record_inconsistent_columns.js
+++ b/packages/csv-parse/samples/option.relax_column_count.record_inconsistent_columns.js
@@ -1,5 +1,6 @@
-const parse = require('../lib/sync')
-const assert = require('assert')
+
+import assert from 'assert'
+import parse from '../lib/index.js'
const records = parse( '1,2\nin:va:lid\n3,4', {
columns: ['a', 'b'],
diff --git a/packages/csv-parse/samples/option.relax_column_count.record_inconsistent_length.js b/packages/csv-parse/samples/option.relax_column_count.record_inconsistent_length.js
index a24ee6811..1050d5bb8 100644
--- a/packages/csv-parse/samples/option.relax_column_count.record_inconsistent_length.js
+++ b/packages/csv-parse/samples/option.relax_column_count.record_inconsistent_length.js
@@ -1,5 +1,6 @@
-const parse = require('../lib/sync')
-const assert = require('assert')
+
+import assert from 'assert'
+import parse from '../lib/index.js'
const records = parse( '1,2\nin:va:lid\n3,4', {
relax_column_count: true,
diff --git a/packages/csv-parse/samples/option.rtim.js b/packages/csv-parse/samples/option.rtim.js
index ea1b2d760..23fc6e280 100644
--- a/packages/csv-parse/samples/option.rtim.js
+++ b/packages/csv-parse/samples/option.rtim.js
@@ -1,6 +1,6 @@
-const parse = require('../lib/sync')
-const assert = require('assert')
+import assert from 'assert'
+import parse from '../lib/index.js'
const data = [
'a ,1',
diff --git a/packages/csv-parse/samples/option.skip_empty_lines.js b/packages/csv-parse/samples/option.skip_empty_lines.js
index 30a487e15..4297b4451 100644
--- a/packages/csv-parse/samples/option.skip_empty_lines.js
+++ b/packages/csv-parse/samples/option.skip_empty_lines.js
@@ -1,5 +1,6 @@
-const parse = require('../lib/sync')
-const assert = require('assert')
+
+import assert from 'assert'
+import parse from '../lib/index.js'
const records = parse(`
"a","b","c"
diff --git a/packages/csv-parse/samples/option.skip_empty_lines.trim.js b/packages/csv-parse/samples/option.skip_empty_lines.trim.js
index 4ab4f98bb..7337be951 100644
--- a/packages/csv-parse/samples/option.skip_empty_lines.trim.js
+++ b/packages/csv-parse/samples/option.skip_empty_lines.trim.js
@@ -1,5 +1,6 @@
-const parse = require('../lib/sync')
-const assert = require('assert')
+
+import assert from 'assert'
+import parse from '../lib/index.js'
const records = parse(`
"a","b","c"
diff --git a/packages/csv-parse/samples/option.skip_lines_with_error.js b/packages/csv-parse/samples/option.skip_lines_with_error.js
index d93f2b85f..531f58d56 100644
--- a/packages/csv-parse/samples/option.skip_lines_with_error.js
+++ b/packages/csv-parse/samples/option.skip_lines_with_error.js
@@ -1,5 +1,6 @@
-const parse = require('..')
-const assert = require('assert')
+
+import assert from 'assert'
+import parse from '../lib/index.js'
parser = parse({
skip_lines_with_error: true
diff --git a/packages/csv-parse/samples/option.to_line.js b/packages/csv-parse/samples/option.to_line.js
index 727e45066..2d308ae64 100644
--- a/packages/csv-parse/samples/option.to_line.js
+++ b/packages/csv-parse/samples/option.to_line.js
@@ -1,5 +1,6 @@
-const parse = require('../lib/sync')
-const assert = require('assert')
+
+import assert from 'assert'
+import parse from '../lib/index.js'
const records = parse(`
a,1
diff --git a/packages/csv-parse/samples/option.trim.js b/packages/csv-parse/samples/option.trim.js
index 4815cec5c..b708dfad0 100644
--- a/packages/csv-parse/samples/option.trim.js
+++ b/packages/csv-parse/samples/option.trim.js
@@ -1,6 +1,6 @@
-const parse = require('../lib/sync')
-const assert = require('assert')
+import assert from 'assert'
+import parse from '../lib/index.js'
const records = parse('a ,1\nb, 2\n c,3', {
trim: true
diff --git a/packages/csv-parse/samples/recipe.async.iterator.coffee b/packages/csv-parse/samples/recipe.async.iterator.coffee
index f2eb0859d..7eab74dd0 100644
--- a/packages/csv-parse/samples/recipe.async.iterator.coffee
+++ b/packages/csv-parse/samples/recipe.async.iterator.coffee
@@ -1,7 +1,11 @@
-parse = require('..')
-fs = require('fs')
-
+import fs from 'fs'
+import parse from '../lib/index.js'
+
+import { dirname } from 'path'
+import { fileURLToPath } from 'url';
+__dirname = dirname fileURLToPath `import.meta.url`
+
processFile = () ->
records = []
parser = fs
diff --git a/packages/csv-parse/samples/recipe.async.iterator.js b/packages/csv-parse/samples/recipe.async.iterator.js
index 3dc869aa0..2540683b4 100644
--- a/packages/csv-parse/samples/recipe.async.iterator.js
+++ b/packages/csv-parse/samples/recipe.async.iterator.js
@@ -1,9 +1,13 @@
-const parse = require('..');
-const fs = require('fs');
-
+import fs from 'fs'
+import parse from '../lib/index.js'
+
+import { dirname } from 'path'
+import { fileURLToPath } from 'url';
+const __dirname = dirname(fileURLToPath(import.meta.url))
+
const processFile = async () => {
- records = []
+ const records = []
const parser = fs
.createReadStream(`${__dirname}/fs_read.csv`)
.pipe(parse({
diff --git a/packages/csv-parse/samples/recipe.file.js b/packages/csv-parse/samples/recipe.file.js
index 8e48c6864..1ae7000df 100644
--- a/packages/csv-parse/samples/recipe.file.js
+++ b/packages/csv-parse/samples/recipe.file.js
@@ -1,7 +1,7 @@
-const os = require('os');
-const fs = require('fs').promises;
-const parse = require('../lib/sync');
+import fs from 'fs/promises'
+import os from 'os'
+import parse from '../lib/sync.js'
(async function(){
// Prepare the dataset
@@ -17,6 +17,6 @@ const parse = require('../lib/sync');
// Print records to the console
// records.map( record => console.log(record) )
// Write a file with one JSON per line for each record
- json = records.map( JSON.stringify ).join('\n')
+ const json = records.map( JSON.stringify ).join('\n')
fs.writeFile(`${os.tmpdir()}/output.csv`, json)
})()
diff --git a/packages/csv-parse/samples/recipe.pipe.js b/packages/csv-parse/samples/recipe.pipe.js
index 2da918203..629fdff2e 100644
--- a/packages/csv-parse/samples/recipe.pipe.js
+++ b/packages/csv-parse/samples/recipe.pipe.js
@@ -1,7 +1,7 @@
-const parse = require('..')
-const generate = require('csv-generate')
-const transform = require('stream-transform')
+import parse from '../lib/index.js'
+import generate from 'csv-generate'
+import transform from 'stream-transform'
const generator = generate({
length: 20
diff --git a/packages/csv-parse/samples/recipe.promises.js b/packages/csv-parse/samples/recipe.promises.js
index b207322e7..76199ef13 100644
--- a/packages/csv-parse/samples/recipe.promises.js
+++ b/packages/csv-parse/samples/recipe.promises.js
@@ -1,10 +1,14 @@
-const parse = require('..');
-const fs = require('fs');
-const { finished } = require('stream');
-
+import fs from 'fs'
+import parse from '../lib/index.js'
+import { finished } from 'stream/promises'
+
+import { dirname } from 'path'
+import { fileURLToPath } from 'url';
+const __dirname = dirname(fileURLToPath(import.meta.url))
+
const processFile = async () => {
- records = []
+ const records = []
const parser = fs
.createReadStream(`${__dirname}/fs_read.csv`)
.pipe(parse({
diff --git a/packages/csv-parse/samples/tsv.js b/packages/csv-parse/samples/tsv.js
index 6386544a9..51ffc069e 100644
--- a/packages/csv-parse/samples/tsv.js
+++ b/packages/csv-parse/samples/tsv.js
@@ -1,5 +1,6 @@
-const parse = require('..');
-const assert = require('assert')
+
+import assert from 'assert'
+import parse from '../lib/index.js'
parse( "1 2 3\ra b c", {delimiter: '\t'}, function(err, data){
if(err) throw err;
diff --git a/packages/csv-parse/test/ResizableBuffer.coffee b/packages/csv-parse/test/ResizableBuffer.coffee
index 11a2a3f7b..8416cd853 100644
--- a/packages/csv-parse/test/ResizableBuffer.coffee
+++ b/packages/csv-parse/test/ResizableBuffer.coffee
@@ -1,5 +1,5 @@
-ResizeableBuffer = require('../lib/ResizeableBuffer')
+import ResizeableBuffer from '../lib/ResizeableBuffer.js'
describe 'ResizeableBuffer', ->
diff --git a/packages/csv-parse/test/api.arguments.coffee b/packages/csv-parse/test/api.arguments.coffee
index 03f16b0a0..273168086 100644
--- a/packages/csv-parse/test/api.arguments.coffee
+++ b/packages/csv-parse/test/api.arguments.coffee
@@ -1,12 +1,12 @@
-generate = require 'csv-generate'
-parse = require '../lib'
-assert_error = require './api.assert_error'
+import generate from 'csv-generate'
+import parse, {Parser} from '../lib/index.js'
+import {assert_error} from './api.assert_error.coffee'
describe 'API arguments', ->
it 'exports Parser class', ->
- parse.Parser.should.be.a.Function
+ Parser.should.be.a.Function
describe '0 arg', ->
diff --git a/packages/csv-parse/test/api.assert_error.coffee b/packages/csv-parse/test/api.assert_error.coffee
index 2b9fca37a..a24c4fdfe 100644
--- a/packages/csv-parse/test/api.assert_error.coffee
+++ b/packages/csv-parse/test/api.assert_error.coffee
@@ -1,8 +1,8 @@
-{CsvError} = require '..'
-ResizeableBuffer = require '../lib/ResizeableBuffer'
+import {CsvError} from '../lib/index.js'
+import ResizeableBuffer from '../lib/ResizeableBuffer.js'
-module.exports = assert_error = (err, assert = {}, exhaustive = false) ->
+export assert_error = (err, assert = {}, exhaustive = false) ->
if Array.isArray err
assert_error e, assert[i] for e, i in err
return
diff --git a/packages/csv-parse/test/api.destroy.coffee b/packages/csv-parse/test/api.destroy.coffee
index fb1079e1c..13a56174a 100644
--- a/packages/csv-parse/test/api.destroy.coffee
+++ b/packages/csv-parse/test/api.destroy.coffee
@@ -1,8 +1,8 @@
-parse = require '../lib'
-generate = require 'csv-generate'
-fs = require 'fs'
-os = require 'os'
+import fs from 'fs'
+import os from 'os'
+import generate from 'csv-generate'
+import parse from '../lib/index.js'
describe 'API destroy', ->
diff --git a/packages/csv-parse/test/api.error.coffee b/packages/csv-parse/test/api.error.coffee
index cc2838ed9..304cdb5f9 100644
--- a/packages/csv-parse/test/api.error.coffee
+++ b/packages/csv-parse/test/api.error.coffee
@@ -1,22 +1,22 @@
-parse = require '..'
-assert_error = require './api.assert_error'
+import parse, {CsvError} from '../lib/index.js'
+import {assert_error} from './api.assert_error.coffee'
describe 'API error', ->
it 'set code', ->
- err = new parse.CsvError 'MY_CODE', ['a', 'b', 'c']
+ err = new CsvError 'MY_CODE', ['a', 'b', 'c']
err.code.should.eql 'MY_CODE'
it 'convert array message to string', ->
- err = new parse.CsvError 'MY_CODE', ['a', 'b', 'c']
+ err = new CsvError 'MY_CODE', ['a', 'b', 'c']
err.message.should.eql 'a b c'
it 'set additional context information', ->
- err = new parse.CsvError 'MY_CODE', 'msg', {}, a: 1, b: 2
+ err = new CsvError 'MY_CODE', 'msg', {}, a: 1, b: 2
err.a.should.eql 1
err.b.should.eql 2
-
+
it 'errors are enriched by context', ->
parse 'a"b', (err) ->
assert_error err,
diff --git a/packages/csv-parse/test/api.events.coffee b/packages/csv-parse/test/api.events.coffee
index 041bcd841..21ad3ae6a 100644
--- a/packages/csv-parse/test/api.events.coffee
+++ b/packages/csv-parse/test/api.events.coffee
@@ -1,6 +1,6 @@
-parse = require '../lib'
-assert_error = require './api.assert_error'
+import parse from '../lib/index.js'
+import {assert_error} from './api.assert_error.coffee'
describe 'API events', ->
diff --git a/packages/csv-parse/test/api.info.coffee b/packages/csv-parse/test/api.info.coffee
index e055e903c..f6aed74ee 100644
--- a/packages/csv-parse/test/api.info.coffee
+++ b/packages/csv-parse/test/api.info.coffee
@@ -1,5 +1,5 @@
-parse = require '../lib'
+import parse from '../lib/index.js'
describe 'API info', ->
diff --git a/packages/csv-parse/test/api.pipe.coffee b/packages/csv-parse/test/api.pipe.coffee
index 6c3860255..0c60e856e 100644
--- a/packages/csv-parse/test/api.pipe.coffee
+++ b/packages/csv-parse/test/api.pipe.coffee
@@ -1,8 +1,8 @@
-fs = require 'fs'
-{ Readable } = require 'stream'
-generate = require 'csv-generate'
-parse = require '../lib'
+import fs from 'fs'
+import { Readable } from 'stream'
+import generate from 'csv-generate'
+import parse from '../lib/index.js'
describe 'API pipe', ->
diff --git a/packages/csv-parse/test/api.sync.coffee b/packages/csv-parse/test/api.sync.coffee
index d2c9c5632..9a069b8f5 100644
--- a/packages/csv-parse/test/api.sync.coffee
+++ b/packages/csv-parse/test/api.sync.coffee
@@ -1,5 +1,5 @@
-parse = require '../lib/sync'
+import parse from '../lib/sync.js'
describe 'API sync', ->
diff --git a/packages/csv-parse/test/api.types.ts b/packages/csv-parse/test/api.types.ts
index 8d2e5a746..fb129ff9b 100644
--- a/packages/csv-parse/test/api.types.ts
+++ b/packages/csv-parse/test/api.types.ts
@@ -1,8 +1,7 @@
import 'should'
-import * as parse from '../lib/index'
-import * as parse_sync from '../lib/sync'
-import {CastingContext, Info, Options, Parser, CsvError} from '../lib/index'
+import parse, {CastingContext, Info, Options, Parser, CsvError} from '../lib/index.js'
+import parse_sync from '../lib/sync.js'
describe('API Types', () => {
@@ -78,7 +77,7 @@ describe('API Types', () => {
const data: object = parse_sync("")
typeof data
}catch (err){
- if (err instanceof parse.CsvError){
+ if (err instanceof CsvError){
err.message
}
}
diff --git a/packages/csv-parse/test/api.write.coffee b/packages/csv-parse/test/api.write.coffee
index 1fb367ce3..99707eb0b 100644
--- a/packages/csv-parse/test/api.write.coffee
+++ b/packages/csv-parse/test/api.write.coffee
@@ -1,5 +1,5 @@
-parse = require '../lib'
+import parse from '../lib/index.js'
describe 'API write', ->
diff --git a/packages/csv-parse/test/info.comment_lines.coffee b/packages/csv-parse/test/info.comment_lines.coffee
index 442721668..543330f39 100644
--- a/packages/csv-parse/test/info.comment_lines.coffee
+++ b/packages/csv-parse/test/info.comment_lines.coffee
@@ -1,5 +1,5 @@
-parse = require '../lib'
+import parse from '../lib/index.js'
describe 'info comment_lines', ->
diff --git a/packages/csv-parse/test/info.empty_lines.coffee b/packages/csv-parse/test/info.empty_lines.coffee
index 7dc4ff632..da5a2d1cb 100644
--- a/packages/csv-parse/test/info.empty_lines.coffee
+++ b/packages/csv-parse/test/info.empty_lines.coffee
@@ -1,5 +1,5 @@
-parse = require '../lib'
+import parse from '../lib/index.js'
describe 'info empty_lines', ->
diff --git a/packages/csv-parse/test/info.invalid_field_length.coffee b/packages/csv-parse/test/info.invalid_field_length.coffee
index 1ccdc4b4c..b43e086b6 100644
--- a/packages/csv-parse/test/info.invalid_field_length.coffee
+++ b/packages/csv-parse/test/info.invalid_field_length.coffee
@@ -1,5 +1,5 @@
-parse = require '../lib'
+import parse from '../lib/index.js'
describe 'info invalid_field_length', ->
diff --git a/packages/csv-parse/test/info.lines.coffee b/packages/csv-parse/test/info.lines.coffee
index 82822fd99..3bfe57186 100644
--- a/packages/csv-parse/test/info.lines.coffee
+++ b/packages/csv-parse/test/info.lines.coffee
@@ -1,6 +1,6 @@
-parse = require '../lib'
-assert_error = require './api.assert_error'
+import parse from '../lib/index.js'
+import {assert_error} from './api.assert_error.coffee'
describe 'properties lines', ->
diff --git a/packages/csv-parse/test/info.records.coffee b/packages/csv-parse/test/info.records.coffee
index 83a0e3e81..81ae533a0 100644
--- a/packages/csv-parse/test/info.records.coffee
+++ b/packages/csv-parse/test/info.records.coffee
@@ -1,5 +1,5 @@
-parse = require '../lib'
+import parse from '../lib/index.js'
describe 'properties count records', ->
diff --git a/packages/csv-parse/test/loaders/all.mjs b/packages/csv-parse/test/loaders/all.mjs
new file mode 100644
index 000000000..4a3828589
--- /dev/null
+++ b/packages/csv-parse/test/loaders/all.mjs
@@ -0,0 +1,37 @@
+
+import * as coffee from './coffee.mjs'
+import * as ts from 'ts-node/esm'
+
+const coffeeRegex = /\.coffee$|\.litcoffee$|\.coffee\.md$/;
+const tsRegex = /\.ts$/;
+
+export function resolve(specifier, context, defaultResolve) {
+ if (coffeeRegex.test(specifier)) {
+ return coffee.resolve.apply(this, arguments)
+ }
+ if (tsRegex.test(specifier)) {
+ return ts.resolve.apply(this, arguments)
+ }
+ return ts.resolve.apply(this, arguments);
+}
+
+export function getFormat(url, context, defaultGetFormat) {
+ if (coffeeRegex.test(url)) {
+ return coffee.getFormat.apply(this, arguments)
+ }
+ if (tsRegex.test(url)) {
+ return ts.getFormat.apply(this, arguments)
+ }
+ return ts.getFormat.apply(this, arguments);
+}
+
+export function transformSource(source, context, defaultTransformSource) {
+ const { url } = context;
+ if (coffeeRegex.test(url)) {
+ return coffee.transformSource.apply(this, arguments)
+ }
+ if (tsRegex.test(url)) {
+ return ts.transformSource.apply(this, arguments)
+ }
+ return ts.transformSource.apply(this, arguments);
+}
diff --git a/packages/csv-parse/test/loaders/coffee.mjs b/packages/csv-parse/test/loaders/coffee.mjs
new file mode 100644
index 000000000..f4945adb7
--- /dev/null
+++ b/packages/csv-parse/test/loaders/coffee.mjs
@@ -0,0 +1,50 @@
+// coffeescript-loader.mjs
+import { URL, pathToFileURL } from 'url';
+import CoffeeScript from 'coffeescript';
+import { cwd } from 'process';
+
+const baseURL = pathToFileURL(`${cwd()}/`).href;
+
+// CoffeeScript files end in .coffee, .litcoffee or .coffee.md.
+const extensionsRegex = /\.coffee$|\.litcoffee$|\.coffee\.md$/;
+
+export function resolve(specifier, context, defaultResolve) {
+ const { parentURL = baseURL } = context;
+ // Node.js normally errors on unknown file extensions, so return a URL for
+ // specifiers ending in the CoffeeScript file extensions.
+ if (extensionsRegex.test(specifier)) {
+ return {
+ url: new URL(specifier, parentURL).href,
+ stop: true
+ };
+ }
+ // Let Node.js handle all other specifiers.
+ return defaultResolve(specifier, context, defaultResolve);
+}
+
+export function getFormat(url, context, defaultGetFormat) {
+ // Now that we patched resolve to let CoffeeScript URLs through, we need to
+ // tell Node.js what format such URLs should be interpreted as. For the
+ // purposes of this loader, all CoffeeScript URLs are ES modules.
+ if (extensionsRegex.test(url)) {
+ return {
+ format: 'module',
+ stop: true
+ };
+ }
+ // Let Node.js handle all other URLs.
+ return defaultGetFormat(url, context, defaultGetFormat);
+}
+
+export function transformSource(source, context, defaultTransformSource) {
+ const { url, format } = context;
+
+ if (extensionsRegex.test(url)) {
+ return {
+ source: CoffeeScript.compile(String(source), { bare: true })
+ };
+ }
+
+ // Let Node.js handle all other sources.
+ return defaultTransformSource(source, context, defaultTransformSource);
+}
diff --git a/packages/csv-parse/test/option.bom.coffee b/packages/csv-parse/test/option.bom.coffee
index 94d41006d..5566368ba 100644
--- a/packages/csv-parse/test/option.bom.coffee
+++ b/packages/csv-parse/test/option.bom.coffee
@@ -1,6 +1,6 @@
-parse = require '../lib'
-assert_error = require './api.assert_error'
+import parse from '../lib/index.js'
+import {assert_error} from './api.assert_error.coffee'
describe 'Option `bom`', ->
diff --git a/packages/csv-parse/test/option.cast.coffee b/packages/csv-parse/test/option.cast.coffee
index 41c16ea8d..f5e4bf5ca 100644
--- a/packages/csv-parse/test/option.cast.coffee
+++ b/packages/csv-parse/test/option.cast.coffee
@@ -1,6 +1,6 @@
-parse = require '../lib'
-assert_error = require './api.assert_error'
+import parse from '../lib/index.js'
+import {assert_error} from './api.assert_error.coffee'
describe 'Option `cast`', ->
diff --git a/packages/csv-parse/test/option.cast_date.coffee b/packages/csv-parse/test/option.cast_date.coffee
index 1e4ddf01f..1632cab69 100644
--- a/packages/csv-parse/test/option.cast_date.coffee
+++ b/packages/csv-parse/test/option.cast_date.coffee
@@ -1,6 +1,6 @@
-parse = require '../lib'
-assert_error = require './api.assert_error'
+import parse from '../lib/index.js'
+import {assert_error} from './api.assert_error.coffee'
describe 'Option `cast_date`', ->
diff --git a/packages/csv-parse/test/option.columns.coffee b/packages/csv-parse/test/option.columns.coffee
index f01cfacbe..cf6cbf909 100644
--- a/packages/csv-parse/test/option.columns.coffee
+++ b/packages/csv-parse/test/option.columns.coffee
@@ -1,6 +1,6 @@
-parse = require '../lib'
-assert_error = require './api.assert_error'
+import parse from '../lib/index.js'
+import {assert_error} from './api.assert_error.coffee'
describe 'Option `columns`', ->
diff --git a/packages/csv-parse/test/option.columns_duplicates_to_array.coffee b/packages/csv-parse/test/option.columns_duplicates_to_array.coffee
index 5b924db40..726078815 100644
--- a/packages/csv-parse/test/option.columns_duplicates_to_array.coffee
+++ b/packages/csv-parse/test/option.columns_duplicates_to_array.coffee
@@ -1,6 +1,6 @@
-parse = require '../lib'
-assert_error = require './api.assert_error'
+import parse from '../lib/index.js'
+import {assert_error} from './api.assert_error.coffee'
describe 'Option `columns_duplicates_to_array`', ->
diff --git a/packages/csv-parse/test/option.comment.coffee b/packages/csv-parse/test/option.comment.coffee
index b56c80350..667315eb8 100644
--- a/packages/csv-parse/test/option.comment.coffee
+++ b/packages/csv-parse/test/option.comment.coffee
@@ -1,5 +1,5 @@
-parse = require '../lib'
+import parse from '../lib/index.js'
describe 'Option `comment`', ->
diff --git a/packages/csv-parse/test/option.delimiter.coffee b/packages/csv-parse/test/option.delimiter.coffee
index 1be352009..d026314fd 100644
--- a/packages/csv-parse/test/option.delimiter.coffee
+++ b/packages/csv-parse/test/option.delimiter.coffee
@@ -1,5 +1,5 @@
-parse = require '../lib'
+import parse from '../lib/index.js'
describe 'Option `delimiter`', ->
diff --git a/packages/csv-parse/test/option.encoding.coffee b/packages/csv-parse/test/option.encoding.coffee
index 6ce62f113..602db5cae 100644
--- a/packages/csv-parse/test/option.encoding.coffee
+++ b/packages/csv-parse/test/option.encoding.coffee
@@ -1,5 +1,5 @@
-parse = require '../lib'
+import parse from '../lib/index.js'
describe 'Option `encoding`', ->
diff --git a/packages/csv-parse/test/option.escape.coffee b/packages/csv-parse/test/option.escape.coffee
index 941aae49a..059912644 100644
--- a/packages/csv-parse/test/option.escape.coffee
+++ b/packages/csv-parse/test/option.escape.coffee
@@ -1,5 +1,5 @@
-parse = require '../lib'
+import parse from '../lib/index.js'
describe 'Option `escape`', ->
diff --git a/packages/csv-parse/test/option.from.coffee b/packages/csv-parse/test/option.from.coffee
index 29b6636ed..5fefd4959 100644
--- a/packages/csv-parse/test/option.from.coffee
+++ b/packages/csv-parse/test/option.from.coffee
@@ -1,5 +1,5 @@
-parse = require '../lib'
+import parse from '../lib/index.js'
describe 'Option `from`', ->
diff --git a/packages/csv-parse/test/option.from_line.coffee b/packages/csv-parse/test/option.from_line.coffee
index d953b14be..e20316ba3 100644
--- a/packages/csv-parse/test/option.from_line.coffee
+++ b/packages/csv-parse/test/option.from_line.coffee
@@ -1,5 +1,5 @@
-parse = require '../lib'
+import parse from '../lib/index.js'
describe 'Option `from_line`', ->
diff --git a/packages/csv-parse/test/option.ignore_last_delimiters.coffee b/packages/csv-parse/test/option.ignore_last_delimiters.coffee
index 696c4c243..2be552961 100644
--- a/packages/csv-parse/test/option.ignore_last_delimiters.coffee
+++ b/packages/csv-parse/test/option.ignore_last_delimiters.coffee
@@ -1,4 +1,5 @@
-parse = require '../lib'
+
+import parse from '../lib/index.js'
describe 'Option `ignore_last_delimiters`', ->
diff --git a/packages/csv-parse/test/option.info.coffee b/packages/csv-parse/test/option.info.coffee
index bf40988a2..70084a043 100644
--- a/packages/csv-parse/test/option.info.coffee
+++ b/packages/csv-parse/test/option.info.coffee
@@ -1,5 +1,5 @@
-parse = require '../lib'
+import parse from '../lib/index.js'
describe 'Option `info`', ->
diff --git a/packages/csv-parse/test/option.ltrim.coffee b/packages/csv-parse/test/option.ltrim.coffee
index 67aa91f15..260395686 100644
--- a/packages/csv-parse/test/option.ltrim.coffee
+++ b/packages/csv-parse/test/option.ltrim.coffee
@@ -1,6 +1,6 @@
-parse = require '../lib'
-assert_error = require './api.assert_error'
+import parse from '../lib/index.js'
+import {assert_error} from './api.assert_error.coffee'
describe 'Option `ltrim`', ->
diff --git a/packages/csv-parse/test/option.max_record_size.coffee b/packages/csv-parse/test/option.max_record_size.coffee
index 9663f72ac..aac5466f7 100644
--- a/packages/csv-parse/test/option.max_record_size.coffee
+++ b/packages/csv-parse/test/option.max_record_size.coffee
@@ -1,6 +1,6 @@
-parse = require '../lib'
-assert_error = require './api.assert_error'
+import parse from '../lib/index.js'
+import {assert_error} from './api.assert_error.coffee'
describe 'Option `max_record_size`', ->
diff --git a/packages/csv-parse/test/option.objname.coffee b/packages/csv-parse/test/option.objname.coffee
index 0463a80c0..1bae67432 100644
--- a/packages/csv-parse/test/option.objname.coffee
+++ b/packages/csv-parse/test/option.objname.coffee
@@ -1,5 +1,5 @@
-parse = require '../lib'
+import parse from '../lib/index.js'
describe 'Option `objname`', ->
diff --git a/packages/csv-parse/test/option.on_record.coffee b/packages/csv-parse/test/option.on_record.coffee
index 27380e7e3..2ae90c9e0 100644
--- a/packages/csv-parse/test/option.on_record.coffee
+++ b/packages/csv-parse/test/option.on_record.coffee
@@ -1,6 +1,6 @@
-parse = require '../lib'
-assert_error = require './api.assert_error'
+import parse from '../lib/index.js'
+import {assert_error} from './api.assert_error.coffee'
describe 'Option `on_record`', ->
diff --git a/packages/csv-parse/test/option.quote.coffee b/packages/csv-parse/test/option.quote.coffee
index c16dc2310..dab5c3915 100644
--- a/packages/csv-parse/test/option.quote.coffee
+++ b/packages/csv-parse/test/option.quote.coffee
@@ -1,6 +1,6 @@
-parse = require '../lib'
-assert_error = require './api.assert_error'
+import parse from '../lib/index.js'
+import {assert_error} from './api.assert_error.coffee'
describe 'Option `quote`', ->
diff --git a/packages/csv-parse/test/option.raw.coffee b/packages/csv-parse/test/option.raw.coffee
index 1a72da779..02f59417c 100644
--- a/packages/csv-parse/test/option.raw.coffee
+++ b/packages/csv-parse/test/option.raw.coffee
@@ -1,5 +1,5 @@
-parse = require '../lib'
+import parse from '../lib/index.js'
describe 'Option `raw`', ->
diff --git a/packages/csv-parse/test/option.record_delimiter.coffee b/packages/csv-parse/test/option.record_delimiter.coffee
index 30f3d1dcf..f7f76d19f 100644
--- a/packages/csv-parse/test/option.record_delimiter.coffee
+++ b/packages/csv-parse/test/option.record_delimiter.coffee
@@ -1,5 +1,5 @@
-parse = require '../lib'
+import parse from '../lib/index.js'
describe 'Option `record_delimiter`', ->
diff --git a/packages/csv-parse/test/option.relax.coffee b/packages/csv-parse/test/option.relax.coffee
index 5cc37dca7..f6d2d74bf 100644
--- a/packages/csv-parse/test/option.relax.coffee
+++ b/packages/csv-parse/test/option.relax.coffee
@@ -1,6 +1,6 @@
-parse = require '../lib'
-assert_error = require './api.assert_error'
+import parse from '../lib/index.js'
+import {assert_error} from './api.assert_error.coffee'
describe 'Option `relax`', ->
diff --git a/packages/csv-parse/test/option.relax_column_count.coffee b/packages/csv-parse/test/option.relax_column_count.coffee
index 3b2c0194d..e04042cfb 100644
--- a/packages/csv-parse/test/option.relax_column_count.coffee
+++ b/packages/csv-parse/test/option.relax_column_count.coffee
@@ -1,6 +1,6 @@
-parse = require '../lib'
-assert_error = require './api.assert_error'
+import parse from '../lib/index.js'
+import {assert_error} from './api.assert_error.coffee'
describe 'Option `relax_column_count`', ->
diff --git a/packages/csv-parse/test/option.rtrim.coffee b/packages/csv-parse/test/option.rtrim.coffee
index b2880224e..72e0f0836 100644
--- a/packages/csv-parse/test/option.rtrim.coffee
+++ b/packages/csv-parse/test/option.rtrim.coffee
@@ -1,6 +1,6 @@
-parse = require '../lib'
-assert_error = require './api.assert_error'
+import parse from '../lib/index.js'
+import {assert_error} from './api.assert_error.coffee'
describe 'Option `rtrim`', ->
diff --git a/packages/csv-parse/test/option.skip_empty_lines.coffee b/packages/csv-parse/test/option.skip_empty_lines.coffee
index 17b605afa..16011efe1 100644
--- a/packages/csv-parse/test/option.skip_empty_lines.coffee
+++ b/packages/csv-parse/test/option.skip_empty_lines.coffee
@@ -1,5 +1,5 @@
-parse = require '../lib'
+import parse from '../lib/index.js'
describe 'Option `skip_empty_lines`', ->
diff --git a/packages/csv-parse/test/option.skip_lines_with_empty_values.coffee b/packages/csv-parse/test/option.skip_lines_with_empty_values.coffee
index f8b203dc2..b3097d563 100644
--- a/packages/csv-parse/test/option.skip_lines_with_empty_values.coffee
+++ b/packages/csv-parse/test/option.skip_lines_with_empty_values.coffee
@@ -1,5 +1,5 @@
-parse = require '../lib'
+import parse from '../lib/index.js'
describe 'Option `skip_lines_with_empty_values`', ->
diff --git a/packages/csv-parse/test/option.skip_lines_with_error.coffee b/packages/csv-parse/test/option.skip_lines_with_error.coffee
index 4ef531e43..50d0ca268 100644
--- a/packages/csv-parse/test/option.skip_lines_with_error.coffee
+++ b/packages/csv-parse/test/option.skip_lines_with_error.coffee
@@ -1,6 +1,6 @@
-parse = require '../lib'
-assert_error = require './api.assert_error'
+import parse from '../lib/index.js'
+import {assert_error} from './api.assert_error.coffee'
describe 'Option `skip_lines_with_error`', ->
diff --git a/packages/csv-parse/test/option.to.coffee b/packages/csv-parse/test/option.to.coffee
index da0fd86ed..3cc073f78 100644
--- a/packages/csv-parse/test/option.to.coffee
+++ b/packages/csv-parse/test/option.to.coffee
@@ -1,5 +1,5 @@
-parse = require '../lib'
+import parse from '../lib/index.js'
describe 'Option `to`', ->
diff --git a/packages/csv-parse/test/option.to_line.coffee b/packages/csv-parse/test/option.to_line.coffee
index 40cd0afdb..be7e82b22 100644
--- a/packages/csv-parse/test/option.to_line.coffee
+++ b/packages/csv-parse/test/option.to_line.coffee
@@ -1,5 +1,5 @@
-parse = require '../lib'
+import parse from '../lib/index.js'
describe 'Option `to_line`', ->
diff --git a/packages/csv-parse/test/option.trim.coffee b/packages/csv-parse/test/option.trim.coffee
index 8ff43729e..9f1fad076 100644
--- a/packages/csv-parse/test/option.trim.coffee
+++ b/packages/csv-parse/test/option.trim.coffee
@@ -1,5 +1,5 @@
-parse = require '../lib'
+import parse from '../lib/index.js'
describe 'Option `rtrim`', ->
diff --git a/packages/csv-parse/test/options.coffee b/packages/csv-parse/test/options.coffee
index 7bf444b63..a1c54c097 100644
--- a/packages/csv-parse/test/options.coffee
+++ b/packages/csv-parse/test/options.coffee
@@ -1,5 +1,5 @@
-parse = require '../lib'
+import parse from '../lib/index.js'
describe 'Options', ->
diff --git a/packages/csv-parse/test/samples.coffee b/packages/csv-parse/test/samples.coffee
index a5ddd228d..e85d0ac1e 100644
--- a/packages/csv-parse/test/samples.coffee
+++ b/packages/csv-parse/test/samples.coffee
@@ -1,17 +1,15 @@
-fs = require('fs').promises
-path = require 'path'
-{exec} = require 'child_process'
+import fs from 'fs'
+import path from 'path'
+import {exec} from 'child_process'
-# `describe` label doesnt print because the code inside calling `it` is asynchronous.
-# From Mocha.js doc
-# https://mochajs.org/#dynamically-generating-tests
-# With top-level await you can collect your test data in a dynamic and asynchronous way while the test file is being loaded
+import { fileURLToPath } from 'url';
+__dirname = path.dirname fileURLToPath `import.meta.url`
+dir = path.resolve __dirname, '../samples'
+samples = fs.readdirSync dir
describe 'Samples', ->
- dir = path.resolve __dirname, '../samples'
- samples = await fs.readdir dir
for sample in samples
continue unless /\.js$/.test sample
it "Sample #{sample}", (callback) ->
diff --git a/packages/csv-parse/test/spectrum.coffee b/packages/csv-parse/test/spectrum.coffee
index 6f8ac92c4..3269de1f8 100644
--- a/packages/csv-parse/test/spectrum.coffee
+++ b/packages/csv-parse/test/spectrum.coffee
@@ -1,7 +1,7 @@
-spectrum = require 'csv-spectrum'
-each = require 'each'
-parse = require '../lib'
+import {default as spectrum} from 'csv-spectrum'
+import {default as each} from 'each'
+import parse from '../lib/index.js'
describe 'spectrum', ->
diff --git a/packages/csv-parse/tsconfig.json b/packages/csv-parse/tsconfig.json
index 9f40782ea..4db508a7c 100644
--- a/packages/csv-parse/tsconfig.json
+++ b/packages/csv-parse/tsconfig.json
@@ -1,7 +1,8 @@
{
"compileOnSave": false,
"compilerOptions": {
- "target": "es6",
+ "esModuleInterop": true,
+ "module": "ES2020",
"moduleResolution": "node",
"strict": true,
}
diff --git a/packages/csv-stringify/lib/browser/index.js b/packages/csv-stringify/lib/browser/index.js
index 43f662234..a681c232d 100644
--- a/packages/csv-stringify/lib/browser/index.js
+++ b/packages/csv-stringify/lib/browser/index.js
@@ -2,6 +2,13 @@
(function (Buffer,setImmediate){(function (){
"use strict";
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.stringify = exports["default"] = void 0;
+
+var _stream = require("stream");
+
function _wrapNativeSuper(Class) { var _cache = typeof Map === "function" ? new Map() : undefined; _wrapNativeSuper = function _wrapNativeSuper(Class) { if (Class === null || !_isNativeFunction(Class)) return Class; if (typeof Class !== "function") { throw new TypeError("Super expression must either be null or a function"); } if (typeof _cache !== "undefined") { if (_cache.has(Class)) return _cache.get(Class); _cache.set(Class, Wrapper); } function Wrapper() { return _construct(Class, arguments, _getPrototypeOf(this).constructor); } Wrapper.prototype = Object.create(Class.prototype, { constructor: { value: Wrapper, enumerable: false, writable: true, configurable: true } }); return _setPrototypeOf(Wrapper, Class); }; return _wrapNativeSuper(Class); }
function _construct(Parent, args, Class) { if (_isNativeReflectConstruct()) { _construct = Reflect.construct; } else { _construct = function _construct(Parent, args, Class) { var a = [null]; a.push.apply(a, args); var Constructor = Function.bind.apply(Parent, a); var instance = new Constructor(); if (Class) _setPrototypeOf(instance, Class.prototype); return instance; }; } return _construct.apply(null, arguments); }
@@ -50,15 +57,6 @@ function _isNativeReflectConstruct() { if (typeof Reflect === "undefined" || !Re
function _getPrototypeOf(o) { _getPrototypeOf = Object.setPrototypeOf ? Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return _getPrototypeOf(o); }
-/*
-CSV Stringify
-
-Please look at the [project documentation](https://csv.js.org/stringify/) for
-additional information.
-*/
-var _require = require('stream'),
- Transform = _require.Transform;
-
var bom_utf8 = Buffer.from([239, 187, 191]);
var Stringifier = /*#__PURE__*/function (_Transform) {
@@ -711,7 +709,7 @@ var Stringifier = /*#__PURE__*/function (_Transform) {
}]);
return Stringifier;
-}(Transform);
+}(_stream.Transform);
var stringify = function stringify() {
var data, options, callback;
@@ -793,6 +791,8 @@ var stringify = function stringify() {
return stringifier;
};
+exports.stringify = stringify;
+
var CsvError = /*#__PURE__*/function (_Error) {
_inherits(CsvError, _Error);
@@ -833,7 +833,8 @@ var CsvError = /*#__PURE__*/function (_Error) {
stringify.Stringifier = Stringifier;
stringify.CsvError = CsvError;
-module.exports = stringify;
+var _default = stringify;
+exports["default"] = _default;
var isObject = function isObject(obj) {
return _typeof(obj) === 'object' && obj !== null && !Array.isArray(obj);
diff --git a/packages/csv-stringify/lib/browser/sync.js b/packages/csv-stringify/lib/browser/sync.js
index ae22b457b..de2d4f93a 100644
--- a/packages/csv-stringify/lib/browser/sync.js
+++ b/packages/csv-stringify/lib/browser/sync.js
@@ -2,6 +2,13 @@
(function (Buffer,setImmediate){(function (){
"use strict";
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.stringify = exports["default"] = void 0;
+
+var _stream = require("stream");
+
function _wrapNativeSuper(Class) { var _cache = typeof Map === "function" ? new Map() : undefined; _wrapNativeSuper = function _wrapNativeSuper(Class) { if (Class === null || !_isNativeFunction(Class)) return Class; if (typeof Class !== "function") { throw new TypeError("Super expression must either be null or a function"); } if (typeof _cache !== "undefined") { if (_cache.has(Class)) return _cache.get(Class); _cache.set(Class, Wrapper); } function Wrapper() { return _construct(Class, arguments, _getPrototypeOf(this).constructor); } Wrapper.prototype = Object.create(Class.prototype, { constructor: { value: Wrapper, enumerable: false, writable: true, configurable: true } }); return _setPrototypeOf(Wrapper, Class); }; return _wrapNativeSuper(Class); }
function _construct(Parent, args, Class) { if (_isNativeReflectConstruct()) { _construct = Reflect.construct; } else { _construct = function _construct(Parent, args, Class) { var a = [null]; a.push.apply(a, args); var Constructor = Function.bind.apply(Parent, a); var instance = new Constructor(); if (Class) _setPrototypeOf(instance, Class.prototype); return instance; }; } return _construct.apply(null, arguments); }
@@ -50,15 +57,6 @@ function _isNativeReflectConstruct() { if (typeof Reflect === "undefined" || !Re
function _getPrototypeOf(o) { _getPrototypeOf = Object.setPrototypeOf ? Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return _getPrototypeOf(o); }
-/*
-CSV Stringify
-
-Please look at the [project documentation](https://csv.js.org/stringify/) for
-additional information.
-*/
-var _require = require('stream'),
- Transform = _require.Transform;
-
var bom_utf8 = Buffer.from([239, 187, 191]);
var Stringifier = /*#__PURE__*/function (_Transform) {
@@ -711,7 +709,7 @@ var Stringifier = /*#__PURE__*/function (_Transform) {
}]);
return Stringifier;
-}(Transform);
+}(_stream.Transform);
var stringify = function stringify() {
var data, options, callback;
@@ -793,6 +791,8 @@ var stringify = function stringify() {
return stringifier;
};
+exports.stringify = stringify;
+
var CsvError = /*#__PURE__*/function (_Error) {
_inherits(CsvError, _Error);
@@ -833,7 +833,8 @@ var CsvError = /*#__PURE__*/function (_Error) {
stringify.Stringifier = Stringifier;
stringify.CsvError = CsvError;
-module.exports = stringify;
+var _default = stringify;
+exports["default"] = _default;
var isObject = function isObject(obj) {
return _typeof(obj) === 'object' && obj !== null && !Array.isArray(obj);
@@ -934,23 +935,29 @@ var get = function get(object, path) {
(function (Buffer){(function (){
"use strict";
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports["default"] = _default;
+
+var _index = _interopRequireDefault(require("./index.js"));
+
+var _string_decoder = require("string_decoder");
+
+function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { "default": obj }; }
+
function _createForOfIteratorHelper(o, allowArrayLike) { var it = typeof Symbol !== "undefined" && o[Symbol.iterator] || o["@@iterator"]; if (!it) { if (Array.isArray(o) || (it = _unsupportedIterableToArray(o)) || allowArrayLike && o && typeof o.length === "number") { if (it) o = it; var i = 0; var F = function F() {}; return { s: F, n: function n() { if (i >= o.length) return { done: true }; return { done: false, value: o[i++] }; }, e: function e(_e) { throw _e; }, f: F }; } throw new TypeError("Invalid attempt to iterate non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method."); } var normalCompletion = true, didErr = false, err; return { s: function s() { it = it.call(o); }, n: function n() { var step = it.next(); normalCompletion = step.done; return step; }, e: function e(_e2) { didErr = true; err = _e2; }, f: function f() { try { if (!normalCompletion && it["return"] != null) it["return"](); } finally { if (didErr) throw err; } } }; }
function _unsupportedIterableToArray(o, minLen) { if (!o) return; if (typeof o === "string") return _arrayLikeToArray(o, minLen); var n = Object.prototype.toString.call(o).slice(8, -1); if (n === "Object" && o.constructor) n = o.constructor.name; if (n === "Map" || n === "Set") return Array.from(o); if (n === "Arguments" || /^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)) return _arrayLikeToArray(o, minLen); }
function _arrayLikeToArray(arr, len) { if (len == null || len > arr.length) len = arr.length; for (var i = 0, arr2 = new Array(len); i < len; i++) { arr2[i] = arr[i]; } return arr2; }
-var stringify = require('.');
-
-var _require = require('string_decoder'),
- StringDecoder = _require.StringDecoder;
-
-module.exports = function (records) {
+function _default(records) {
var options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
var data = [];
if (Buffer.isBuffer(records)) {
- var decoder = new StringDecoder();
+ var decoder = new _string_decoder.StringDecoder();
records = decoder.write(records);
}
@@ -960,7 +967,7 @@ module.exports = function (records) {
}
}
- var stringifier = new stringify.Stringifier(options);
+ var stringifier = new _index["default"].Stringifier(options);
stringifier.on('data', onData);
var _iterator = _createForOfIteratorHelper(records),
@@ -980,10 +987,10 @@ module.exports = function (records) {
stringifier.end();
stringifier.off('data', onData);
return data.join('');
-};
+}
}).call(this)}).call(this,{"isBuffer":require("../node_modules/is-buffer/index.js")})
-},{".":1,"../node_modules/is-buffer/index.js":9,"string_decoder":27}],3:[function(require,module,exports){
+},{"../node_modules/is-buffer/index.js":9,"./index.js":1,"string_decoder":27}],3:[function(require,module,exports){
'use strict'
exports.byteLength = byteLength
diff --git a/packages/csv-stringify/lib/es5/index.js b/packages/csv-stringify/lib/es5/index.js
index 77d4dc8a8..c5544d1ae 100644
--- a/packages/csv-stringify/lib/es5/index.js
+++ b/packages/csv-stringify/lib/es5/index.js
@@ -1,5 +1,12 @@
"use strict";
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.stringify = exports["default"] = void 0;
+
+var _stream = require("stream");
+
function _wrapNativeSuper(Class) { var _cache = typeof Map === "function" ? new Map() : undefined; _wrapNativeSuper = function _wrapNativeSuper(Class) { if (Class === null || !_isNativeFunction(Class)) return Class; if (typeof Class !== "function") { throw new TypeError("Super expression must either be null or a function"); } if (typeof _cache !== "undefined") { if (_cache.has(Class)) return _cache.get(Class); _cache.set(Class, Wrapper); } function Wrapper() { return _construct(Class, arguments, _getPrototypeOf(this).constructor); } Wrapper.prototype = Object.create(Class.prototype, { constructor: { value: Wrapper, enumerable: false, writable: true, configurable: true } }); return _setPrototypeOf(Wrapper, Class); }; return _wrapNativeSuper(Class); }
function _construct(Parent, args, Class) { if (_isNativeReflectConstruct()) { _construct = Reflect.construct; } else { _construct = function _construct(Parent, args, Class) { var a = [null]; a.push.apply(a, args); var Constructor = Function.bind.apply(Parent, a); var instance = new Constructor(); if (Class) _setPrototypeOf(instance, Class.prototype); return instance; }; } return _construct.apply(null, arguments); }
@@ -48,15 +55,6 @@ function _isNativeReflectConstruct() { if (typeof Reflect === "undefined" || !Re
function _getPrototypeOf(o) { _getPrototypeOf = Object.setPrototypeOf ? Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return _getPrototypeOf(o); }
-/*
-CSV Stringify
-
-Please look at the [project documentation](https://csv.js.org/stringify/) for
-additional information.
-*/
-var _require = require('stream'),
- Transform = _require.Transform;
-
var bom_utf8 = Buffer.from([239, 187, 191]);
var Stringifier = /*#__PURE__*/function (_Transform) {
@@ -709,7 +707,7 @@ var Stringifier = /*#__PURE__*/function (_Transform) {
}]);
return Stringifier;
-}(Transform);
+}(_stream.Transform);
var stringify = function stringify() {
var data, options, callback;
@@ -791,6 +789,8 @@ var stringify = function stringify() {
return stringifier;
};
+exports.stringify = stringify;
+
var CsvError = /*#__PURE__*/function (_Error) {
_inherits(CsvError, _Error);
@@ -831,7 +831,8 @@ var CsvError = /*#__PURE__*/function (_Error) {
stringify.Stringifier = Stringifier;
stringify.CsvError = CsvError;
-module.exports = stringify;
+var _default = stringify;
+exports["default"] = _default;
var isObject = function isObject(obj) {
return _typeof(obj) === 'object' && obj !== null && !Array.isArray(obj);
diff --git a/packages/csv-stringify/lib/es5/sync.js b/packages/csv-stringify/lib/es5/sync.js
index f8ad02d31..6e08d3b10 100644
--- a/packages/csv-stringify/lib/es5/sync.js
+++ b/packages/csv-stringify/lib/es5/sync.js
@@ -1,22 +1,28 @@
"use strict";
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports["default"] = _default;
+
+var _index = _interopRequireDefault(require("./index.js"));
+
+var _string_decoder = require("string_decoder");
+
+function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { "default": obj }; }
+
function _createForOfIteratorHelper(o, allowArrayLike) { var it = typeof Symbol !== "undefined" && o[Symbol.iterator] || o["@@iterator"]; if (!it) { if (Array.isArray(o) || (it = _unsupportedIterableToArray(o)) || allowArrayLike && o && typeof o.length === "number") { if (it) o = it; var i = 0; var F = function F() {}; return { s: F, n: function n() { if (i >= o.length) return { done: true }; return { done: false, value: o[i++] }; }, e: function e(_e) { throw _e; }, f: F }; } throw new TypeError("Invalid attempt to iterate non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method."); } var normalCompletion = true, didErr = false, err; return { s: function s() { it = it.call(o); }, n: function n() { var step = it.next(); normalCompletion = step.done; return step; }, e: function e(_e2) { didErr = true; err = _e2; }, f: function f() { try { if (!normalCompletion && it["return"] != null) it["return"](); } finally { if (didErr) throw err; } } }; }
function _unsupportedIterableToArray(o, minLen) { if (!o) return; if (typeof o === "string") return _arrayLikeToArray(o, minLen); var n = Object.prototype.toString.call(o).slice(8, -1); if (n === "Object" && o.constructor) n = o.constructor.name; if (n === "Map" || n === "Set") return Array.from(o); if (n === "Arguments" || /^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)) return _arrayLikeToArray(o, minLen); }
function _arrayLikeToArray(arr, len) { if (len == null || len > arr.length) len = arr.length; for (var i = 0, arr2 = new Array(len); i < len; i++) { arr2[i] = arr[i]; } return arr2; }
-var stringify = require('.');
-
-var _require = require('string_decoder'),
- StringDecoder = _require.StringDecoder;
-
-module.exports = function (records) {
+function _default(records) {
var options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
var data = [];
if (Buffer.isBuffer(records)) {
- var decoder = new StringDecoder();
+ var decoder = new _string_decoder.StringDecoder();
records = decoder.write(records);
}
@@ -26,7 +32,7 @@ module.exports = function (records) {
}
}
- var stringifier = new stringify.Stringifier(options);
+ var stringifier = new _index["default"].Stringifier(options);
stringifier.on('data', onData);
var _iterator = _createForOfIteratorHelper(records),
@@ -46,4 +52,4 @@ module.exports = function (records) {
stringifier.end();
stringifier.off('data', onData);
return data.join('');
-};
\ No newline at end of file
+}
\ No newline at end of file
diff --git a/packages/csv-stringify/lib/index.d.ts b/packages/csv-stringify/lib/index.d.ts
index 91d0c4dde..667821a4f 100644
--- a/packages/csv-stringify/lib/index.d.ts
+++ b/packages/csv-stringify/lib/index.d.ts
@@ -1,101 +1,103 @@
/// <reference types="node" />
import * as stream from "stream";
-export = stringify
-declare function stringify(callback?: stringify.Callback): stringify.Stringifier
-declare function stringify(options: stringify.Options, callback?: stringify.Callback): stringify.Stringifier
-declare function stringify(input: stringify.Input, callback?: stringify.Callback): stringify.Stringifier
-declare function stringify(input: stringify.Input, options?: stringify.Options, callback?: stringify.Callback): stringify.Stringifier
-declare namespace stringify {
- type Callback = (err: Error | undefined, output: string) => void
- type RecordDelimiter = string | Buffer | 'auto' | 'unix' | 'mac' | 'windows' | 'ascii' | 'unicode'
- type Cast<T> = (value: T, context: CastingContext) => string
- type PlainObject<T> = Record<string, T>
- type Input = any[]
- interface ColumnOption {
- key: string
- header?: string
- }
- interface CastingContext {
- readonly column?: number | string;
- readonly header: boolean;
- readonly index: number;
- readonly records: number;
- }
- interface Options {
- /**
- * Prepend the byte order mark (BOM) to the output stream.
- */
- bom?: boolean
- /**
- * Key-value object which defines custom cast for certain data types
- */
- cast?: {
- boolean?: Cast<boolean>
- date?: Cast<Date>
- number?: Cast<number>
- /**
- * Custom formatter for generic object values
- */
- object?: Cast<Record<string, any>>
- string?: Cast<string>
- }
- /**
- * List of fields, applied when `transform` returns an object
- * order matters
- * read the transformer documentation for additional information
- * columns are auto discovered in the first record when the user writes objects
- * can refer to nested properties of the input JSON
- * see the "header" option on how to print column names on the first line
- */
- columns?: string[] | PlainObject<any> | ColumnOption[]
- /**
- * Set the field delimiter, one character only, defaults to a comma.
- */
- delimiter?: string | Buffer
- /**
- * Add the value of "options.RecordDelimiter" on the last line, default to true.
- */
- eof?: boolean
- /**
- * Defaults to the escape read option.
- */
- escape?: string | Buffer
- /**
- * Display the column names on the first line if the columns option is provided or discovered.
- */
- header?: boolean
- /**
- * The quote characters, defaults to the ", an empty quote value will preserve the original field.
- */
- quote?: string | Buffer | boolean
- /**
- * Boolean, default to false, quote all the non-empty fields even if not required.
- */
- quoted?: boolean
-
- /**
- * Boolean, no default, quote empty fields and overrides `quoted_string` on empty strings when defined.
- */
- quoted_empty?: boolean
- /**
- * String or RegExp, no default, quote all fields matching a regular expression.
- */
- quoted_match?: string | RegExp
- /**
- * Boolean, default to false, quote all fields of type string even if not required.
- */
- quoted_string?: boolean
+export type Callback = (err: Error | undefined, output: string) => void
+export type RecordDelimiter = string | Buffer | 'auto' | 'unix' | 'mac' | 'windows' | 'ascii' | 'unicode'
+export type Cast<T> = (value: T, context: CastingContext) => string
+export type PlainObject<T> = Record<string, T>
+export type Input = any[]
+export interface ColumnOption {
+ key: string
+ header?: string
+}
+export interface CastingContext {
+ readonly column?: number | string;
+ readonly header: boolean;
+ readonly index: number;
+ readonly records: number;
+}
+export interface Options {
+ /**
+ * Prepend the byte order mark (BOM) to the output stream.
+ */
+ bom?: boolean
+ /**
+ * Key-value object which defines custom cast for certain data types
+ */
+ cast?: {
+ boolean?: Cast<boolean>
+ date?: Cast<Date>
+ number?: Cast<number>
/**
- * String used to delimit record rows or a special value
- * special values are 'auto', 'unix', 'mac', 'windows', 'ascii', 'unicode'
- * defaults to 'auto' (discovered in source or 'unix' if no source is specified).
+ * Custom formatter for generic object values
*/
- record_delimiter?: RecordDelimiter
- }
- class Stringifier extends stream.Transform {
- constructor(options: Options)
- readonly options: Options
+ object?: Cast<Record<string, any>>
+ string?: Cast<string>
}
+ /**
+ * List of fields, applied when `transform` returns an object
+ * order matters
+ * read the transformer documentation for additional information
+ * columns are auto discovered in the first record when the user writes objects
+ * can refer to nested properties of the input JSON
+ * see the "header" option on how to print column names on the first line
+ */
+ columns?: string[] | PlainObject<any> | ColumnOption[]
+ /**
+ * Set the field delimiter, one character only, defaults to a comma.
+ */
+ delimiter?: string | Buffer
+ /**
+ * Add the value of "options.RecordDelimiter" on the last line, default to true.
+ */
+ eof?: boolean
+ /**
+ * Defaults to the escape read option.
+ */
+ escape?: string | Buffer
+ /**
+ * Display the column names on the first line if the columns option is provided or discovered.
+ */
+ header?: boolean
+ /**
+ * The quote characters, defaults to the ", an empty quote value will preserve the original field.
+ */
+ quote?: string | Buffer | boolean
+ /**
+ * Boolean, default to false, quote all the non-empty fields even if not required.
+ */
+ quoted?: boolean
+
+ /**
+ * Boolean, no default, quote empty fields and overrides `quoted_string` on empty strings when defined.
+ */
+ quoted_empty?: boolean
+ /**
+ * String or RegExp, no default, quote all fields matching a regular expression.
+ */
+ quoted_match?: string | RegExp
+ /**
+ * Boolean, default to false, quote all fields of type string even if not required.
+ */
+ quoted_string?: boolean
+ /**
+ * String used to delimit record rows or a special value
+ * special values are 'auto', 'unix', 'mac', 'windows', 'ascii', 'unicode'
+ * defaults to 'auto' (discovered in source or 'unix' if no source is specified).
+ */
+ record_delimiter?: RecordDelimiter
}
+
+export class Stringifier extends stream.Transform {
+ constructor(options: Options)
+ readonly options: Options
+}
+
+declare function stringify(callback?: Callback): Stringifier
+declare function stringify(options: Options, callback?: Callback): Stringifier
+declare function stringify(input: Input, callback?: Callback): Stringifier
+declare function stringify(input: Input, options?: Options, callback?: Callback): Stringifier
+
+export default stringify
+export {stringify}
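
The declaration file above drops the `export =` plus namespace pattern in favour of a default export and named exports, matching the ESM source. A minimal consumer sketch follows; the `csv-stringify/lib/index.js` specifier and the exact output shown in the comment are assumptions for illustration, not part of this diff:

    // Minimal sketch, assuming the package is consumed from lib/index.js as an ES module.
    import stringify from 'csv-stringify/lib/index.js'   // default export
    // import { stringify } from 'csv-stringify/lib/index.js'  // named export, same function

    stringify(
      [{ year: '2021', phone: '555 0100' }],
      { header: true, columns: ['year', 'phone'] },       // fields from the Options interface above
      (err, output) => {
        if (err) throw err
        console.log(output) // "year,phone\n2021,555 0100\n" (expected)
      }
    )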
diff --git a/packages/csv-stringify/lib/index.js b/packages/csv-stringify/lib/index.js
index c309ff9fe..c28c59927 100644
--- a/packages/csv-stringify/lib/index.js
+++ b/packages/csv-stringify/lib/index.js
@@ -6,7 +6,7 @@ Please look at the [project documentation](https://csv.js.org/stringify/) for
additional information.
*/
-const { Transform } = require('stream')
+import { Transform } from 'stream'
const bom_utf8 = Buffer.from([239, 187, 191])
class Stringifier extends Transform {
@@ -536,10 +536,10 @@ class CsvError extends Error {
}
stringify.Stringifier = Stringifier
-
stringify.CsvError = CsvError
-module.exports = stringify
+export default stringify
+export {stringify}
const isObject = function(obj){
return typeof obj === 'object' && obj !== null && ! Array.isArray(obj)
diff --git a/packages/csv-stringify/lib/sync.d.ts b/packages/csv-stringify/lib/sync.d.ts
index 7c4aa9aee..5c76d1ef6 100644
--- a/packages/csv-stringify/lib/sync.d.ts
+++ b/packages/csv-stringify/lib/sync.d.ts
@@ -1,6 +1,6 @@
-import * as csvStringify from './index'
-export = stringify
+import {Input, Options} from './index'
-declare function stringify(input: csvStringify.Input, options?: csvStringify.Options): string
-declare namespace stringify {}
+declare function stringify(input: Input, options?: Options): string
+
+export default stringify
diff --git a/packages/csv-stringify/lib/sync.js b/packages/csv-stringify/lib/sync.js
index b44060f20..f6e84780f 100644
--- a/packages/csv-stringify/lib/sync.js
+++ b/packages/csv-stringify/lib/sync.js
@@ -1,8 +1,8 @@
-const stringify = require('.')
-const {StringDecoder} = require('string_decoder')
+import stringify from './index.js'
+import {StringDecoder} from 'string_decoder'
-module.exports = function(records, options={}){
+export default function(records, options={}){
const data = []
if(Buffer.isBuffer(records)){
const decoder = new StringDecoder()
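
The synchronous module above keeps its behaviour; only the module system changes. A minimal usage sketch, where the specifier and the expected output are assumptions:

    // Sketch of the synchronous API, assuming ESM consumption of lib/sync.js.
    import stringifySync from 'csv-stringify/lib/sync.js'

    const output = stringifySync([{ a: '1', b: '2' }], { header: true })
    // expected: output === 'a,b\n1,2\n' (columns discovered from the first record)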
diff --git a/packages/csv-stringify/package.json b/packages/csv-stringify/package.json
index 1433ecefd..3432eb421 100644
--- a/packages/csv-stringify/package.json
+++ b/packages/csv-stringify/package.json
@@ -50,16 +50,15 @@
],
"main": "./lib",
"mocha": {
- "throw-deprecation": true,
- "require": [
- "should",
- "coffeescript/register",
- "ts-node/register"
- ],
"inline-diffs": true,
- "timeout": 40000,
+ "loader": "./test/loaders/all.mjs",
+ "recursive": true,
"reporter": "spec",
- "recursive": true
+ "require": [
+ "should"
+ ],
+ "throw-deprecation": true,
+ "timeout": 40000
},
"scripts": {
"build:babel": "cd lib && babel *.js -d es5 && cd ..",
@@ -69,5 +68,6 @@
"pretest": "npm run build",
"test": "mocha test/**/*.{coffee,ts}"
},
+ "type": "module",
"types": "./lib/index.d.ts"
}
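
The `"type": "module"` flag is what makes every `.js` file in this package an ES module, which is why the samples and tests below switch from `require` to `import`. One consequence worth noting: on Node versions that cannot `require()` ES modules, a CommonJS consumer would have to fall back to a dynamic `import()`. A hypothetical consumer sketch, not part of this diff, with an assumed specifier:

    // Hypothetical CommonJS consumer; the specifier is an assumption.
    const load = async () => {
      const { default: stringify } = await import('csv-stringify/lib/index.js')
      stringify([['a', 'b']], (err, output) => {
        if (err) throw err
        console.log(output) // "a,b\n" (expected)
      })
    }
    load()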
diff --git a/packages/csv-stringify/samples/api.callback.js b/packages/csv-stringify/samples/api.callback.js
index c3742c64e..db7e43930 100644
--- a/packages/csv-stringify/samples/api.callback.js
+++ b/packages/csv-stringify/samples/api.callback.js
@@ -1,6 +1,6 @@
-const stringify = require('../lib')
-const assert = require('assert')
+import stringify from '../lib/index.js'
+import assert from 'assert'
stringify([
[ '1', '2', '3', '4' ],
diff --git a/packages/csv-stringify/samples/api.pipe.js b/packages/csv-stringify/samples/api.pipe.js
index 2be85a990..c3e957c78 100644
--- a/packages/csv-stringify/samples/api.pipe.js
+++ b/packages/csv-stringify/samples/api.pipe.js
@@ -1,6 +1,6 @@
-const stringify = require('../lib')
-const generate = require('csv-generate')
+import stringify from '../lib/index.js'
+import generate from 'csv-generate'
generate({
objectMode: true,
diff --git a/packages/csv-stringify/samples/api.stream.js b/packages/csv-stringify/samples/api.stream.js
index 573510ac7..ce59437b0 100644
--- a/packages/csv-stringify/samples/api.stream.js
+++ b/packages/csv-stringify/samples/api.stream.js
@@ -1,6 +1,6 @@
-const stringify = require('../lib')
-const assert = require('assert')
+import stringify from '../lib/index.js'
+import assert from 'assert'
const data = []
const stringifier = stringify({
diff --git a/packages/csv-stringify/samples/api.sync.memory.js b/packages/csv-stringify/samples/api.sync.memory.js
index 3b6f5ded8..649d08a48 100644
--- a/packages/csv-stringify/samples/api.sync.memory.js
+++ b/packages/csv-stringify/samples/api.sync.memory.js
@@ -1,4 +1,5 @@
-const stringify = require('../lib/sync');
+
+import stringify from '../lib/sync.js'
const r = v => (v / 1024 / 1024).toFixed(2);
const printMemoryUsage = () => {
diff --git a/packages/csv-stringify/samples/option.cast.js b/packages/csv-stringify/samples/option.cast.js
index e58aadc9e..1715c124d 100644
--- a/packages/csv-stringify/samples/option.cast.js
+++ b/packages/csv-stringify/samples/option.cast.js
@@ -1,6 +1,6 @@
-const stringify = require('../lib')
-const assert = require('assert')
+import stringify from '../lib/index.js'
+import assert from 'assert'
stringify([{
name: 'foo',
diff --git a/packages/csv-stringify/samples/option.columns_array_with_objects.js b/packages/csv-stringify/samples/option.columns_array_with_objects.js
index 132e265b0..ed17bad3e 100644
--- a/packages/csv-stringify/samples/option.columns_array_with_objects.js
+++ b/packages/csv-stringify/samples/option.columns_array_with_objects.js
@@ -1,6 +1,6 @@
-const stringify = require('../lib')
-const assert = require('assert')
+import stringify from '../lib/index.js'
+import assert from 'assert'
stringify( [
{ a: '1', b: '2' }
diff --git a/packages/csv-stringify/samples/option.columns_array_with_strings.js b/packages/csv-stringify/samples/option.columns_array_with_strings.js
index 3b2501fa9..d986e6336 100644
--- a/packages/csv-stringify/samples/option.columns_array_with_strings.js
+++ b/packages/csv-stringify/samples/option.columns_array_with_strings.js
@@ -1,6 +1,6 @@
-const stringify = require('../lib')
-const assert = require('assert')
+import stringify from '../lib/index.js'
+import assert from 'assert'
stringify( [
{ a: '1', b: '2' }
diff --git a/packages/csv-stringify/samples/option.columns_undefined.js b/packages/csv-stringify/samples/option.columns_undefined.js
index 71318bb4d..8bb20c5ca 100644
--- a/packages/csv-stringify/samples/option.columns_undefined.js
+++ b/packages/csv-stringify/samples/option.columns_undefined.js
@@ -1,6 +1,6 @@
-const stringify = require('../lib')
-const assert = require('assert')
+import stringify from '../lib/index.js'
+import assert from 'assert'
stringify([
{ year: 'XXXX', phone: 'XXX XXXX', nocolumn: 'XXX' },
diff --git a/packages/csv-stringify/samples/option.delimiter_multiple.js b/packages/csv-stringify/samples/option.delimiter_multiple.js
index bdb00e862..85808ff3c 100644
--- a/packages/csv-stringify/samples/option.delimiter_multiple.js
+++ b/packages/csv-stringify/samples/option.delimiter_multiple.js
@@ -1,6 +1,6 @@
-const stringify = require('../lib')
-const assert = require('assert')
+import stringify from '../lib/index.js'
+import assert from 'assert'
stringify([
['1', '2'],
diff --git a/packages/csv-stringify/samples/option.delimiter_single.js b/packages/csv-stringify/samples/option.delimiter_single.js
index 912afd48e..d1b8ee08e 100644
--- a/packages/csv-stringify/samples/option.delimiter_single.js
+++ b/packages/csv-stringify/samples/option.delimiter_single.js
@@ -1,6 +1,6 @@
-const stringify = require('../lib')
-const assert = require('assert')
+import stringify from '../lib/index.js'
+import assert from 'assert'
stringify([
['1', '2'],
diff --git a/packages/csv-stringify/samples/option.header.js b/packages/csv-stringify/samples/option.header.js
index 764f7cb72..029647d25 100644
--- a/packages/csv-stringify/samples/option.header.js
+++ b/packages/csv-stringify/samples/option.header.js
@@ -3,8 +3,8 @@
// birthYear,phone
// OMH,ONKCHhJmjadoA
-const stringify = require('../lib')
-const assert = require('assert')
+import stringify from '../lib/index.js'
+import assert from 'assert'
stringify([
{ year: 'XXXX', phone: 'XXX XXXX' },
diff --git a/packages/csv-stringify/samples/option.header_width_columns_object.js b/packages/csv-stringify/samples/option.header_width_columns_object.js
index b25f448e8..d298a1ac1 100644
--- a/packages/csv-stringify/samples/option.header_width_columns_object.js
+++ b/packages/csv-stringify/samples/option.header_width_columns_object.js
@@ -1,6 +1,6 @@
-const stringify = require('../lib')
-const assert = require('assert')
+import stringify from '../lib/index.js'
+import assert from 'assert'
stringify( [
{ a: '1', b: '2' }
diff --git a/packages/csv-stringify/samples/option.header_with_columns_array_strings.js b/packages/csv-stringify/samples/option.header_with_columns_array_strings.js
index 02db31505..40aa9f8f0 100644
--- a/packages/csv-stringify/samples/option.header_with_columns_array_strings.js
+++ b/packages/csv-stringify/samples/option.header_with_columns_array_strings.js
@@ -1,6 +1,6 @@
-const stringify = require('../lib')
-const assert = require('assert')
+import stringify from '../lib/index.js'
+import assert from 'assert'
stringify( [
{ a: '1', b: '2' }
diff --git a/packages/csv-stringify/samples/option.quoted.js b/packages/csv-stringify/samples/option.quoted.js
index 861326d72..e4e657981 100644
--- a/packages/csv-stringify/samples/option.quoted.js
+++ b/packages/csv-stringify/samples/option.quoted.js
@@ -1,6 +1,6 @@
-const stringify = require('../lib')
-const assert = require('assert')
+import stringify from '../lib/index.js'
+import assert from 'assert'
stringify([
['1', ''],
diff --git a/packages/csv-stringify/samples/option.quoted_empty.js b/packages/csv-stringify/samples/option.quoted_empty.js
index 98ad50df8..3b44a17a3 100644
--- a/packages/csv-stringify/samples/option.quoted_empty.js
+++ b/packages/csv-stringify/samples/option.quoted_empty.js
@@ -1,6 +1,6 @@
-const stringify = require('../lib')
-const assert = require('assert')
+import stringify from '../lib/index.js'
+import assert from 'assert'
stringify([
['1', ''],
diff --git a/packages/csv-stringify/samples/option.quoted_match_regexp.js b/packages/csv-stringify/samples/option.quoted_match_regexp.js
index 8f0c5ef8f..87d8efed5 100644
--- a/packages/csv-stringify/samples/option.quoted_match_regexp.js
+++ b/packages/csv-stringify/samples/option.quoted_match_regexp.js
@@ -1,6 +1,6 @@
-const stringify = require('../lib')
-const assert = require('assert')
+import stringify from '../lib/index.js'
+import assert from 'assert'
stringify([
['a value', '.', 'value.with.dot'],
diff --git a/packages/csv-stringify/samples/option.quoted_match_string.js b/packages/csv-stringify/samples/option.quoted_match_string.js
index 57e4dee83..3c9a18163 100644
--- a/packages/csv-stringify/samples/option.quoted_match_string.js
+++ b/packages/csv-stringify/samples/option.quoted_match_string.js
@@ -1,6 +1,6 @@
-const stringify = require('../lib')
-const assert = require('assert')
+import stringify from '../lib/index.js'
+import assert from 'assert'
stringify([
['a value', '.', 'value.with.dot'],
diff --git a/packages/csv-stringify/samples/option.quoted_string.js b/packages/csv-stringify/samples/option.quoted_string.js
index c56793808..37e4ccf3f 100644
--- a/packages/csv-stringify/samples/option.quoted_string.js
+++ b/packages/csv-stringify/samples/option.quoted_string.js
@@ -1,6 +1,6 @@
-const stringify = require('../lib')
-const assert = require('assert')
+import stringify from '../lib/index.js'
+import assert from 'assert'
stringify([
['1', '', true, 2],
diff --git a/packages/csv-stringify/test/api.coffee b/packages/csv-stringify/test/api.coffee
index 716c35444..0cdd3461d 100644
--- a/packages/csv-stringify/test/api.coffee
+++ b/packages/csv-stringify/test/api.coffee
@@ -1,7 +1,7 @@
-fs = require 'fs'
-generate = require 'csv-generate'
-stringify = require '../lib'
+import fs from 'fs'
+import generate from 'csv-generate'
+import stringify from '../lib/index.js'
describe 'API', ->
diff --git a/packages/csv-stringify/test/api.pipe.coffee b/packages/csv-stringify/test/api.pipe.coffee
index 495c79911..fe0bf5a6c 100644
--- a/packages/csv-stringify/test/api.pipe.coffee
+++ b/packages/csv-stringify/test/api.pipe.coffee
@@ -1,7 +1,7 @@
-fs = require 'fs'
-generate = require 'csv-generate'
-stringify = require '../lib'
+import fs from 'fs'
+import generate from 'csv-generate'
+import stringify from '../lib/index.js'
describe 'API pipe', ->
diff --git a/packages/csv-stringify/test/api.types.ts b/packages/csv-stringify/test/api.types.ts
index 50d3763b0..830b829db 100644
--- a/packages/csv-stringify/test/api.types.ts
+++ b/packages/csv-stringify/test/api.types.ts
@@ -1,7 +1,6 @@
import 'should'
-import * as stringify from '../lib/index'
-import {CastingContext, Options, Stringifier} from '../lib/index'
+import stringify, {CastingContext, Options, Stringifier} from '../lib/index.js'
describe('API Types', () => {
diff --git a/packages/csv-stringify/test/api.write.coffee b/packages/csv-stringify/test/api.write.coffee
index 79c7e7b95..8d4ae4406 100644
--- a/packages/csv-stringify/test/api.write.coffee
+++ b/packages/csv-stringify/test/api.write.coffee
@@ -1,5 +1,5 @@
-stringify = require '../lib'
+import stringify from '../lib/index.js'
describe 'API write', ->
diff --git a/packages/csv-stringify/test/loaders/all.mjs b/packages/csv-stringify/test/loaders/all.mjs
new file mode 100644
index 000000000..4a3828589
--- /dev/null
+++ b/packages/csv-stringify/test/loaders/all.mjs
@@ -0,0 +1,37 @@
+
+import * as coffee from './coffee.mjs'
+import * as ts from 'ts-node/esm'
+
+const coffeeRegex = /\.coffee$|\.litcoffee$|\.coffee\.md$/;
+const tsRegex = /\.ts$/;
+
+export function resolve(specifier, context, defaultResolve) {
+ if (coffeeRegex.test(specifier)) {
+ return coffee.resolve.apply(this, arguments)
+ }
+ if (tsRegex.test(specifier)) {
+ return ts.resolve.apply(this, arguments)
+ }
+ return ts.resolve.apply(this, arguments);
+}
+
+export function getFormat(url, context, defaultGetFormat) {
+ if (coffeeRegex.test(url)) {
+ return coffee.getFormat.apply(this, arguments)
+ }
+ if (tsRegex.test(url)) {
+ return ts.getFormat.apply(this, arguments)
+ }
+ return ts.getFormat.apply(this, arguments);
+}
+
+export function transformSource(source, context, defaultTransformSource) {
+ const { url } = context;
+ if (coffeeRegex.test(url)) {
+ return coffee.transformSource.apply(this, arguments)
+ }
+ if (tsRegex.test(url)) {
+ return ts.transformSource.apply(this, arguments)
+ }
+ return ts.transformSource.apply(this, arguments);
+}
diff --git a/packages/csv-stringify/test/loaders/coffee.mjs b/packages/csv-stringify/test/loaders/coffee.mjs
new file mode 100644
index 000000000..f4945adb7
--- /dev/null
+++ b/packages/csv-stringify/test/loaders/coffee.mjs
@@ -0,0 +1,50 @@
+// coffeescript-loader.mjs
+import { URL, pathToFileURL } from 'url';
+import CoffeeScript from 'coffeescript';
+import { cwd } from 'process';
+
+const baseURL = pathToFileURL(`${cwd()}/`).href;
+
+// CoffeeScript files end in .coffee, .litcoffee or .coffee.md.
+const extensionsRegex = /\.coffee$|\.litcoffee$|\.coffee\.md$/;
+
+export function resolve(specifier, context, defaultResolve) {
+ const { parentURL = baseURL } = context;
+ // Node.js normally errors on unknown file extensions, so return a URL for
+ // specifiers ending in the CoffeeScript file extensions.
+ if (extensionsRegex.test(specifier)) {
+ return {
+ url: new URL(specifier, parentURL).href,
+ stop: true
+ };
+ }
+ // Let Node.js handle all other specifiers.
+ return defaultResolve(specifier, context, defaultResolve);
+}
+
+export function getFormat(url, context, defaultGetFormat) {
+ // Now that we patched resolve to let CoffeeScript URLs through, we need to
+ // tell Node.js what format such URLs should be interpreted as. For the
+ // purposes of this loader, all CoffeeScript URLs are ES modules.
+ if (extensionsRegex.test(url)) {
+ return {
+ format: 'module',
+ stop: true
+ };
+ }
+ // Let Node.js handle all other URLs.
+ return defaultGetFormat(url, context, defaultGetFormat);
+}
+
+export function transformSource(source, context, defaultTransformSource) {
+ const { url, format } = context;
+
+ if (extensionsRegex.test(url)) {
+ return {
+ source: CoffeeScript.compile(String(source), { bare: true })
+ };
+ }
+
+ // Let Node.js handle all other sources.
+ return defaultTransformSource(source, context, defaultTransformSource);
+}
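
The `transformSource` hook above boils down to a single `CoffeeScript.compile` call. A standalone sketch of that step, assuming the `coffeescript` package is installed; the sample source string is illustrative:

    // Sketch of what the loader's transformSource step produces for a .coffee file.
    import CoffeeScript from 'coffeescript'

    const js = CoffeeScript.compile('add = (a, b) -> a + b', { bare: true })
    console.log(js) // plain JavaScript; `bare: true` skips the top-level safety wrapper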
diff --git a/packages/csv-stringify/test/option.bom.coffee b/packages/csv-stringify/test/option.bom.coffee
index 6977ed454..a1c9030e5 100644
--- a/packages/csv-stringify/test/option.bom.coffee
+++ b/packages/csv-stringify/test/option.bom.coffee
@@ -1,6 +1,6 @@
-stringify = require '../lib'
-stringifySync = require '../lib/sync'
+import stringify from '../lib/index.js'
+import stringifySync from '../lib/sync.js'
describe 'Option `bom`', ->
diff --git a/packages/csv-stringify/test/option.cast.coffee b/packages/csv-stringify/test/option.cast.coffee
index 464c46fc8..31386f073 100644
--- a/packages/csv-stringify/test/option.cast.coffee
+++ b/packages/csv-stringify/test/option.cast.coffee
@@ -1,5 +1,5 @@
-stringify = require '../lib'
+import stringify from '../lib/index.js'
describe 'Option `cast`', ->
diff --git a/packages/csv-stringify/test/option.columns.coffee b/packages/csv-stringify/test/option.columns.coffee
index bc1784807..ae71c8aa7 100644
--- a/packages/csv-stringify/test/option.columns.coffee
+++ b/packages/csv-stringify/test/option.columns.coffee
@@ -1,5 +1,5 @@
-stringify = require '../lib'
+import stringify from '../lib/index.js'
describe 'Option `columns`', ->
diff --git a/packages/csv-stringify/test/option.delimiter.coffee b/packages/csv-stringify/test/option.delimiter.coffee
index b56492f16..8dacfae9e 100644
--- a/packages/csv-stringify/test/option.delimiter.coffee
+++ b/packages/csv-stringify/test/option.delimiter.coffee
@@ -1,5 +1,5 @@
-stringify = require '../lib'
+import stringify from '../lib/index.js'
describe 'Option `delimiter`', ->
diff --git a/packages/csv-stringify/test/option.eof.coffee b/packages/csv-stringify/test/option.eof.coffee
index c039eb177..603b1dfcf 100644
--- a/packages/csv-stringify/test/option.eof.coffee
+++ b/packages/csv-stringify/test/option.eof.coffee
@@ -1,5 +1,5 @@
-stringify = require '../lib'
+import stringify from '../lib/index.js'
describe 'Option `eof`', ->
diff --git a/packages/csv-stringify/test/option.escape.coffee b/packages/csv-stringify/test/option.escape.coffee
index 752fd43df..fee4d368a 100644
--- a/packages/csv-stringify/test/option.escape.coffee
+++ b/packages/csv-stringify/test/option.escape.coffee
@@ -1,5 +1,5 @@
-stringify = require '../lib'
+import stringify from '../lib/index.js'
describe 'Option `escape`', ->
diff --git a/packages/csv-stringify/test/option.header.coffee b/packages/csv-stringify/test/option.header.coffee
index 998b90e40..65052f978 100644
--- a/packages/csv-stringify/test/option.header.coffee
+++ b/packages/csv-stringify/test/option.header.coffee
@@ -1,5 +1,5 @@
-stringify = require '../lib'
+import stringify from '../lib/index.js'
describe 'Option `header`', ->
diff --git a/packages/csv-stringify/test/option.quote.coffee b/packages/csv-stringify/test/option.quote.coffee
index 108e545ac..a1e6beaa1 100644
--- a/packages/csv-stringify/test/option.quote.coffee
+++ b/packages/csv-stringify/test/option.quote.coffee
@@ -1,6 +1,5 @@
-fs = require 'fs'
-stringify = require '../lib'
+import stringify from '../lib/index.js'
describe 'Option `quote`', ->
diff --git a/packages/csv-stringify/test/option.quoted.coffee b/packages/csv-stringify/test/option.quoted.coffee
index 71978d2e9..566442da6 100644
--- a/packages/csv-stringify/test/option.quoted.coffee
+++ b/packages/csv-stringify/test/option.quoted.coffee
@@ -1,6 +1,5 @@
-fs = require 'fs'
-stringify = require '../lib'
+import stringify from '../lib/index.js'
describe 'Option `quoted`', ->
diff --git a/packages/csv-stringify/test/option.quoted_empty.coffee b/packages/csv-stringify/test/option.quoted_empty.coffee
index 2e978908f..8dc49aae3 100644
--- a/packages/csv-stringify/test/option.quoted_empty.coffee
+++ b/packages/csv-stringify/test/option.quoted_empty.coffee
@@ -1,6 +1,5 @@
-fs = require 'fs'
-stringify = require '../lib'
+import stringify from '../lib/index.js'
describe 'Option `quoted_empty`', ->
diff --git a/packages/csv-stringify/test/option.quoted_match.coffee b/packages/csv-stringify/test/option.quoted_match.coffee
index 1add96eb2..cc4c13230 100644
--- a/packages/csv-stringify/test/option.quoted_match.coffee
+++ b/packages/csv-stringify/test/option.quoted_match.coffee
@@ -1,6 +1,5 @@
-fs = require 'fs'
-stringify = require '../lib'
+import stringify from '../lib/index.js'
describe 'Option `quoted_match`', ->
diff --git a/packages/csv-stringify/test/option.quoted_string.coffee b/packages/csv-stringify/test/option.quoted_string.coffee
index 53a7a7463..7b01b8741 100644
--- a/packages/csv-stringify/test/option.quoted_string.coffee
+++ b/packages/csv-stringify/test/option.quoted_string.coffee
@@ -1,6 +1,5 @@
-fs = require 'fs'
-stringify = require '../lib'
+import stringify from '../lib/index.js'
describe 'Option `quoted_string`', ->
diff --git a/packages/csv-stringify/test/option.record_delimiter.coffee b/packages/csv-stringify/test/option.record_delimiter.coffee
index b354839fd..c9ab0c2bf 100644
--- a/packages/csv-stringify/test/option.record_delimiter.coffee
+++ b/packages/csv-stringify/test/option.record_delimiter.coffee
@@ -1,5 +1,5 @@
-stringify = require '../lib'
+import stringify from '../lib/index.js'
describe 'Option `record_delimiter`', ->
diff --git a/packages/csv-stringify/test/options.coffee b/packages/csv-stringify/test/options.coffee
index 2bea35c8a..b4100d902 100644
--- a/packages/csv-stringify/test/options.coffee
+++ b/packages/csv-stringify/test/options.coffee
@@ -1,5 +1,5 @@
-stringify = require '../lib'
+import stringify from '../lib/index.js'
describe 'Options', ->
diff --git a/packages/csv-stringify/test/samples.coffee b/packages/csv-stringify/test/samples.coffee
index bb8778d5f..e85d0ac1e 100644
--- a/packages/csv-stringify/test/samples.coffee
+++ b/packages/csv-stringify/test/samples.coffee
@@ -1,20 +1,17 @@
-fs = require 'fs'
-util = require 'util'
-path = require 'path'
-{exec} = require 'child_process'
-each = require 'each'
-
-it 'samples', (callback) ->
- dir = path.resolve __dirname, '../samples'
- fs.readdir dir, (err, samples ) ->
- return callback err if err
- each samples.filter( (sample) -> /\.js/.test.sample)
- .call (sample, callback) ->
+import fs from 'fs'
+import path from 'path'
+import {exec} from 'child_process'
+
+import { fileURLToPath } from 'url';
+__dirname = path.dirname fileURLToPath `import.meta.url`
+dir = path.resolve __dirname, '../samples'
+samples = fs.readdirSync dir
+
+describe 'Samples', ->
+
+ for sample in samples
+ continue unless /\.js$/.test sample
+ it "Sample #{sample}", (callback) ->
exec "node #{path.resolve dir, sample}", (err) ->
callback err
- .next callback
-
-
-
-
diff --git a/packages/csv-stringify/test/sync.coffee b/packages/csv-stringify/test/sync.coffee
index b05cbe6de..535ceb797 100644
--- a/packages/csv-stringify/test/sync.coffee
+++ b/packages/csv-stringify/test/sync.coffee
@@ -1,5 +1,5 @@
-stringify = require '../lib/sync'
+import stringify from '../lib/sync.js'
describe 'sync', ->
diff --git a/packages/csv-stringify/test/types.coffee b/packages/csv-stringify/test/types.coffee
index a944ecdb5..fc58fadb8 100644
--- a/packages/csv-stringify/test/types.coffee
+++ b/packages/csv-stringify/test/types.coffee
@@ -1,5 +1,5 @@
-stringify = require '../lib'
+import stringify from '../lib/index.js'
describe 'types', ->
diff --git a/packages/csv-stringify/tsconfig.json b/packages/csv-stringify/tsconfig.json
index 9f40782ea..4db508a7c 100644
--- a/packages/csv-stringify/tsconfig.json
+++ b/packages/csv-stringify/tsconfig.json
@@ -1,7 +1,8 @@
{
"compileOnSave": false,
"compilerOptions": {
- "target": "es6",
+ "esModuleInterop": true,
+ "module": "ES2020",
"moduleResolution": "node",
"strict": true,
}
diff --git a/packages/csv/lib/browser/index.js b/packages/csv/lib/browser/index.js
index 53bce8807..e69de29bb 100644
--- a/packages/csv/lib/browser/index.js
+++ b/packages/csv/lib/browser/index.js
@@ -1,10947 +0,0 @@
-(function(f){if(typeof exports==="object"&&typeof module!=="undefined"){module.exports=f()}else if(typeof define==="function"&&define.amd){define([],f)}else{var g;if(typeof window!=="undefined"){g=window}else if(typeof global!=="undefined"){g=global}else if(typeof self!=="undefined"){g=self}else{g=this}g.parse = f()}})(function(){var define,module,exports;return (function(){function r(e,n,t){function o(i,f){if(!n[i]){if(!e[i]){var c="function"==typeof require&&require;if(!f&&c)return c(i,!0);if(u)return u(i,!0);var a=new Error("Cannot find module '"+i+"'");throw a.code="MODULE_NOT_FOUND",a}var p=n[i]={exports:{}};e[i][0].call(p.exports,function(r){var n=e[i][1][r];return o(n||r)},p,p.exports,r,e,n,t)}return n[i].exports}for(var u="function"==typeof require&&require,i=0;i ref; i = 1 <= ref ? ++j : --j) {
- source = arguments[i];
-
- if (exports.is_object_literal(source)) {
- if (!exports.is_object_literal(target)) {
- target = {};
- }
-
- for (name in source) {
- if (name === '__proto__') {
- continue;
- }
-
- target[name] = exports.mutate(target[name], source[name]);
- }
- } else if (Array.isArray(source)) {
- target = function () {
- var k, len, results;
- results = [];
-
- for (k = 0, len = source.length; k < len; k++) {
- v = source[k];
- results.push(exports.mutate(void 0, v));
- }
-
- return results;
- }();
- } else if (source !== void 0) {
- target = source;
- }
- }
-
- return target;
-};
-
-exports.snake_case = function snake_case(source) {
- var convert = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : true;
- var name, src, target, u;
- target = {};
-
- if (exports.is_object_literal(source)) {
- u = typeof convert === 'number' && convert > 0 ? convert - 1 : convert;
-
- for (name in source) {
- src = source[name];
-
- if (convert) {
- name = _snake_case(name);
- }
-
- target[name] = exports.snake_case(src, u);
- }
- } else {
- target = source;
- }
-
- return target;
-};
-
-exports.compare = function compare(el1, el2) {
- var i, j, k, key, keys1, keys2, len, ref;
-
- if (exports.is_object_literal(el1)) {
- if (!exports.is_object_literal(el2)) {
- return false;
- }
-
- keys1 = Object.keys(el1).sort();
- keys2 = Object.keys(el2).sort();
-
- if (keys1.length !== keys2.length) {
- return false;
- }
-
- for (i = j = 0, len = keys1.length; j < len; i = ++j) {
- key = keys1[i];
-
- if (key !== keys2[i]) {
- return false;
- }
-
- if (!exports.compare(el1[key], el2[key])) {
- return false;
- }
- }
- } else if (Array.isArray(el1)) {
- if (!Array.isArray(el2)) {
- return false;
- }
-
- if (el1.length !== el2.length) {
- return false;
- }
-
- for (i = k = 0, ref = el1.length; 0 <= ref ? k < ref : k > ref; i = 0 <= ref ? ++k : --k) {
- if (!exports.compare(el1[i], el2[i])) {
- return false;
- }
- }
- } else {
- if (el1 !== el2) {
- return false;
- }
- }
-
- return true;
-};
-
-_snake_case = function _snake_case(str) {
- return str.replace(/([A-Z])/g, function (_, match, index) {
- return '_' + match.toLowerCase();
- });
-};
-
-exports.is_object = function is_object(obj) {
- return obj && _typeof(obj) === 'object' && !Array.isArray(obj);
-};
-
-exports.is_object_literal = function is_object_literal(obj) {
- var test;
- test = obj;
-
- if (_typeof(obj) !== 'object' || obj === null) {
- return false;
- } else {
- if (Object.getPrototypeOf(test) === null) {
- return true;
- }
-
- while (!false) {
- if (Object.getPrototypeOf(test = Object.getPrototypeOf(test)) === null) {
- break;
- }
- }
-
- return Object.getPrototypeOf(obj) === test;
- }
-};
-
-},{}],2:[function(require,module,exports){
-(function (Buffer){(function (){
-
-/*
-CSV Generate - main module
-
-Please look at the [project documentation](https://csv.js.org/generate/) for
-additional information.
-*/
-
-const stream = require('stream')
-const util = require('util')
-
-module.exports = function(){
- let options
- let callback
- if(arguments.length === 2){
- options = arguments[0]
- callback = arguments[1]
- }else if(arguments.length === 1){
- if(typeof arguments[0] === 'function'){
- options = {}
- callback = arguments[0]
- }else{
- options = arguments[0]
- }
- }else if(arguments.length === 0){
- options = {}
- }
- const generator = new Generator(options)
- if(callback){
- const data = []
- generator.on('readable', function(){
- let d; while(d = generator.read()){
- data.push(d)
- }
- })
- generator.on('error', callback)
- generator.on('end', function(){
- if(generator.options.objectMode){
- callback(null, data)
- }else{
- if(generator.options.encoding){
- callback(null, data.join(''))
- }else{
- callback(null, Buffer.concat(data))
- }
- }
- })
- }
- return generator
-}
-
-Generator = function(options = {}){
- // Convert Stream Readable options if underscored
- if(options.high_water_mark){
- options.highWaterMark = options.high_water_mark
- }
- if(options.object_mode){
- options.objectMode = options.object_mode
- }
- // Call parent constructor
- stream.Readable.call(this, options)
- // Clone and camelize options
- this.options = {}
- for(let k in options){
- this.options[Generator.camelize(k)] = options[k]
- }
- // Normalize options
- const dft = {
- columns: 8,
- delimiter: ',',
- duration: null,
- encoding: null,
- end: null,
- eof: false,
- fixedSize: false,
- length: -1,
- maxWordLength: 16,
- rowDelimiter: '\n',
- seed: false,
- sleep: 0,
- }
- for(const k in dft){
- if(this.options[k] === undefined){
- this.options[k] = dft[k]
- }
- }
- // Default values
- if(this.options.eof === true){
- this.options.eof = this.options.rowDelimiter
- }
- // State
- this._ = {
- start_time: this.options.duration ? Date.now() : null,
- fixed_size_buffer: '',
- count_written: 0,
- count_created: 0,
- }
- if(typeof this.options.columns === 'number'){
- this.options.columns = new Array(this.options.columns)
- }
- const accepted_header_types = Object.keys(Generator).filter( (t) => ( !['super_', 'camelize'].includes(t) ))
- for(let i = 0; i < this.options.columns.length; i++){
- const v = this.options.columns[i] || 'ascii'
- if(typeof v === 'string'){
- if(!accepted_header_types.includes(v)){
- throw Error(`Invalid column type: got "${v}", default values are ${JSON.stringify(accepted_header_types)}`)
- }
- this.options.columns[i] = Generator[v]
- }
- }
- return this
-}
-util.inherits(Generator, stream.Readable)
-// Export the class
-module.exports.Generator = Generator
-// Generate a random number between 0 and 1 with 2 decimals. The function is idempotent if it detect the "seed" option.
-Generator.prototype.random = function(){
- if(this.options.seed){
- return this.options.seed = this.options.seed * Math.PI * 100 % 100 / 100
- }else{
- return Math.random()
- }
-}
-// Stop the generation.
-Generator.prototype.end = function(){
- this.push(null)
-}
-// Put new data into the read queue.
-Generator.prototype._read = function(size){
- // Already started
- const data = []
- let length = this._.fixed_size_buffer.length
- if(length !== 0){
- data.push(this._.fixed_size_buffer)
- }
- while(true){
- // Time for some rest: flush first and stop later
- if( (this._.count_created === this.options.length) || (this.options.end && Date.now() > this.options.end) || (this.options.duration && Date.now() > this._.start_time + this.options.duration) ){
- // Flush
- if(data.length){
- if(this.options.objectMode){
- for(const line of data){
- this.__push(line)
- }
- }else{
- this.__push(data.join('') + (this.options.eof ? this.options.eof : ''))
- }
- }
- // Stop
- return this.push(null)
- }
- // Create the line
- let line = []
- let lineLength
- this.options.columns.forEach((fn) => {
- line.push(fn(this))
- })
- // for(const header in this.options.columns){
- // // Create the field
- // line.push(header(this))
- // }
- // Obtain line length
- if(this.options.objectMode){
- lineLength = 0
- for(const column of line)
- lineLength += column.length
- }else{
- // Stringify the line
- line = (this._.count_created === 0 ? '' : this.options.rowDelimiter)+line.join(this.options.delimiter)
- lineLength = line.length
- }
- this._.count_created++
- if(length + lineLength > size){
- if(this.options.objectMode){
- data.push(line)
- for(const line of data){
- this.__push(line)
- }
- }else{
- if(this.options.fixedSize){
- this._.fixed_size_buffer = line.substr(size - length)
- data.push(line.substr(0, size - length))
- }else{
- data.push(line)
- }
- this.__push(data.join(''))
- }
- return
- }
- length += lineLength
- data.push(line)
- }
-}
-// Put new data into the read queue.
-Generator.prototype.__push = function(record){
- this._.count_written++
- if(this.options.sleep > 0){
- setTimeout( () => {
- this.push(record)
- }, this.options.sleep)
- }else{
- this.push(record)
- }
-}
-// Generate an ASCII value.
-Generator.ascii = function(gen){
- // Column
- const column = []
- const nb_chars = Math.ceil(gen.random() * gen.options.maxWordLength)
- for(let i=0; i= this.size){
- this.resize()
- if(length >= this.size){
- throw Error('INVALID_BUFFER_STATE')
- }
- }
- const buf = this.buf
- this.buf = Buffer.alloc(this.size)
- val.copy(this.buf, 0)
- buf.copy(this.buf, val.length)
- this.length += val.length
- }else{
- const length = this.length++
- if(length === this.size){
- this.resize()
- }
- const buf = this.clone()
- this.buf[0] = val
- buf.copy(this.buf,1, 0, length)
- }
- }
- append(val){
- const length = this.length++
- if(length === this.size){
- this.resize()
- }
- this.buf[length] = val
- }
- clone(){
- return Buffer.from(this.buf.slice(0, this.length))
- }
- resize(){
- const length = this.length
- this.size = this.size * 2
- const buf = Buffer.alloc(this.size)
- this.buf.copy(buf,0, 0, length)
- this.buf = buf
- }
- toString(encoding){
- if(encoding){
- return this.buf.slice(0, this.length).toString(encoding)
- }else{
- return Uint8Array.prototype.slice.call(this.buf.slice(0, this.length))
- }
- }
- toJSON(){
- return this.toString('utf8')
- }
- reset(){
- this.length = 0
- }
-}
-
-module.exports = ResizeableBuffer
-
-}).call(this)}).call(this,require("buffer").Buffer)
-},{"buffer":10}],4:[function(require,module,exports){
-(function (Buffer,setImmediate){(function (){
-
-/*
-CSV Parse
-
-Please look at the [project documentation](https://csv.js.org/parse/) for
-additional information.
-*/
-
-const { Transform } = require('stream')
-const ResizeableBuffer = require('./ResizeableBuffer')
-
-// white space characters
-// https://en.wikipedia.org/wiki/Whitespace_character
-// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions/Character_Classes#Types
-// \f\n\r\t\v\u00a0\u1680\u2000-\u200a\u2028\u2029\u202f\u205f\u3000\ufeff
-const tab = 9
-const nl = 10 // \n, 0x0A in hexadecimal, 10 in decimal
-const np = 12
-const cr = 13 // \r, 0x0D in hexadécimal, 13 in decimal
-const space = 32
-const boms = {
- // Note, the following are equals:
- // Buffer.from("\ufeff")
- // Buffer.from([239, 187, 191])
- // Buffer.from('EFBBBF', 'hex')
- 'utf8': Buffer.from([239, 187, 191]),
- // Note, the following are equals:
- // Buffer.from "\ufeff", 'utf16le
- // Buffer.from([255, 254])
- 'utf16le': Buffer.from([255, 254])
-}
-
-class Parser extends Transform {
- constructor(opts = {}){
- super({...{readableObjectMode: true}, ...opts, encoding: null})
- this.__originalOptions = opts
- this.__normalizeOptions(opts)
- }
- __normalizeOptions(opts){
- const options = {}
- // Merge with user options
- for(let opt in opts){
- options[underscore(opt)] = opts[opt]
- }
- // Normalize option `encoding`
- // Note: defined first because other options depends on it
- // to convert chars/strings into buffers.
- if(options.encoding === undefined || options.encoding === true){
- options.encoding = 'utf8'
- }else if(options.encoding === null || options.encoding === false){
- options.encoding = null
- }else if(typeof options.encoding !== 'string' && options.encoding !== null){
- throw new CsvError('CSV_INVALID_OPTION_ENCODING', [
- 'Invalid option encoding:',
- 'encoding must be a string or null to return a buffer,',
- `got ${JSON.stringify(options.encoding)}`
- ], options)
- }
- // Normalize option `bom`
- if(options.bom === undefined || options.bom === null || options.bom === false){
- options.bom = false
- }else if(options.bom !== true){
- throw new CsvError('CSV_INVALID_OPTION_BOM', [
- 'Invalid option bom:', 'bom must be true,',
- `got ${JSON.stringify(options.bom)}`
- ], options)
- }
- // Normalize option `cast`
- let fnCastField = null
- if(options.cast === undefined || options.cast === null || options.cast === false || options.cast === ''){
- options.cast = undefined
- }else if(typeof options.cast === 'function'){
- fnCastField = options.cast
- options.cast = true
- }else if(options.cast !== true){
- throw new CsvError('CSV_INVALID_OPTION_CAST', [
- 'Invalid option cast:', 'cast must be true or a function,',
- `got ${JSON.stringify(options.cast)}`
- ], options)
- }
- // Normalize option `cast_date`
- if(options.cast_date === undefined || options.cast_date === null || options.cast_date === false || options.cast_date === ''){
- options.cast_date = false
- }else if(options.cast_date === true){
- options.cast_date = function(value){
- const date = Date.parse(value)
- return !isNaN(date) ? new Date(date) : value
- }
- }else if(typeof options.cast_date !== 'function'){
- throw new CsvError('CSV_INVALID_OPTION_CAST_DATE', [
- 'Invalid option cast_date:', 'cast_date must be true or a function,',
- `got ${JSON.stringify(options.cast_date)}`
- ], options)
- }
- // Normalize option `columns`
- let fnFirstLineToHeaders = null
- if(options.columns === true){
- // Fields in the first line are converted as-is to columns
- fnFirstLineToHeaders = undefined
- }else if(typeof options.columns === 'function'){
- fnFirstLineToHeaders = options.columns
- options.columns = true
- }else if(Array.isArray(options.columns)){
- options.columns = normalizeColumnsArray(options.columns)
- }else if(options.columns === undefined || options.columns === null || options.columns === false){
- options.columns = false
- }else{
- throw new CsvError('CSV_INVALID_OPTION_COLUMNS', [
- 'Invalid option columns:',
- 'expect an array, a function or true,',
- `got ${JSON.stringify(options.columns)}`
- ], options)
- }
- // Normalize option `columns_duplicates_to_array`
- if(options.columns_duplicates_to_array === undefined || options.columns_duplicates_to_array === null || options.columns_duplicates_to_array === false){
- options.columns_duplicates_to_array = false
- }else if(options.columns_duplicates_to_array !== true){
- throw new CsvError('CSV_INVALID_OPTION_COLUMNS_DUPLICATES_TO_ARRAY', [
- 'Invalid option columns_duplicates_to_array:',
- 'expect an boolean,',
- `got ${JSON.stringify(options.columns_duplicates_to_array)}`
- ], options)
- }else if(options.columns === false){
- throw new CsvError('CSV_INVALID_OPTION_COLUMNS_DUPLICATES_TO_ARRAY', [
- 'Invalid option columns_duplicates_to_array:',
- 'the `columns` mode must be activated.'
- ], options)
- }
- // Normalize option `comment`
- if(options.comment === undefined || options.comment === null || options.comment === false || options.comment === ''){
- options.comment = null
- }else{
- if(typeof options.comment === 'string'){
- options.comment = Buffer.from(options.comment, options.encoding)
- }
- if(!Buffer.isBuffer(options.comment)){
- throw new CsvError('CSV_INVALID_OPTION_COMMENT', [
- 'Invalid option comment:',
- 'comment must be a buffer or a string,',
- `got ${JSON.stringify(options.comment)}`
- ], options)
- }
- }
- // Normalize option `delimiter`
- const delimiter_json = JSON.stringify(options.delimiter)
- if(!Array.isArray(options.delimiter)) options.delimiter = [options.delimiter]
- if(options.delimiter.length === 0){
- throw new CsvError('CSV_INVALID_OPTION_DELIMITER', [
- 'Invalid option delimiter:',
- 'delimiter must be a non empty string or buffer or array of string|buffer,',
- `got ${delimiter_json}`
- ], options)
- }
- options.delimiter = options.delimiter.map(function(delimiter){
- if(delimiter === undefined || delimiter === null || delimiter === false){
- return Buffer.from(',', options.encoding)
- }
- if(typeof delimiter === 'string'){
- delimiter = Buffer.from(delimiter, options.encoding)
- }
- if( !Buffer.isBuffer(delimiter) || delimiter.length === 0){
- throw new CsvError('CSV_INVALID_OPTION_DELIMITER', [
- 'Invalid option delimiter:',
- 'delimiter must be a non empty string or buffer or array of string|buffer,',
- `got ${delimiter_json}`
- ], options)
- }
- return delimiter
- })
- // Normalize option `escape`
- if(options.escape === undefined || options.escape === true){
- options.escape = Buffer.from('"', options.encoding)
- }else if(typeof options.escape === 'string'){
- options.escape = Buffer.from(options.escape, options.encoding)
- }else if (options.escape === null || options.escape === false){
- options.escape = null
- }
- if(options.escape !== null){
- if(!Buffer.isBuffer(options.escape)){
- throw new Error(`Invalid Option: escape must be a buffer, a string or a boolean, got ${JSON.stringify(options.escape)}`)
- }
- }
- // Normalize option `from`
- if(options.from === undefined || options.from === null){
- options.from = 1
- }else{
- if(typeof options.from === 'string' && /\d+/.test(options.from)){
- options.from = parseInt(options.from)
- }
- if(Number.isInteger(options.from)){
- if(options.from < 0){
- throw new Error(`Invalid Option: from must be a positive integer, got ${JSON.stringify(opts.from)}`)
- }
- }else{
- throw new Error(`Invalid Option: from must be an integer, got ${JSON.stringify(options.from)}`)
- }
- }
- // Normalize option `from_line`
- if(options.from_line === undefined || options.from_line === null){
- options.from_line = 1
- }else{
- if(typeof options.from_line === 'string' && /\d+/.test(options.from_line)){
- options.from_line = parseInt(options.from_line)
- }
- if(Number.isInteger(options.from_line)){
- if(options.from_line <= 0){
- throw new Error(`Invalid Option: from_line must be a positive integer greater than 0, got ${JSON.stringify(opts.from_line)}`)
- }
- }else{
- throw new Error(`Invalid Option: from_line must be an integer, got ${JSON.stringify(opts.from_line)}`)
- }
- }
- // Normalize options `ignore_last_delimiters`
- if(options.ignore_last_delimiters === undefined || options.ignore_last_delimiters === null){
- options.ignore_last_delimiters = false
- }else if(typeof options.ignore_last_delimiters === 'number'){
- options.ignore_last_delimiters = Math.floor(options.ignore_last_delimiters)
- if(options.ignore_last_delimiters === 0){
- options.ignore_last_delimiters = false
- }
- }else if(typeof options.ignore_last_delimiters !== 'boolean'){
- throw new CsvError('CSV_INVALID_OPTION_IGNORE_LAST_DELIMITERS', [
- 'Invalid option `ignore_last_delimiters`:',
- 'the value must be a boolean value or an integer,',
- `got ${JSON.stringify(options.ignore_last_delimiters)}`
- ], options)
- }
- if(options.ignore_last_delimiters === true && options.columns === false){
- throw new CsvError('CSV_IGNORE_LAST_DELIMITERS_REQUIRES_COLUMNS', [
- 'The option `ignore_last_delimiters`',
- 'requires the activation of the `columns` option'
- ], options)
- }
- // Normalize option `info`
- if(options.info === undefined || options.info === null || options.info === false){
- options.info = false
- }else if(options.info !== true){
- throw new Error(`Invalid Option: info must be true, got ${JSON.stringify(options.info)}`)
- }
- // Normalize option `max_record_size`
- if(options.max_record_size === undefined || options.max_record_size === null || options.max_record_size === false){
- options.max_record_size = 0
- }else if(Number.isInteger(options.max_record_size) && options.max_record_size >= 0){
- // Great, nothing to do
- }else if(typeof options.max_record_size === 'string' && /\d+/.test(options.max_record_size)){
- options.max_record_size = parseInt(options.max_record_size)
- }else{
- throw new Error(`Invalid Option: max_record_size must be a positive integer, got ${JSON.stringify(options.max_record_size)}`)
- }
- // Normalize option `objname`
- if(options.objname === undefined || options.objname === null || options.objname === false){
- options.objname = undefined
- }else if(Buffer.isBuffer(options.objname)){
- if(options.objname.length === 0){
- throw new Error(`Invalid Option: objname must be a non empty buffer`)
- }
- if(options.encoding === null){
- // Don't call `toString`, leave objname as a buffer
- }else{
- options.objname = options.objname.toString(options.encoding)
- }
- }else if(typeof options.objname === 'string'){
- if(options.objname.length === 0){
- throw new Error(`Invalid Option: objname must be a non empty string`)
- }
- // Great, nothing to do
- }else{
- throw new Error(`Invalid Option: objname must be a string or a buffer, got ${options.objname}`)
- }
- // Normalize option `on_record`
- if(options.on_record === undefined || options.on_record === null){
- options.on_record = undefined
- }else if(typeof options.on_record !== 'function'){
- throw new CsvError('CSV_INVALID_OPTION_ON_RECORD', [
- 'Invalid option `on_record`:',
- 'expect a function,',
- `got ${JSON.stringify(options.on_record)}`
- ], options)
- }
- // Normalize option `quote`
- if(options.quote === null || options.quote === false || options.quote === ''){
- options.quote = null
- }else{
- if(options.quote === undefined || options.quote === true){
- options.quote = Buffer.from('"', options.encoding)
- }else if(typeof options.quote === 'string'){
- options.quote = Buffer.from(options.quote, options.encoding)
- }
- if(!Buffer.isBuffer(options.quote)){
- throw new Error(`Invalid Option: quote must be a buffer or a string, got ${JSON.stringify(options.quote)}`)
- }
- }
- // Normalize option `raw`
- if(options.raw === undefined || options.raw === null || options.raw === false){
- options.raw = false
- }else if(options.raw !== true){
- throw new Error(`Invalid Option: raw must be true, got ${JSON.stringify(options.raw)}`)
- }
- // Normalize option `record_delimiter`
- if(!options.record_delimiter){
- options.record_delimiter = []
- }else if(!Array.isArray(options.record_delimiter)){
- options.record_delimiter = [options.record_delimiter]
- }
- options.record_delimiter = options.record_delimiter.map( function(rd){
- if(typeof rd === 'string'){
- rd = Buffer.from(rd, options.encoding)
- }
- return rd
- })
- // Normalize option `relax`
- if(typeof options.relax === 'boolean'){
- // Great, nothing to do
- }else if(options.relax === undefined || options.relax === null){
- options.relax = false
- }else{
- throw new Error(`Invalid Option: relax must be a boolean, got ${JSON.stringify(options.relax)}`)
- }
- // Normalize option `relax_column_count`
- if(typeof options.relax_column_count === 'boolean'){
- // Great, nothing to do
- }else if(options.relax_column_count === undefined || options.relax_column_count === null){
- options.relax_column_count = false
- }else{
- throw new Error(`Invalid Option: relax_column_count must be a boolean, got ${JSON.stringify(options.relax_column_count)}`)
- }
- if(typeof options.relax_column_count_less === 'boolean'){
- // Great, nothing to do
- }else if(options.relax_column_count_less === undefined || options.relax_column_count_less === null){
- options.relax_column_count_less = false
- }else{
- throw new Error(`Invalid Option: relax_column_count_less must be a boolean, got ${JSON.stringify(options.relax_column_count_less)}`)
- }
- if(typeof options.relax_column_count_more === 'boolean'){
- // Great, nothing to do
- }else if(options.relax_column_count_more === undefined || options.relax_column_count_more === null){
- options.relax_column_count_more = false
- }else{
- throw new Error(`Invalid Option: relax_column_count_more must be a boolean, got ${JSON.stringify(options.relax_column_count_more)}`)
- }
- // Normalize option `skip_empty_lines`
- if(typeof options.skip_empty_lines === 'boolean'){
- // Great, nothing to do
- }else if(options.skip_empty_lines === undefined || options.skip_empty_lines === null){
- options.skip_empty_lines = false
- }else{
- throw new Error(`Invalid Option: skip_empty_lines must be a boolean, got ${JSON.stringify(options.skip_empty_lines)}`)
- }
- // Normalize option `skip_lines_with_empty_values`
- if(typeof options.skip_lines_with_empty_values === 'boolean'){
- // Great, nothing to do
- }else if(options.skip_lines_with_empty_values === undefined || options.skip_lines_with_empty_values === null){
- options.skip_lines_with_empty_values = false
- }else{
- throw new Error(`Invalid Option: skip_lines_with_empty_values must be a boolean, got ${JSON.stringify(options.skip_lines_with_empty_values)}`)
- }
- // Normalize option `skip_lines_with_error`
- if(typeof options.skip_lines_with_error === 'boolean'){
- // Great, nothing to do
- }else if(options.skip_lines_with_error === undefined || options.skip_lines_with_error === null){
- options.skip_lines_with_error = false
- }else{
- throw new Error(`Invalid Option: skip_lines_with_error must be a boolean, got ${JSON.stringify(options.skip_lines_with_error)}`)
- }
- // Normalize option `rtrim`
- if(options.rtrim === undefined || options.rtrim === null || options.rtrim === false){
- options.rtrim = false
- }else if(options.rtrim !== true){
- throw new Error(`Invalid Option: rtrim must be a boolean, got ${JSON.stringify(options.rtrim)}`)
- }
- // Normalize option `ltrim`
- if(options.ltrim === undefined || options.ltrim === null || options.ltrim === false){
- options.ltrim = false
- }else if(options.ltrim !== true){
- throw new Error(`Invalid Option: ltrim must be a boolean, got ${JSON.stringify(options.ltrim)}`)
- }
- // Normalize option `trim`
- if(options.trim === undefined || options.trim === null || options.trim === false){
- options.trim = false
- }else if(options.trim !== true){
- throw new Error(`Invalid Option: trim must be a boolean, got ${JSON.stringify(options.trim)}`)
- }
- // Normalize options `trim`, `ltrim` and `rtrim`
- if(options.trim === true && opts.ltrim !== false){
- options.ltrim = true
- }else if(options.ltrim !== true){
- options.ltrim = false
- }
- if(options.trim === true && opts.rtrim !== false){
- options.rtrim = true
- }else if(options.rtrim !== true){
- options.rtrim = false
- }
- // Normalize option `to`
- if(options.to === undefined || options.to === null){
- options.to = -1
- }else{
- if(typeof options.to === 'string' && /\d+/.test(options.to)){
- options.to = parseInt(options.to)
- }
- if(Number.isInteger(options.to)){
- if(options.to <= 0){
- throw new Error(`Invalid Option: to must be a positive integer greater than 0, got ${JSON.stringify(opts.to)}`)
- }
- }else{
- throw new Error(`Invalid Option: to must be an integer, got ${JSON.stringify(opts.to)}`)
- }
- }
- // Normalize option `to_line`
- if(options.to_line === undefined || options.to_line === null){
- options.to_line = -1
- }else{
- if(typeof options.to_line === 'string' && /\d+/.test(options.to_line)){
- options.to_line = parseInt(options.to_line)
- }
- if(Number.isInteger(options.to_line)){
- if(options.to_line <= 0){
- throw new Error(`Invalid Option: to_line must be a positive integer greater than 0, got ${JSON.stringify(opts.to_line)}`)
- }
- }else{
- throw new Error(`Invalid Option: to_line must be an integer, got ${JSON.stringify(opts.to_line)}`)
- }
- }
- this.info = {
- bytes: 0,
- comment_lines: 0,
- empty_lines: 0,
- invalid_field_length: 0,
- lines: 1,
- records: 0
- }
- this.options = options
- this.state = {
- bomSkipped: false,
- bufBytesStart: 0,
- castField: fnCastField,
- commenting: false,
- // Current error encountered by a record
- error: undefined,
- enabled: options.from_line === 1,
- escaping: false,
- // escapeIsQuote: options.escape === options.quote,
- escapeIsQuote: Buffer.isBuffer(options.escape) && Buffer.isBuffer(options.quote) && Buffer.compare(options.escape, options.quote) === 0,
- // columns can be `false`, `true`, `Array`
- expectedRecordLength: Array.isArray(options.columns) ? options.columns.length : undefined,
- field: new ResizeableBuffer(20),
- firstLineToHeaders: fnFirstLineToHeaders,
- needMoreDataSize: Math.max(
- // Skip if the remaining buffer is smaller than the comment
- options.comment !== null ? options.comment.length : 0,
- // Skip if the remaining buffer can hold a delimiter
- ...options.delimiter.map( (delimiter) => delimiter.length),
- // Skip if the remaining buffer can hold an escape sequence
- options.quote !== null ? options.quote.length : 0,
- ),
- previousBuf: undefined,
- quoting: false,
- stop: false,
- rawBuffer: new ResizeableBuffer(100),
- record: [],
- recordHasError: false,
- record_length: 0,
- recordDelimiterMaxLength: options.record_delimiter.length === 0 ? 2 : Math.max(...options.record_delimiter.map( (v) => v.length)),
- trimChars: [Buffer.from(' ', options.encoding)[0], Buffer.from('\t', options.encoding)[0]],
- wasQuoting: false,
- wasRowDelimiter: false
- }
- }
- // Implementation of `Transform._transform`
- _transform(buf, encoding, callback){
- if(this.state.stop === true){
- return
- }
- const err = this.__parse(buf, false)
- if(err !== undefined){
- this.state.stop = true
- }
- callback(err)
- }
- // Implementation of `Transform._flush`
- _flush(callback){
- if(this.state.stop === true){
- return
- }
- const err = this.__parse(undefined, true)
- callback(err)
- }
- // Central parser implementation
- __parse(nextBuf, end){
- const {bom, comment, escape, from_line, ltrim, max_record_size, quote, raw, relax, rtrim, skip_empty_lines, to, to_line} = this.options
- let {record_delimiter} = this.options
- const {bomSkipped, previousBuf, rawBuffer, escapeIsQuote} = this.state
- let buf
- if(previousBuf === undefined){
- if(nextBuf === undefined){
- // Handle empty string
- this.push(null)
- return
- }else{
- buf = nextBuf
- }
- }else if(previousBuf !== undefined && nextBuf === undefined){
- buf = previousBuf
- }else{
- buf = Buffer.concat([previousBuf, nextBuf])
- }
- // Handle UTF BOM
- if(bomSkipped === false){
- if(bom === false){
- this.state.bomSkipped = true
- }else if(buf.length < 3){
- // Not enough data
- if(end === false){
- // Wait for more data
- this.state.previousBuf = buf
- return
- }
- }else{
- for(let encoding in boms){
- if(boms[encoding].compare(buf, 0, boms[encoding].length) === 0){
- // Skip BOM
- let bomLength = boms[encoding].length
- this.state.bufBytesStart += bomLength
- buf = buf.slice(bomLength)
- // Renormalize original options with the new encoding
- this.__normalizeOptions({...this.__originalOptions, encoding: encoding})
- break
- }
- }
- this.state.bomSkipped = true
- }
- }
- const bufLen = buf.length
- let pos
- for(pos = 0; pos < bufLen; pos++){
- // Ensure we get enough space to look ahead
- // There should be a way to move this out of the loop
- if(this.__needMoreData(pos, bufLen, end)){
- break
- }
- if(this.state.wasRowDelimiter === true){
- this.info.lines++
- this.state.wasRowDelimiter = false
- }
- if(to_line !== -1 && this.info.lines > to_line){
- this.state.stop = true
- this.push(null)
- return
- }
- // Auto discovery of record_delimiter, unix, mac and windows supported
- if(this.state.quoting === false && record_delimiter.length === 0){
- const record_delimiterCount = this.__autoDiscoverRecordDelimiter(buf, pos)
- if(record_delimiterCount){
- record_delimiter = this.options.record_delimiter
- }
- }
- const chr = buf[pos]
- if(raw === true){
- rawBuffer.append(chr)
- }
- if((chr === cr || chr === nl) && this.state.wasRowDelimiter === false ){
- this.state.wasRowDelimiter = true
- }
- // Previous char was a valid escape char
- // treat the current char as a regular char
- if(this.state.escaping === true){
- this.state.escaping = false
- }else{
- // Escape is only active inside quoted fields
- // We are quoting, the char is an escape chr and there is a chr to escape
- // if(escape !== null && this.state.quoting === true && chr === escape && pos + 1 < bufLen){
- if(escape !== null && this.state.quoting === true && this.__isEscape(buf, pos, chr) && pos + escape.length < bufLen){
- if(escapeIsQuote){
- if(this.__isQuote(buf, pos+escape.length)){
- this.state.escaping = true
- pos += escape.length - 1
- continue
- }
- }else{
- this.state.escaping = true
- pos += escape.length - 1
- continue
- }
- }
- // Not currently escaping and chr is a quote
- // TODO: need to compare bytes instead of single char
- if(this.state.commenting === false && this.__isQuote(buf, pos)){
- if(this.state.quoting === true){
- const nextChr = buf[pos+quote.length]
- const isNextChrTrimable = rtrim && this.__isCharTrimable(nextChr)
- const isNextChrComment = comment !== null && this.__compareBytes(comment, buf, pos+quote.length, nextChr)
- const isNextChrDelimiter = this.__isDelimiter(buf, pos+quote.length, nextChr)
- const isNextChrRecordDelimiter = record_delimiter.length === 0 ? this.__autoDiscoverRecordDelimiter(buf, pos+quote.length) : this.__isRecordDelimiter(nextChr, buf, pos+quote.length)
- // Escape a quote
- // Treat next char as a regular character
- if(escape !== null && this.__isEscape(buf, pos, chr) && this.__isQuote(buf, pos + escape.length)){
- pos += escape.length - 1
- }else if(!nextChr || isNextChrDelimiter || isNextChrRecordDelimiter || isNextChrComment || isNextChrTrimable){
- this.state.quoting = false
- this.state.wasQuoting = true
- pos += quote.length - 1
- continue
- }else if(relax === false){
- const err = this.__error(
- new CsvError('CSV_INVALID_CLOSING_QUOTE', [
- 'Invalid Closing Quote:',
- `got "${String.fromCharCode(nextChr)}"`,
- `at line ${this.info.lines}`,
- 'instead of delimiter, record delimiter, trimable character',
- '(if activated) or comment',
- ], this.options, this.__infoField())
- )
- if(err !== undefined) return err
- }else{
- this.state.quoting = false
- this.state.wasQuoting = true
- this.state.field.prepend(quote)
- pos += quote.length - 1
- }
- }else{
- if(this.state.field.length !== 0){
- // In relax mode, treat opening quote preceded by chrs as regular
- if( relax === false ){
- const err = this.__error(
- new CsvError('INVALID_OPENING_QUOTE', [
- 'Invalid Opening Quote:',
- `a quote is found inside a field at line ${this.info.lines}`,
- ], this.options, this.__infoField(), {
- field: this.state.field,
- })
- )
- if(err !== undefined) return err
- }
- }else{
- this.state.quoting = true
- pos += quote.length - 1
- continue
- }
- }
- }
- if(this.state.quoting === false){
- let recordDelimiterLength = this.__isRecordDelimiter(chr, buf, pos)
- if(recordDelimiterLength !== 0){
- // Do not emit comments which take a full line
- const skipCommentLine = this.state.commenting && (this.state.wasQuoting === false && this.state.record.length === 0 && this.state.field.length === 0)
- if(skipCommentLine){
- this.info.comment_lines++
- // Skip full comment line
- }else{
- // Activate record emission if above from_line
- if(this.state.enabled === false && this.info.lines + (this.state.wasRowDelimiter === true ? 1: 0) >= from_line){
- this.state.enabled = true
- this.__resetField()
- this.__resetRecord()
- pos += recordDelimiterLength - 1
- continue
- }
- // Skip if line is empty and skip_empty_lines activated
- if(skip_empty_lines === true && this.state.wasQuoting === false && this.state.record.length === 0 && this.state.field.length === 0){
- this.info.empty_lines++
- pos += recordDelimiterLength - 1
- continue
- }
- this.info.bytes = this.state.bufBytesStart + pos;
- const errField = this.__onField()
- if(errField !== undefined) return errField
- this.info.bytes = this.state.bufBytesStart + pos + recordDelimiterLength;
- const errRecord = this.__onRecord()
- if(errRecord !== undefined) return errRecord
- if(to !== -1 && this.info.records >= to){
- this.state.stop = true
- this.push(null)
- return
- }
- }
- this.state.commenting = false
- pos += recordDelimiterLength - 1
- continue
- }
- if(this.state.commenting){
- continue
- }
- const commentCount = comment === null ? 0 : this.__compareBytes(comment, buf, pos, chr)
- if(commentCount !== 0){
- this.state.commenting = true
- continue
- }
- let delimiterLength = this.__isDelimiter(buf, pos, chr)
- if(delimiterLength !== 0){
- this.info.bytes = this.state.bufBytesStart + pos;
- const errField = this.__onField()
- if(errField !== undefined) return errField
- pos += delimiterLength - 1
- continue
- }
- }
- }
- if(this.state.commenting === false){
- if(max_record_size !== 0 && this.state.record_length + this.state.field.length > max_record_size){
- const err = this.__error(
- new CsvError('CSV_MAX_RECORD_SIZE', [
- 'Max Record Size:',
- 'record exceed the maximum number of tolerated bytes',
- `of ${max_record_size}`,
- `at line ${this.info.lines}`,
- ], this.options, this.__infoField())
- )
- if(err !== undefined) return err
- }
- }
- const lappend = ltrim === false || this.state.quoting === true || this.state.field.length !== 0 || !this.__isCharTrimable(chr)
- // rtrim in non-quoting mode is handled in __onField
- const rappend = rtrim === false || this.state.wasQuoting === false
- if( lappend === true && rappend === true ){
- this.state.field.append(chr)
- }else if(rtrim === true && !this.__isCharTrimable(chr)){
- const err = this.__error(
- new CsvError('CSV_NON_TRIMABLE_CHAR_AFTER_CLOSING_QUOTE', [
- 'Invalid Closing Quote:',
- 'found non trimable byte after quote',
- `at line ${this.info.lines}`,
- ], this.options, this.__infoField())
- )
- if(err !== undefined) return err
- }
- }
- if(end === true){
- // Ensure we are not ending in a quoting state
- if(this.state.quoting === true){
- const err = this.__error(
- new CsvError('CSV_QUOTE_NOT_CLOSED', [
- 'Quote Not Closed:',
- `the parsing is finished with an opening quote at line ${this.info.lines}`,
- ], this.options, this.__infoField())
- )
- if(err !== undefined) return err
- }else{
- // Skip last line if it has no characters
- if(this.state.wasQuoting === true || this.state.record.length !== 0 || this.state.field.length !== 0){
- this.info.bytes = this.state.bufBytesStart + pos;
- const errField = this.__onField()
- if(errField !== undefined) return errField
- const errRecord = this.__onRecord()
- if(errRecord !== undefined) return errRecord
- }else if(this.state.wasRowDelimiter === true){
- this.info.empty_lines++
- }else if(this.state.commenting === true){
- this.info.comment_lines++
- }
- }
- }else{
- this.state.bufBytesStart += pos
- this.state.previousBuf = buf.slice(pos)
- }
- if(this.state.wasRowDelimiter === true){
- this.info.lines++
- this.state.wasRowDelimiter = false
- }
- }
- __onRecord(){
- const {columns, columns_duplicates_to_array, encoding, info, from, relax_column_count, relax_column_count_less, relax_column_count_more, raw, skip_lines_with_empty_values} = this.options
- const {enabled, record} = this.state
- if(enabled === false){
- return this.__resetRecord()
- }
- // Convert the first line into column names
- const recordLength = record.length
- if(columns === true){
- if(skip_lines_with_empty_values === true && isRecordEmpty(record)){
- this.__resetRecord()
- return
- }
- return this.__firstLineToColumns(record)
- }
- if(columns === false && this.info.records === 0){
- this.state.expectedRecordLength = recordLength
- }
- if(recordLength !== this.state.expectedRecordLength){
- const err = columns === false ?
- // Todo: rename CSV_INCONSISTENT_RECORD_LENGTH to
- // CSV_RECORD_INCONSISTENT_FIELDS_LENGTH
- new CsvError('CSV_INCONSISTENT_RECORD_LENGTH', [
- 'Invalid Record Length:',
- `expect ${this.state.expectedRecordLength},`,
- `got ${recordLength} on line ${this.info.lines}`,
- ], this.options, this.__infoField(), {
- record: record,
- })
- :
- // Todo: rename CSV_RECORD_DONT_MATCH_COLUMNS_LENGTH to
- // CSV_RECORD_INCONSISTENT_COLUMNS
- new CsvError('CSV_RECORD_DONT_MATCH_COLUMNS_LENGTH', [
- 'Invalid Record Length:',
- `columns length is ${columns.length},`, // rename columns
- `got ${recordLength} on line ${this.info.lines}`,
- ], this.options, this.__infoField(), {
- record: record,
- })
- if(relax_column_count === true ||
- (relax_column_count_less === true && recordLength < this.state.expectedRecordLength) ||
- (relax_column_count_more === true && recordLength > this.state.expectedRecordLength) ){
- this.info.invalid_field_length++
- this.state.error = err
- // Error is undefined with skip_lines_with_error
- }else{
- const finalErr = this.__error(err)
- if(finalErr) return finalErr
- }
- }
- if(skip_lines_with_empty_values === true && isRecordEmpty(record)){
- this.__resetRecord()
- return
- }
- if(this.state.recordHasError === true){
- this.__resetRecord()
- this.state.recordHasError = false
- return
- }
- this.info.records++
- if(from === 1 || this.info.records >= from){
- // With columns, records are objects
- if(columns !== false){
- const obj = {}
- // Transform record array to an object
- for(let i = 0, l = record.length; i < l; i++){
- if(columns[i] === undefined || columns[i].disabled) continue
- // Turn duplicate columns into an array
- if (columns_duplicates_to_array === true && obj[columns[i].name] !== undefined) {
- if (Array.isArray(obj[columns[i].name])) {
- obj[columns[i].name] = obj[columns[i].name].concat(record[i])
- } else {
- obj[columns[i].name] = [obj[columns[i].name], record[i]]
- }
- } else {
- obj[columns[i].name] = record[i]
- }
- }
- const {objname} = this.options
- // Without objname (default)
- if(objname === undefined){
- if(raw === true || info === true){
- const err = this.__push(Object.assign(
- {record: obj},
- (raw === true ? {raw: this.state.rawBuffer.toString(encoding)}: {}),
- (info === true ? {info: this.__infoRecord()}: {})
- ))
- if(err){
- return err
- }
- }else{
- const err = this.__push(obj)
- if(err){
- return err
- }
- }
- // With objname
- }else{
- if(raw === true || info === true){
- const err = this.__push(Object.assign(
- {record: [obj[objname], obj]},
- raw === true ? {raw: this.state.rawBuffer.toString(encoding)}: {},
- info === true ? {info: this.__infoRecord()}: {}
- ))
- if(err){
- return err
- }
- }else{
- const err = this.__push([obj[objname], obj])
- if(err){
- return err
- }
- }
- }
- // Without columns, records are arrays
- }else{
- if(raw === true || info === true){
- const err = this.__push(Object.assign(
- {record: record},
- raw === true ? {raw: this.state.rawBuffer.toString(encoding)}: {},
- info === true ? {info: this.__infoRecord()}: {}
- ))
- if(err){
- return err
- }
- }else{
- const err = this.__push(record)
- if(err){
- return err
- }
- }
- }
- }
- this.__resetRecord()
- }
- __firstLineToColumns(record){
- const {firstLineToHeaders} = this.state
- try{
- const headers = firstLineToHeaders === undefined ? record : firstLineToHeaders.call(null, record)
- if(!Array.isArray(headers)){
- return this.__error(
- new CsvError('CSV_INVALID_COLUMN_MAPPING', [
- 'Invalid Column Mapping:',
- 'expect an array from column function,',
- `got ${JSON.stringify(headers)}`
- ], this.options, this.__infoField(), {
- headers: headers,
- })
- )
- }
- const normalizedHeaders = normalizeColumnsArray(headers)
- this.state.expectedRecordLength = normalizedHeaders.length
- this.options.columns = normalizedHeaders
- this.__resetRecord()
- return
- }catch(err){
- return err
- }
- }
- __resetRecord(){
- if(this.options.raw === true){
- this.state.rawBuffer.reset()
- }
- this.state.error = undefined
- this.state.record = []
- this.state.record_length = 0
- }
- __onField(){
- const {cast, encoding, rtrim, max_record_size} = this.options
- const {enabled, wasQuoting} = this.state
- // Short circuit for the from_line options
- if(enabled === false){
- return this.__resetField()
- }
- let field = this.state.field.toString(encoding)
- if(rtrim === true && wasQuoting === false){
- field = field.trimRight()
- }
- if(cast === true){
- const [err, f] = this.__cast(field)
- if(err !== undefined) return err
- field = f
- }
- this.state.record.push(field)
- // Increment record length if record size must not exceed a limit
- if(max_record_size !== 0 && typeof field === 'string'){
- this.state.record_length += field.length
- }
- this.__resetField()
- }
- __resetField(){
- this.state.field.reset()
- this.state.wasQuoting = false
- }
- __push(record){
- const {on_record} = this.options
- if(on_record !== undefined){
- const info = this.__infoRecord()
- try{
- record = on_record.call(null, record, info)
- }catch(err){
- return err
- }
- if(record === undefined || record === null){ return }
- }
- this.push(record)
- }
- // Return a tuple with the error and the casted value
- __cast(field){
- const {columns, relax_column_count} = this.options
- const isColumns = Array.isArray(columns)
- // Don't lose time calling cast
- // because the final record is an object
- // and this field can't be associated with a key present in columns
- if( isColumns === true && relax_column_count && this.options.columns.length <= this.state.record.length ){
- return [undefined, undefined]
- }
- if(this.state.castField !== null){
- try{
- const info = this.__infoField()
- return [undefined, this.state.castField.call(null, field, info)]
- }catch(err){
- return [err]
- }
- }
- if(this.__isFloat(field)){
- return [undefined, parseFloat(field)]
- }else if(this.options.cast_date !== false){
- const info = this.__infoField()
- return [undefined, this.options.cast_date.call(null, field, info)]
- }
- return [undefined, field]
- }
- // Helper to test if a character is a space or a line delimiter
- __isCharTrimable(chr){
- return chr === space || chr === tab || chr === cr || chr === nl || chr === np
- }
- // Keep it in case we implement the `cast_int` option
- // __isInt(value){
- // // return Number.isInteger(parseInt(value))
- // // return !isNaN( parseInt( obj ) );
- // return /^(\-|\+)?[1-9][0-9]*$/.test(value)
- // }
- __isFloat(value){
- return (value - parseFloat( value ) + 1) >= 0 // Borrowed from jquery
- }
- __compareBytes(sourceBuf, targetBuf, targetPos, firstByte){
- if(sourceBuf[0] !== firstByte) return 0
- const sourceLength = sourceBuf.length
- for(let i = 1; i < sourceLength; i++){
- if(sourceBuf[i] !== targetBuf[targetPos+i]) return 0
- }
- return sourceLength
- }
- __needMoreData(i, bufLen, end){
- if(end) return false
- const {quote} = this.options
- const {quoting, needMoreDataSize, recordDelimiterMaxLength} = this.state
- const numOfCharLeft = bufLen - i - 1
- const requiredLength = Math.max(
- needMoreDataSize,
- // Skip if the remaining buffer is smaller than the record delimiter
- recordDelimiterMaxLength,
- // Skip if the remaining buffer could hold a record delimiter following the closing quote
- // quote.length accounts for the closing quote itself
- quoting ? (quote.length + recordDelimiterMaxLength) : 0,
- )
- return numOfCharLeft < requiredLength
- }
- __isDelimiter(buf, pos, chr){
- const {delimiter, ignore_last_delimiters} = this.options
- if(ignore_last_delimiters === true && this.state.record.length === this.options.columns.length - 1){
- return 0
- }else if(ignore_last_delimiters !== false && typeof ignore_last_delimiters === 'number' && this.state.record.length === ignore_last_delimiters - 1){
- return 0
- }
- loop1: for(let i = 0; i < delimiter.length; i++){
- const del = delimiter[i]
- if(del[0] === chr){
- for(let j = 1; j < del.length; j++){
- if(del[j] !== buf[pos+j]) continue loop1
- }
- return del.length
- }
- }
- return 0
- }
- __isRecordDelimiter(chr, buf, pos){
- const {record_delimiter} = this.options
- const recordDelimiterLength = record_delimiter.length
- loop1: for(let i = 0; i < recordDelimiterLength; i++){
- const rd = record_delimiter[i]
- const rdLength = rd.length
- if(rd[0] !== chr){
- continue
- }
- for(let j = 1; j < rdLength; j++){
- if(rd[j] !== buf[pos+j]){
- continue loop1
- }
- }
- return rd.length
- }
- return 0
- }
- __isEscape(buf, pos, chr){
- const {escape} = this.options
- if(escape === null) return false
- const l = escape.length
- if(escape[0] === chr){
- for(let i = 0; i < l; i++){
- if(escape[i] !== buf[pos+i]){
- return false
- }
- }
- return true
- }
- return false
- }
- __isQuote(buf, pos){
- const {quote} = this.options
- if(quote === null) return false
- const l = quote.length
- for(let i = 0; i < l; i++){
- if(quote[i] !== buf[pos+i]){
- return false
- }
- }
- return true
- }
- __autoDiscoverRecordDelimiter(buf, pos){
- const {encoding} = this.options
- const chr = buf[pos]
- if(chr === cr){
- if(buf[pos+1] === nl){
- this.options.record_delimiter.push(Buffer.from('\r\n', encoding))
- this.state.recordDelimiterMaxLength = 2
- return 2
- }else{
- this.options.record_delimiter.push(Buffer.from('\r', encoding))
- this.state.recordDelimiterMaxLength = 1
- return 1
- }
- }else if(chr === nl){
- this.options.record_delimiter.push(Buffer.from('\n', encoding))
- this.state.recordDelimiterMaxLength = 1
- return 1
- }
- return 0
- }
- __error(msg){
- const {skip_lines_with_error} = this.options
- const err = typeof msg === 'string' ? new Error(msg) : msg
- if(skip_lines_with_error){
- this.state.recordHasError = true
- this.emit('skip', err)
- return undefined
- }else{
- return err
- }
- }
- __infoDataSet(){
- return {
- ...this.info,
- columns: this.options.columns
- }
- }
- __infoRecord(){
- const {columns} = this.options
- return {
- ...this.__infoDataSet(),
- error: this.state.error,
- header: columns === true,
- index: this.state.record.length,
- }
- }
- __infoField(){
- const {columns} = this.options
- const isColumns = Array.isArray(columns)
- return {
- ...this.__infoRecord(),
- column: isColumns === true ?
- ( columns.length > this.state.record.length ?
- columns[this.state.record.length].name :
- null
- ) :
- this.state.record.length,
- quoting: this.state.wasQuoting,
- }
- }
-}
-
-const parse = function(){
- let data, options, callback
- for(let i in arguments){
- const argument = arguments[i]
- const type = typeof argument
- if(data === undefined && (typeof argument === 'string' || Buffer.isBuffer(argument))){
- data = argument
- }else if(options === undefined && isObject(argument)){
- options = argument
- }else if(callback === undefined && type === 'function'){
- callback = argument
- }else{
- throw new CsvError('CSV_INVALID_ARGUMENT', [
- 'Invalid argument:',
- `got ${JSON.stringify(argument)} at index ${i}`
- ], options || {})
- }
- }
- const parser = new Parser(options)
- if(callback){
- const records = options === undefined || options.objname === undefined ? [] : {}
- parser.on('readable', function(){
- let record
- while((record = this.read()) !== null){
- if(options === undefined || options.objname === undefined){
- records.push(record)
- }else{
- records[record[0]] = record[1]
- }
- }
- })
- parser.on('error', function(err){
- callback(err, undefined, parser.__infoDataSet())
- })
- parser.on('end', function(){
- callback(undefined, records, parser.__infoDataSet())
- })
- }
- if(data !== undefined){
- // Give a chance for events to be registered later
- if(typeof setImmediate === 'function'){
- setImmediate(function(){
- parser.write(data)
- parser.end()
- })
- }else{
- parser.write(data)
- parser.end()
- }
- }
- return parser
-}
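
For orientation, a minimal sketch of the callback form handled by the `parse` factory above; the sample input, option values and logged output are illustrative only, not part of the bundle.

// Hypothetical input and options, shown only to illustrate the callback API above.
parse('key,value\na,1\nb,2', {columns: true}, function(err, records, info){
  if(err){
    return console.error(err.code, err.message)
  }
  // With `columns: true`, the first line becomes the column names
  console.log(records)      // [{key: 'a', value: '1'}, {key: 'b', value: '2'}]
  console.log(info.records) // 2
})
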
-
-class CsvError extends Error {
- constructor(code, message, options, ...contexts) {
- if(Array.isArray(message)) message = message.join(' ')
- super(message)
- if(Error.captureStackTrace !== undefined){
- Error.captureStackTrace(this, CsvError)
- }
- this.code = code
- for(const context of contexts){
- for(const key in context){
- const value = context[key]
- this[key] = Buffer.isBuffer(value) ? value.toString(options.encoding) : value == null ? value : JSON.parse(JSON.stringify(value))
- }
- }
- }
-}
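
A small sketch of how the `code` set by `CsvError` is meant to be consumed; `parser` stands for any stream returned by `parse()` above, and the chosen code is just one of those raised in this file.

// Illustrative only: branch on the error code rather than the message text.
parser.on('error', function(err){
  if(err.code === 'CSV_RECORD_DONT_MATCH_COLUMNS_LENGTH'){
    console.warn('record/columns mismatch on line', err.lines, err.record)
  }else{
    throw err
  }
})
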
-
-parse.Parser = Parser
-
-parse.CsvError = CsvError
-
-module.exports = parse
-
-const underscore = function(str){
- return str.replace(/([A-Z])/g, function(_, match){
- return '_' + match.toLowerCase()
- })
-}
-
-const isObject = function(obj){
- return (typeof obj === 'object' && obj !== null && !Array.isArray(obj))
-}
-
-const isRecordEmpty = function(record){
- return record.every( (field) => field == null || field.toString && field.toString().trim() === '' )
-}
-
-const normalizeColumnsArray = function(columns){
- const normalizedColumns = [];
- for(let i = 0, l = columns.length; i < l; i++){
- const column = columns[i]
- if(column === undefined || column === null || column === false){
- normalizedColumns[i] = { disabled: true }
- }else if(typeof column === 'string'){
- normalizedColumns[i] = { name: column }
- }else if(isObject(column)){
- if(typeof column.name !== 'string'){
- throw new CsvError('CSV_OPTION_COLUMNS_MISSING_NAME', [
- 'Option columns missing name:',
- `property "name" is required at position ${i}`,
- 'when column is an object literal'
- ])
- }
- normalizedColumns[i] = column
- }else{
- throw new CsvError('CSV_INVALID_COLUMN_DEFINITION', [
- 'Invalid column definition:',
- 'expect a string or a literal object,',
- `got ${JSON.stringify(column)} at position ${i}`
- ])
- }
- }
- return normalizedColumns;
-}
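
As a sketch of the shapes `normalizeColumnsArray` accepts: strings become `{name}`, objects must already carry a string `name`, and `null`/`undefined`/`false` mark the position as disabled.

normalizeColumnsArray(['id', {name: 'label'}, null])
// => [{name: 'id'}, {name: 'label'}, {disabled: true}]
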
-
-}).call(this)}).call(this,require("buffer").Buffer,require("timers").setImmediate)
-},{"./ResizeableBuffer":3,"buffer":10,"stream":29,"timers":45}],5:[function(require,module,exports){
-(function (Buffer,setImmediate){(function (){
-
-/*
-CSV Stringify
-
-Please look at the [project documentation](https://csv.js.org/stringify/) for
-additional information.
-*/
-
-const { Transform } = require('stream')
-const bom_utf8 = Buffer.from([239, 187, 191])
-
-class Stringifier extends Transform {
- constructor(opts = {}){
- super({...{writableObjectMode: true}, ...opts})
- const options = {}
- let err
- // Merge with user options
- for(let opt in opts){
- options[underscore(opt)] = opts[opt]
- }
- if(err = this.normalize(options)) throw err
- switch(options.record_delimiter){
- case 'auto':
- options.record_delimiter = null
- break
- case 'unix':
- options.record_delimiter = "\n"
- break
- case 'mac':
- options.record_delimiter = "\r"
- break
- case 'windows':
- options.record_delimiter = "\r\n"
- break
- case 'ascii':
- options.record_delimiter = "\u001e"
- break
- case 'unicode':
- options.record_delimiter = "\u2028"
- break
- }
- // Expose options
- this.options = options
- // Internal state
- this.state = {
- stop: false
- }
- // Information
- this.info = {
- records: 0
- }
- }
- normalize(options){
- // Normalize option `bom`
- if(options.bom === undefined || options.bom === null || options.bom === false){
- options.bom = false
- }else if(options.bom !== true){
- return new CsvError('CSV_OPTION_BOOLEAN_INVALID_TYPE', [
- 'option `bom` is optional and must be a boolean value,',
- `got ${JSON.stringify(options.bom)}`
- ])
- }
- // Normalize option `delimiter`
- if(options.delimiter === undefined || options.delimiter === null){
- options.delimiter = ','
- }else if(Buffer.isBuffer(options.delimiter)){
- options.delimiter = options.delimiter.toString()
- }else if(typeof options.delimiter !== 'string'){
- return new CsvError('CSV_OPTION_DELIMITER_INVALID_TYPE', [
- 'option `delimiter` must be a buffer or a string,',
- `got ${JSON.stringify(options.delimiter)}`
- ])
- }
- // Normalize option `quote`
- if(options.quote === undefined || options.quote === null){
- options.quote = '"'
- }else if(options.quote === true){
- options.quote = '"'
- }else if(options.quote === false){
- options.quote = ''
- }else if (Buffer.isBuffer(options.quote)){
- options.quote = options.quote.toString()
- }else if(typeof options.quote !== 'string'){
- return new CsvError('CSV_OPTION_QUOTE_INVALID_TYPE', [
- 'option `quote` must be a boolean, a buffer or a string,',
- `got ${JSON.stringify(options.quote)}`
- ])
- }
- // Normalize option `quoted`
- if(options.quoted === undefined || options.quoted === null){
- options.quoted = false
- }else{
- // todo
- }
- // Normalize option `quoted_empty`
- if(options.quoted_empty === undefined || options.quoted_empty === null){
- options.quoted_empty = undefined
- }else{
- // todo
- }
- // Normalize option `quoted_match`
- if(options.quoted_match === undefined || options.quoted_match === null || options.quoted_match === false){
- options.quoted_match = null
- }else if(!Array.isArray(options.quoted_match)){
- options.quoted_match = [options.quoted_match]
- }
- if(options.quoted_match){
- for(let quoted_match of options.quoted_match){
- const isString = typeof quoted_match === 'string'
- const isRegExp = quoted_match instanceof RegExp
- if(!isString && !isRegExp){
- return Error(`Invalid Option: quoted_match must be a string or a regex, got ${JSON.stringify(quoted_match)}`)
- }
- }
- }
- // Normalize option `quoted_string`
- if(options.quoted_string === undefined || options.quoted_string === null){
- options.quoted_string = false
- }else{
- // todo
- }
- // Normalize option `eof`
- if(options.eof === undefined || options.eof === null){
- options.eof = true
- }else{
- // todo
- }
- // Normalize option `escape`
- if(options.escape === undefined || options.escape === null){
- options.escape = '"'
- }else if(Buffer.isBuffer(options.escape)){
- options.escape = options.escape.toString()
- }else if(typeof options.escape !== 'string'){
- return Error(`Invalid Option: escape must be a buffer or a string, got ${JSON.stringify(options.escape)}`)
- }
- if (options.escape.length > 1){
- return Error(`Invalid Option: escape must be one character, got ${options.escape.length} characters`)
- }
- // Normalize option `header`
- if(options.header === undefined || options.header === null){
- options.header = false
- }else{
- // todo
- }
- // Normalize option `columns`
- options.columns = this.normalize_columns(options.columns)
- // Normalize option `quoted`
- if(options.quoted === undefined || options.quoted === null){
- options.quoted = false
- }else{
- // todo
- }
- // Normalize option `cast`
- if(options.cast === undefined || options.cast === null){
- options.cast = {}
- }else{
- // todo
- }
- // Normalize option cast.bigint
- if(options.cast.bigint === undefined || options.cast.bigint === null){
- // Cast bigint to string by default
- options.cast.bigint = value => '' + value
- }
- // Normalize option cast.boolean
- if(options.cast.boolean === undefined || options.cast.boolean === null){
- // Cast boolean to string by default
- options.cast.boolean = value => value ? '1' : ''
- }
- // Normalize option cast.date
- if(options.cast.date === undefined || options.cast.date === null){
- // Cast date to timestamp string by default
- options.cast.date = value => '' + value.getTime()
- }
- // Normalize option cast.number
- if(options.cast.number === undefined || options.cast.number === null){
- // Cast number to string using native casting by default
- options.cast.number = value => '' + value
- }
- // Normalize option cast.object
- if(options.cast.object === undefined || options.cast.object === null){
- // Stringify object as JSON by default
- options.cast.object = value => JSON.stringify(value)
- }
- // Normalize option cast.string
- if(options.cast.string === undefined || options.cast.string === null){
- // Leave string untouched
- options.cast.string = function(value){return value}
- }
- // Normalize option `record_delimiter`
- if(options.record_delimiter === undefined || options.record_delimiter === null){
- options.record_delimiter = '\n'
- }else if(Buffer.isBuffer(options.record_delimiter)){
- options.record_delimiter = options.record_delimiter.toString()
- }else if(typeof options.record_delimiter !== 'string'){
- return Error(`Invalid Option: record_delimiter must be a buffer or a string, got ${JSON.stringify(options.record_delimiter)}`)
- }
- }
- _transform(chunk, encoding, callback){
- if(this.state.stop === true){
- return
- }
- // Chunk validation
- if(!Array.isArray(chunk) && typeof chunk !== 'object'){
- this.state.stop = true
- return callback(Error(`Invalid Record: expect an array or an object, got ${JSON.stringify(chunk)}`))
- }
- // Detect columns from the first record
- if(this.info.records === 0){
- if(Array.isArray(chunk)){
- if(this.options.header === true && !this.options.columns){
- this.state.stop = true
- return callback(Error('Undiscoverable Columns: header option requires column option or object records'))
- }
- }else if(this.options.columns === undefined || this.options.columns === null){
- this.options.columns = this.normalize_columns(Object.keys(chunk))
- }
- }
- // Emit the header
- if(this.info.records === 0){
- this.bom()
- this.headers()
- }
- // Emit and stringify the record if an object or an array
- try{
- this.emit('record', chunk, this.info.records)
- }catch(err){
- this.state.stop = true
- return this.emit('error', err)
- }
- // Convert the record into a string
- let chunk_string
- if(this.options.eof){
- chunk_string = this.stringify(chunk)
- if(chunk_string === undefined){
- return
- }else{
- chunk_string = chunk_string + this.options.record_delimiter
- }
- }else{
- chunk_string = this.stringify(chunk)
- if(chunk_string === undefined){
- return
- }else{
- if(this.options.header || this.info.records){
- chunk_string = this.options.record_delimiter + chunk_string
- }
- }
- }
- // Emit the csv
- this.info.records++
- this.push(chunk_string)
- callback()
- }
- _flush(callback){
- if(this.info.records === 0){
- this.bom()
- this.headers()
- }
- callback()
- }
- stringify(chunk, chunkIsHeader=false){
- if(typeof chunk !== 'object'){
- return chunk
- }
- const {columns, header} = this.options
- const record = []
- // Record is an array
- if(Array.isArray(chunk)){
- // We are getting an array but the user has specified output columns. In
- // this case, we respect the columns indexes
- if(columns){
- chunk.splice(columns.length)
- }
- // Cast record elements
- for(let i=0; i<chunk.length; i++){
- const field = chunk[i]
- const [err, value] = this.__cast(field, {index: i, column: i, records: this.info.records, header: chunkIsHeader})
- if(err){
- this.state.stop = true
- return this.emit('error', err)
- }
- record[i] = [value, field]
- }
- // Record is a literal object, resolve each column key with get()
- }else{
- for(let i=0; i<columns.length; i++){
- const field = get(chunk, columns[i].key)
- const [err, value] = this.__cast(field, {index: i, column: columns[i].key, records: this.info.records, header: chunkIsHeader})
- if(err){
- this.state.stop = true
- return this.emit('error', err)
- }
- record[i] = [value, field]
- }
- }
- // Serialize the casted fields into one CSV line
- const {delimiter, escape, quote, quoted, quoted_empty, quoted_match, quoted_string, record_delimiter} = this.options
- let csvrecord = ''
- for(let i=0; i<record.length; i++){
- let [value, field] = record[i]
- if(value){
- const containsdelimiter = delimiter !== '' && value.indexOf(delimiter) >= 0
- const containsQuote = (quote !== '') && value.indexOf(quote) >= 0
- const containsEscape = value.indexOf(escape) >= 0 && (escape !== quote)
- const containsRecordDelimiter = value.indexOf(record_delimiter) >= 0
- const quotedString = quoted_string && typeof field === 'string'
- let quotedMatch = quoted_match && quoted_match.filter( quoted_match => {
- if(typeof quoted_match === 'string'){
- return value.indexOf(quoted_match) !== -1
- }else{
- return quoted_match.test(value)
- }
- })
- quotedMatch = quotedMatch && quotedMatch.length > 0
- const shouldQuote = containsQuote === true || containsdelimiter || containsRecordDelimiter || quoted || quotedString || quotedMatch
- if(shouldQuote === true && containsEscape === true){
- const regexp = escape === '\\'
- ? new RegExp(escape + escape, 'g')
- : new RegExp(escape, 'g')
- value = value.replace(regexp, escape + escape)
- }
- if(containsQuote === true){
- const regexp = new RegExp(quote,'g')
- value = value.replace(regexp, escape + quote)
- }
- if(shouldQuote === true){
- value = quote + value + quote
- }
- csvrecord += value
- }else if(quoted_empty === true || (field === '' && quoted_string === true && quoted_empty !== false)){
- csvrecord += quote + quote
- }
- if(i !== record.length - 1){
- csvrecord += delimiter
- }
- }
- return csvrecord
- }
- bom(){
- if(this.options.bom !== true){
- return
- }
- this.push(bom_utf8)
- }
- headers(){
- if(this.options.header === false){
- return
- }
- if(this.options.columns === undefined){
- return
- }
- let headers = this.options.columns.map(column => column.header)
- if(this.options.eof){
- headers = this.stringify(headers, true) + this.options.record_delimiter
- }else{
- headers = this.stringify(headers)
- }
- this.push(headers)
- }
- __cast(value, context){
- const type = typeof value
- try{
- if(type === 'string'){ // Fine for 99% of the cases
- return [undefined, this.options.cast.string(value, context)]
- }else if(type === 'bigint'){
- return [undefined, this.options.cast.bigint(value, context)]
- }else if(type === 'number'){
- return [undefined, this.options.cast.number(value, context)]
- }else if(type === 'boolean'){
- return [undefined, this.options.cast.boolean(value, context)]
- }else if(value instanceof Date){
- return [undefined, this.options.cast.date(value, context)]
- }else if(type === 'object' && value !== null){
- return [undefined, this.options.cast.object(value, context)]
- }else{
- return [undefined, value, value]
- }
- }catch(err){
- return [err]
- }
- }
- normalize_columns(columns){
- if(columns === undefined || columns === null){
- return undefined
- }
- if(typeof columns !== 'object'){
- throw Error('Invalid option "columns": expect an array or an object')
- }
- if(!Array.isArray(columns)){
- const newcolumns = []
- for(let k in columns){
- newcolumns.push({
- key: k,
- header: columns[k]
- })
- }
- columns = newcolumns
- }else{
- const newcolumns = []
- for(let column of columns){
- if(typeof column === 'string'){
- newcolumns.push({
- key: column,
- header: column
- })
- }else if(typeof column === 'object' && column !== null && !Array.isArray(column)){
- if(!column.key){
- throw Error('Invalid column definition: property "key" is required')
- }
- if(column.header === undefined){
- column.header = column.key
- }
- newcolumns.push(column)
- }else{
- throw Error('Invalid column definition: expect a string or an object')
- }
- }
- columns = newcolumns
- }
- return columns
- }
-}
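
The default `cast` handlers normalized above decide how non-string values are rendered; a sketch using the `stringify()` helper defined right below this class, with illustrative values.

// With the defaults above: booleans become '1'/'', dates become epoch
// milliseconds, and objects are JSON encoded (then quoted/escaped).
stringify([[true, false, new Date(0), {a: 1}]], function(err, output){
  console.log(output) // '1,,0,"{""a"":1}"\n'
})
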
-
-const stringify = function(){
- let data, options, callback
- for(let i in arguments){
- const argument = arguments[i]
- const type = typeof argument
- if(data === undefined && (Array.isArray(argument))){
- data = argument
- }else if(options === undefined && isObject(argument)){
- options = argument
- }else if(callback === undefined && type === 'function'){
- callback = argument
- }else{
- throw new CsvError('CSV_INVALID_ARGUMENT', [
- 'Invalid argument:',
- `got ${JSON.stringify(argument)} at index ${i}`
- ])
- }
- }
- const stringifier = new Stringifier(options)
- if(callback){
- const chunks = []
- stringifier.on('readable', function(){
- let chunk
- while((chunk = this.read()) !== null){
- chunks.push(chunk)
- }
- })
- stringifier.on('error', function(err){
- callback(err)
- })
- stringifier.on('end', function(){
- callback(undefined, chunks.join(''))
- })
- }
- if(data !== undefined){
- // Give a chance for events to be registered later
- if(typeof setImmediate === 'function'){
- setImmediate(function(){
- for(let record of data){
- stringifier.write(record)
- }
- stringifier.end()
- })
- }else{
- for(let record of data){
- stringifier.write(record)
- }
- stringifier.end()
- }
- }
- return stringifier
-}
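
And a minimal sketch of the `stringify()` factory above in callback mode; the records, the detected columns and the expected output string are illustrative.

stringify([{id: 1, label: 'a'}, {id: 2, label: 'b'}], {header: true}, function(err, output){
  if(err) return console.error(err)
  // Columns are detected from the first record, then emitted as a header line.
  console.log(output) // 'id,label\n1,a\n2,b\n'
})
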
-
-class CsvError extends Error {
- constructor(code, message, ...contexts) {
- if(Array.isArray(message)) message = message.join(' ')
- super(message)
- if(Error.captureStackTrace !== undefined){
- Error.captureStackTrace(this, CsvError)
- }
- this.code = code
- for(const context of contexts){
- for(const key in context){
- const value = context[key]
- this[key] = Buffer.isBuffer(value) ? value.toString() : value == null ? value : JSON.parse(JSON.stringify(value))
- }
- }
- }
-}
-
-stringify.Stringifier = Stringifier
-
-stringify.CsvError = CsvError
-
-module.exports = stringify
-
-const isObject = function(obj){
- return typeof obj === 'object' && obj !== null && ! Array.isArray(obj)
-}
-
-const underscore = function(str){
- return str.replace(/([A-Z])/g, function(_, match){
- return '_' + match.toLowerCase()
- })
-}
-
-// Lodash implementation of `get`
-
-const charCodeOfDot = '.'.charCodeAt(0)
-const reEscapeChar = /\\(\\)?/g
-const rePropName = RegExp(
- // Match anything that isn't a dot or bracket.
- '[^.[\\]]+' + '|' +
- // Or match property names within brackets.
- '\\[(?:' +
- // Match a non-string expression.
- '([^"\'][^[]*)' + '|' +
- // Or match strings (supports escaping characters).
- '(["\'])((?:(?!\\2)[^\\\\]|\\\\.)*?)\\2' +
- ')\\]'+ '|' +
- // Or match "" as the space between consecutive dots or empty brackets.
- '(?=(?:\\.|\\[\\])(?:\\.|\\[\\]|$))'
-, 'g')
-const reIsDeepProp = /\.|\[(?:[^[\]]*|(["'])(?:(?!\1)[^\\]|\\.)*?\1)\]/
-const reIsPlainProp = /^\w*$/
-const getTag = function(value){
- if(!value)
- return value === undefined ? '[object Undefined]' : '[object Null]'
- return Object.prototype.toString.call(value)
-}
-const isKey = function(value, object){
- if(Array.isArray(value)){
- return false
- }
- const type = typeof value
- if(type === 'number' || type === 'symbol' || type === 'boolean' || !value || isSymbol(value)){
- return true
- }
- return reIsPlainProp.test(value) || !reIsDeepProp.test(value) ||
- (object != null && value in Object(object))
-}
-const isSymbol = function(value){
- const type = typeof value
- return type === 'symbol' || (type === 'object' && value && getTag(value) === '[object Symbol]')
-}
-const stringToPath = function(string){
- const result = []
- if(string.charCodeAt(0) === charCodeOfDot){
- result.push('')
- }
- string.replace(rePropName, function(match, expression, quote, subString){
- let key = match
- if(quote){
- key = subString.replace(reEscapeChar, '$1')
- }else if(expression){
- key = expression.trim()
- }
- result.push(key)
- })
- return result
-}
-const castPath = function(value, object){
- if(Array.isArray(value)){
- return value
- } else {
- return isKey(value, object) ? [value] : stringToPath(value)
- }
-}
-const toKey = function(value){
- if(typeof value === 'string' || isSymbol(value))
- return value
- const result = `${value}`
- return (result == '0' && (1 / value) == -Infinity) ? '-0' : result
-}
-const get = function(object, path){
- path = castPath(path, object)
- let index = 0
- const length = path.length
- while(object != null && index < length){
- object = object[toKey(path[index++])]
- }
- return (index && index === length) ? object : undefined
-}
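
A quick sketch of what this inlined `get` resolves; the stringifier relies on it to read nested column keys out of record objects. The object and paths are illustrative.

get({user: {emails: ['a@x.test', 'b@x.test']}}, 'user.emails[1]') // 'b@x.test'
get({user: {}}, 'user.missing.deep')                              // undefined
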
-
-}).call(this)}).call(this,require("buffer").Buffer,require("timers").setImmediate)
-},{"buffer":10,"stream":29,"timers":45}],6:[function(require,module,exports){
-// Alias to the ES6 modules exposing the stream and callback APIs
-module.exports = {
- generate: require('csv-generate/lib'),
- parse: require('csv-parse/lib'),
- transform: require('stream-transform/lib'),
- stringify: require('csv-stringify/lib')
-};
-
-},{"csv-generate/lib":2,"csv-parse/lib":4,"csv-stringify/lib":5,"stream-transform/lib":51}],7:[function(require,module,exports){
-(function (global){(function (){
-'use strict';
-
-var possibleNames = [
- 'BigInt64Array',
- 'BigUint64Array',
- 'Float32Array',
- 'Float64Array',
- 'Int16Array',
- 'Int32Array',
- 'Int8Array',
- 'Uint16Array',
- 'Uint32Array',
- 'Uint8Array',
- 'Uint8ClampedArray'
-];
-
-module.exports = function availableTypedArrays() {
- var out = [];
- for (var i = 0; i < possibleNames.length; i++) {
- if (typeof global[possibleNames[i]] === 'function') {
- out[out.length] = possibleNames[i];
- }
- }
- return out;
-};
-
-}).call(this)}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
-},{}],8:[function(require,module,exports){
-'use strict'
-
-exports.byteLength = byteLength
-exports.toByteArray = toByteArray
-exports.fromByteArray = fromByteArray
-
-var lookup = []
-var revLookup = []
-var Arr = typeof Uint8Array !== 'undefined' ? Uint8Array : Array
-
-var code = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'
-for (var i = 0, len = code.length; i < len; ++i) {
- lookup[i] = code[i]
- revLookup[code.charCodeAt(i)] = i
-}
-
-// Support decoding URL-safe base64 strings, as Node.js does.
-// See: https://en.wikipedia.org/wiki/Base64#URL_applications
-revLookup['-'.charCodeAt(0)] = 62
-revLookup['_'.charCodeAt(0)] = 63
-
-function getLens (b64) {
- var len = b64.length
-
- if (len % 4 > 0) {
- throw new Error('Invalid string. Length must be a multiple of 4')
- }
-
- // Trim off extra bytes after placeholder bytes are found
- // See: https://github.com/beatgammit/base64-js/issues/42
- var validLen = b64.indexOf('=')
- if (validLen === -1) validLen = len
-
- var placeHoldersLen = validLen === len
- ? 0
- : 4 - (validLen % 4)
-
- return [validLen, placeHoldersLen]
-}
-
-// base64 is 4/3 + up to two characters of the original data
-function byteLength (b64) {
- var lens = getLens(b64)
- var validLen = lens[0]
- var placeHoldersLen = lens[1]
- return ((validLen + placeHoldersLen) * 3 / 4) - placeHoldersLen
-}
-
-function _byteLength (b64, validLen, placeHoldersLen) {
- return ((validLen + placeHoldersLen) * 3 / 4) - placeHoldersLen
-}
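
A worked example of the padding arithmetic above, assuming standard base64 input.

// 'Zm8=' -> validLen 3 (up to the first '='), placeHoldersLen 1:
// (3 + 1) * 3 / 4 - 1 = 2 decoded bytes ('f', 'o')
byteLength('Zm8=') // 2
byteLength('Zm9v') // 3, no padding: (4 + 0) * 3 / 4 - 0
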
-
-function toByteArray (b64) {
- var tmp
- var lens = getLens(b64)
- var validLen = lens[0]
- var placeHoldersLen = lens[1]
-
- var arr = new Arr(_byteLength(b64, validLen, placeHoldersLen))
-
- var curByte = 0
-
- // if there are placeholders, only get up to the last complete 4 chars
- var len = placeHoldersLen > 0
- ? validLen - 4
- : validLen
-
- var i
- for (i = 0; i < len; i += 4) {
- tmp =
- (revLookup[b64.charCodeAt(i)] << 18) |
- (revLookup[b64.charCodeAt(i + 1)] << 12) |
- (revLookup[b64.charCodeAt(i + 2)] << 6) |
- revLookup[b64.charCodeAt(i + 3)]
- arr[curByte++] = (tmp >> 16) & 0xFF
- arr[curByte++] = (tmp >> 8) & 0xFF
- arr[curByte++] = tmp & 0xFF
- }
-
- if (placeHoldersLen === 2) {
- tmp =
- (revLookup[b64.charCodeAt(i)] << 2) |
- (revLookup[b64.charCodeAt(i + 1)] >> 4)
- arr[curByte++] = tmp & 0xFF
- }
-
- if (placeHoldersLen === 1) {
- tmp =
- (revLookup[b64.charCodeAt(i)] << 10) |
- (revLookup[b64.charCodeAt(i + 1)] << 4) |
- (revLookup[b64.charCodeAt(i + 2)] >> 2)
- arr[curByte++] = (tmp >> 8) & 0xFF
- arr[curByte++] = tmp & 0xFF
- }
-
- return arr
-}
-
-function tripletToBase64 (num) {
- return lookup[num >> 18 & 0x3F] +
- lookup[num >> 12 & 0x3F] +
- lookup[num >> 6 & 0x3F] +
- lookup[num & 0x3F]
-}
-
-function encodeChunk (uint8, start, end) {
- var tmp
- var output = []
- for (var i = start; i < end; i += 3) {
- tmp =
- ((uint8[i] << 16) & 0xFF0000) +
- ((uint8[i + 1] << 8) & 0xFF00) +
- (uint8[i + 2] & 0xFF)
- output.push(tripletToBase64(tmp))
- }
- return output.join('')
-}
-
-function fromByteArray (uint8) {
- var tmp
- var len = uint8.length
- var extraBytes = len % 3 // if we have 1 byte left, pad 2 bytes
- var parts = []
- var maxChunkLength = 16383 // must be multiple of 3
-
- // go through the array every three bytes, we'll deal with trailing stuff later
- for (var i = 0, len2 = len - extraBytes; i < len2; i += maxChunkLength) {
- parts.push(encodeChunk(uint8, i, (i + maxChunkLength) > len2 ? len2 : (i + maxChunkLength)))
- }
-
- // pad the end with zeros, but make sure to not forget the extra bytes
- if (extraBytes === 1) {
- tmp = uint8[len - 1]
- parts.push(
- lookup[tmp >> 2] +
- lookup[(tmp << 4) & 0x3F] +
- '=='
- )
- } else if (extraBytes === 2) {
- tmp = (uint8[len - 2] << 8) + uint8[len - 1]
- parts.push(
- lookup[tmp >> 10] +
- lookup[(tmp >> 4) & 0x3F] +
- lookup[(tmp << 2) & 0x3F] +
- '='
- )
- }
-
- return parts.join('')
-}
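
And a round-trip sketch for the two converters above, using an illustrative string.

var bytes = toByteArray('aGVsbG8=') // [104, 101, 108, 108, 111], i.e. 'hello'
fromByteArray(bytes)                // 'aGVsbG8='
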
-
-},{}],9:[function(require,module,exports){
-
-},{}],10:[function(require,module,exports){
-(function (Buffer){(function (){
-/*!
- * The buffer module from node.js, for the browser.
- *
- * @author Feross Aboukhadijeh
- * @license MIT
- */
-/* eslint-disable no-proto */
-
-'use strict'
-
-var base64 = require('base64-js')
-var ieee754 = require('ieee754')
-
-exports.Buffer = Buffer
-exports.SlowBuffer = SlowBuffer
-exports.INSPECT_MAX_BYTES = 50
-
-var K_MAX_LENGTH = 0x7fffffff
-exports.kMaxLength = K_MAX_LENGTH
-
-/**
- * If `Buffer.TYPED_ARRAY_SUPPORT`:
- * === true Use Uint8Array implementation (fastest)
- * === false Print warning and recommend using `buffer` v4.x which has an Object
- * implementation (most compatible, even IE6)
- *
- * Browsers that support typed arrays are IE 10+, Firefox 4+, Chrome 7+, Safari 5.1+,
- * Opera 11.6+, iOS 4.2+.
- *
- * We report that the browser does not support typed arrays if they are not subclassable
- * using __proto__. Firefox 4-29 lacks support for adding new properties to `Uint8Array`
- * (See: https://bugzilla.mozilla.org/show_bug.cgi?id=695438). IE 10 lacks support
- * for __proto__ and has a buggy typed array implementation.
- */
-Buffer.TYPED_ARRAY_SUPPORT = typedArraySupport()
-
-if (!Buffer.TYPED_ARRAY_SUPPORT && typeof console !== 'undefined' &&
- typeof console.error === 'function') {
- console.error(
- 'This browser lacks typed array (Uint8Array) support which is required by ' +
- '`buffer` v5.x. Use `buffer` v4.x if you require old browser support.'
- )
-}
-
-function typedArraySupport () {
- // Can typed array instances be augmented?
- try {
- var arr = new Uint8Array(1)
- arr.__proto__ = { __proto__: Uint8Array.prototype, foo: function () { return 42 } }
- return arr.foo() === 42
- } catch (e) {
- return false
- }
-}
-
-Object.defineProperty(Buffer.prototype, 'parent', {
- enumerable: true,
- get: function () {
- if (!Buffer.isBuffer(this)) return undefined
- return this.buffer
- }
-})
-
-Object.defineProperty(Buffer.prototype, 'offset', {
- enumerable: true,
- get: function () {
- if (!Buffer.isBuffer(this)) return undefined
- return this.byteOffset
- }
-})
-
-function createBuffer (length) {
- if (length > K_MAX_LENGTH) {
- throw new RangeError('The value "' + length + '" is invalid for option "size"')
- }
- // Return an augmented `Uint8Array` instance
- var buf = new Uint8Array(length)
- buf.__proto__ = Buffer.prototype
- return buf
-}
-
-/**
- * The Buffer constructor returns instances of `Uint8Array` that have their
- * prototype changed to `Buffer.prototype`. Furthermore, `Buffer` is a subclass of
- * `Uint8Array`, so the returned instances will have all the node `Buffer` methods
- * and the `Uint8Array` methods. Square bracket notation works as expected -- it
- * returns a single octet.
- *
- * The `Uint8Array` prototype remains unmodified.
- */
-
-function Buffer (arg, encodingOrOffset, length) {
- // Common case.
- if (typeof arg === 'number') {
- if (typeof encodingOrOffset === 'string') {
- throw new TypeError(
- 'The "string" argument must be of type string. Received type number'
- )
- }
- return allocUnsafe(arg)
- }
- return from(arg, encodingOrOffset, length)
-}
-
-// Fix subarray() in ES2016. See: https://github.com/feross/buffer/pull/97
-if (typeof Symbol !== 'undefined' && Symbol.species != null &&
- Buffer[Symbol.species] === Buffer) {
- Object.defineProperty(Buffer, Symbol.species, {
- value: null,
- configurable: true,
- enumerable: false,
- writable: false
- })
-}
-
-Buffer.poolSize = 8192 // not used by this implementation
-
-function from (value, encodingOrOffset, length) {
- if (typeof value === 'string') {
- return fromString(value, encodingOrOffset)
- }
-
- if (ArrayBuffer.isView(value)) {
- return fromArrayLike(value)
- }
-
- if (value == null) {
- throw TypeError(
- 'The first argument must be one of type string, Buffer, ArrayBuffer, Array, ' +
- 'or Array-like Object. Received type ' + (typeof value)
- )
- }
-
- if (isInstance(value, ArrayBuffer) ||
- (value && isInstance(value.buffer, ArrayBuffer))) {
- return fromArrayBuffer(value, encodingOrOffset, length)
- }
-
- if (typeof value === 'number') {
- throw new TypeError(
- 'The "value" argument must not be of type number. Received type number'
- )
- }
-
- var valueOf = value.valueOf && value.valueOf()
- if (valueOf != null && valueOf !== value) {
- return Buffer.from(valueOf, encodingOrOffset, length)
- }
-
- var b = fromObject(value)
- if (b) return b
-
- if (typeof Symbol !== 'undefined' && Symbol.toPrimitive != null &&
- typeof value[Symbol.toPrimitive] === 'function') {
- return Buffer.from(
- value[Symbol.toPrimitive]('string'), encodingOrOffset, length
- )
- }
-
- throw new TypeError(
- 'The first argument must be one of type string, Buffer, ArrayBuffer, Array, ' +
- 'or Array-like Object. Received type ' + (typeof value)
- )
-}
-
-/**
- * Functionally equivalent to Buffer(arg, encoding) but throws a TypeError
- * if value is a number.
- * Buffer.from(str[, encoding])
- * Buffer.from(array)
- * Buffer.from(buffer)
- * Buffer.from(arrayBuffer[, byteOffset[, length]])
- **/
-Buffer.from = function (value, encodingOrOffset, length) {
- return from(value, encodingOrOffset, length)
-}
-
-// Note: Change prototype *after* Buffer.from is defined to workaround Chrome bug:
-// https://github.com/feross/buffer/pull/148
-Buffer.prototype.__proto__ = Uint8Array.prototype
-Buffer.__proto__ = Uint8Array
-
-function assertSize (size) {
- if (typeof size !== 'number') {
- throw new TypeError('"size" argument must be of type number')
- } else if (size < 0) {
- throw new RangeError('The value "' + size + '" is invalid for option "size"')
- }
-}
-
-function alloc (size, fill, encoding) {
- assertSize(size)
- if (size <= 0) {
- return createBuffer(size)
- }
- if (fill !== undefined) {
- // Only pay attention to encoding if it's a string. This
- // prevents accidentally sending in a number that would
- // be interpreted as a start offset.
- return typeof encoding === 'string'
- ? createBuffer(size).fill(fill, encoding)
- : createBuffer(size).fill(fill)
- }
- return createBuffer(size)
-}
-
-/**
- * Creates a new filled Buffer instance.
- * alloc(size[, fill[, encoding]])
- **/
-Buffer.alloc = function (size, fill, encoding) {
- return alloc(size, fill, encoding)
-}
-
-function allocUnsafe (size) {
- assertSize(size)
- return createBuffer(size < 0 ? 0 : checked(size) | 0)
-}
-
-/**
- * Equivalent to Buffer(num), by default creates a non-zero-filled Buffer instance.
- * */
-Buffer.allocUnsafe = function (size) {
- return allocUnsafe(size)
-}
-/**
- * Equivalent to SlowBuffer(num), by default creates a non-zero-filled Buffer instance.
- */
-Buffer.allocUnsafeSlow = function (size) {
- return allocUnsafe(size)
-}
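// Editor's note (not part of the bundled source): allocation sketch for the API
// defined above (variable names are illustrative).
var zeroed = Buffer.alloc(4)            // always zero-filled: 00 00 00 00
var bytes = Buffer.alloc(4, 0xff)       // ff ff ff ff
var letters = Buffer.alloc(4, 'a')      // fill pattern repeats: 61 61 61 61
var fast = Buffer.allocUnsafe(4)        // not zero-filled by contract, though this
                                        // shim backs it with a fresh (zeroed) Uint8Array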
-
-function fromString (string, encoding) {
- if (typeof encoding !== 'string' || encoding === '') {
- encoding = 'utf8'
- }
-
- if (!Buffer.isEncoding(encoding)) {
- throw new TypeError('Unknown encoding: ' + encoding)
- }
-
- var length = byteLength(string, encoding) | 0
- var buf = createBuffer(length)
-
- var actual = buf.write(string, encoding)
-
- if (actual !== length) {
- // Writing a hex string, for example, that contains invalid characters will
- // cause everything after the first invalid character to be ignored. (e.g.
- // 'abxxcd' will be treated as 'ab')
- buf = buf.slice(0, actual)
- }
-
- return buf
-}
-
-function fromArrayLike (array) {
- var length = array.length < 0 ? 0 : checked(array.length) | 0
- var buf = createBuffer(length)
- for (var i = 0; i < length; i += 1) {
- buf[i] = array[i] & 255
- }
- return buf
-}
-
-function fromArrayBuffer (array, byteOffset, length) {
- if (byteOffset < 0 || array.byteLength < byteOffset) {
- throw new RangeError('"offset" is outside of buffer bounds')
- }
-
- if (array.byteLength < byteOffset + (length || 0)) {
- throw new RangeError('"length" is outside of buffer bounds')
- }
-
- var buf
- if (byteOffset === undefined && length === undefined) {
- buf = new Uint8Array(array)
- } else if (length === undefined) {
- buf = new Uint8Array(array, byteOffset)
- } else {
- buf = new Uint8Array(array, byteOffset, length)
- }
-
- // Return an augmented `Uint8Array` instance
- buf.__proto__ = Buffer.prototype
- return buf
-}
-
-function fromObject (obj) {
- if (Buffer.isBuffer(obj)) {
- var len = checked(obj.length) | 0
- var buf = createBuffer(len)
-
- if (buf.length === 0) {
- return buf
- }
-
- obj.copy(buf, 0, 0, len)
- return buf
- }
-
- if (obj.length !== undefined) {
- if (typeof obj.length !== 'number' || numberIsNaN(obj.length)) {
- return createBuffer(0)
- }
- return fromArrayLike(obj)
- }
-
- if (obj.type === 'Buffer' && Array.isArray(obj.data)) {
- return fromArrayLike(obj.data)
- }
-}
-
-function checked (length) {
- // Note: cannot use `length < K_MAX_LENGTH` here because that fails when
- // length is NaN (which is otherwise coerced to zero.)
- if (length >= K_MAX_LENGTH) {
- throw new RangeError('Attempt to allocate Buffer larger than maximum ' +
- 'size: 0x' + K_MAX_LENGTH.toString(16) + ' bytes')
- }
- return length | 0
-}
-
-function SlowBuffer (length) {
- if (+length != length) { // eslint-disable-line eqeqeq
- length = 0
- }
- return Buffer.alloc(+length)
-}
-
-Buffer.isBuffer = function isBuffer (b) {
- return b != null && b._isBuffer === true &&
- b !== Buffer.prototype // so Buffer.isBuffer(Buffer.prototype) will be false
-}
-
-Buffer.compare = function compare (a, b) {
- if (isInstance(a, Uint8Array)) a = Buffer.from(a, a.offset, a.byteLength)
- if (isInstance(b, Uint8Array)) b = Buffer.from(b, b.offset, b.byteLength)
- if (!Buffer.isBuffer(a) || !Buffer.isBuffer(b)) {
- throw new TypeError(
- 'The "buf1", "buf2" arguments must be one of type Buffer or Uint8Array'
- )
- }
-
- if (a === b) return 0
-
- var x = a.length
- var y = b.length
-
- for (var i = 0, len = Math.min(x, y); i < len; ++i) {
- if (a[i] !== b[i]) {
- x = a[i]
- y = b[i]
- break
- }
- }
-
- if (x < y) return -1
- if (y < x) return 1
- return 0
-}
-
-Buffer.isEncoding = function isEncoding (encoding) {
- switch (String(encoding).toLowerCase()) {
- case 'hex':
- case 'utf8':
- case 'utf-8':
- case 'ascii':
- case 'latin1':
- case 'binary':
- case 'base64':
- case 'ucs2':
- case 'ucs-2':
- case 'utf16le':
- case 'utf-16le':
- return true
- default:
- return false
- }
-}
-
-Buffer.concat = function concat (list, length) {
- if (!Array.isArray(list)) {
- throw new TypeError('"list" argument must be an Array of Buffers')
- }
-
- if (list.length === 0) {
- return Buffer.alloc(0)
- }
-
- var i
- if (length === undefined) {
- length = 0
- for (i = 0; i < list.length; ++i) {
- length += list[i].length
- }
- }
-
- var buffer = Buffer.allocUnsafe(length)
- var pos = 0
- for (i = 0; i < list.length; ++i) {
- var buf = list[i]
- if (isInstance(buf, Uint8Array)) {
- buf = Buffer.from(buf)
- }
- if (!Buffer.isBuffer(buf)) {
- throw new TypeError('"list" argument must be an Array of Buffers')
- }
- buf.copy(buffer, pos)
- pos += buf.length
- }
- return buffer
-}
-
-function byteLength (string, encoding) {
- if (Buffer.isBuffer(string)) {
- return string.length
- }
- if (ArrayBuffer.isView(string) || isInstance(string, ArrayBuffer)) {
- return string.byteLength
- }
- if (typeof string !== 'string') {
- throw new TypeError(
- 'The "string" argument must be one of type string, Buffer, or ArrayBuffer. ' +
- 'Received type ' + typeof string
- )
- }
-
- var len = string.length
- var mustMatch = (arguments.length > 2 && arguments[2] === true)
- if (!mustMatch && len === 0) return 0
-
- // Use a for loop to avoid recursion
- var loweredCase = false
- for (;;) {
- switch (encoding) {
- case 'ascii':
- case 'latin1':
- case 'binary':
- return len
- case 'utf8':
- case 'utf-8':
- return utf8ToBytes(string).length
- case 'ucs2':
- case 'ucs-2':
- case 'utf16le':
- case 'utf-16le':
- return len * 2
- case 'hex':
- return len >>> 1
- case 'base64':
- return base64ToBytes(string).length
- default:
- if (loweredCase) {
- return mustMatch ? -1 : utf8ToBytes(string).length // assume utf8
- }
- encoding = ('' + encoding).toLowerCase()
- loweredCase = true
- }
- }
-}
-Buffer.byteLength = byteLength
-
-function slowToString (encoding, start, end) {
- var loweredCase = false
-
- // No need to verify that "this.length <= MAX_UINT32" since it's a read-only
- // property of a typed array.
-
- // This behaves neither like String nor Uint8Array in that we set start/end
- // to their upper/lower bounds if the value passed is out of range.
- // undefined is handled specially as per ECMA-262 6th Edition,
- // Section 13.3.3.7 Runtime Semantics: KeyedBindingInitialization.
- if (start === undefined || start < 0) {
- start = 0
- }
- // Return early if start > this.length. Done here to prevent potential uint32
- // coercion fail below.
- if (start > this.length) {
- return ''
- }
-
- if (end === undefined || end > this.length) {
- end = this.length
- }
-
- if (end <= 0) {
- return ''
- }
-
- // Force coercion to uint32. This will also coerce falsy/NaN values to 0.
- end >>>= 0
- start >>>= 0
-
- if (end <= start) {
- return ''
- }
-
- if (!encoding) encoding = 'utf8'
-
- while (true) {
- switch (encoding) {
- case 'hex':
- return hexSlice(this, start, end)
-
- case 'utf8':
- case 'utf-8':
- return utf8Slice(this, start, end)
-
- case 'ascii':
- return asciiSlice(this, start, end)
-
- case 'latin1':
- case 'binary':
- return latin1Slice(this, start, end)
-
- case 'base64':
- return base64Slice(this, start, end)
-
- case 'ucs2':
- case 'ucs-2':
- case 'utf16le':
- case 'utf-16le':
- return utf16leSlice(this, start, end)
-
- default:
- if (loweredCase) throw new TypeError('Unknown encoding: ' + encoding)
- encoding = (encoding + '').toLowerCase()
- loweredCase = true
- }
- }
-}
-
-// This property is used by `Buffer.isBuffer` (and the `is-buffer` npm package)
-// to detect a Buffer instance. It's not possible to use `instanceof Buffer`
-// reliably in a browserify context because there could be multiple different
-// copies of the 'buffer' package in use. This method works even for Buffer
-// instances that were created from another copy of the `buffer` package.
-// See: https://github.com/feross/buffer/issues/154
-Buffer.prototype._isBuffer = true
-
-function swap (b, n, m) {
- var i = b[n]
- b[n] = b[m]
- b[m] = i
-}
-
-Buffer.prototype.swap16 = function swap16 () {
- var len = this.length
- if (len % 2 !== 0) {
- throw new RangeError('Buffer size must be a multiple of 16-bits')
- }
- for (var i = 0; i < len; i += 2) {
- swap(this, i, i + 1)
- }
- return this
-}
-
-Buffer.prototype.swap32 = function swap32 () {
- var len = this.length
- if (len % 4 !== 0) {
- throw new RangeError('Buffer size must be a multiple of 32-bits')
- }
- for (var i = 0; i < len; i += 4) {
- swap(this, i, i + 3)
- swap(this, i + 1, i + 2)
- }
- return this
-}
-
-Buffer.prototype.swap64 = function swap64 () {
- var len = this.length
- if (len % 8 !== 0) {
- throw new RangeError('Buffer size must be a multiple of 64-bits')
- }
- for (var i = 0; i < len; i += 8) {
- swap(this, i, i + 7)
- swap(this, i + 1, i + 6)
- swap(this, i + 2, i + 5)
- swap(this, i + 3, i + 4)
- }
- return this
-}
-
-Buffer.prototype.toString = function toString () {
- var length = this.length
- if (length === 0) return ''
- if (arguments.length === 0) return utf8Slice(this, 0, length)
- return slowToString.apply(this, arguments)
-}
-
-Buffer.prototype.toLocaleString = Buffer.prototype.toString
-
-Buffer.prototype.equals = function equals (b) {
- if (!Buffer.isBuffer(b)) throw new TypeError('Argument must be a Buffer')
- if (this === b) return true
- return Buffer.compare(this, b) === 0
-}
-
-Buffer.prototype.inspect = function inspect () {
- var str = ''
- var max = exports.INSPECT_MAX_BYTES
- str = this.toString('hex', 0, max).replace(/(.{2})/g, '$1 ').trim()
- if (this.length > max) str += ' ... '
- return '<Buffer ' + str + '>'
-}
-
-Buffer.prototype.compare = function compare (target, start, end, thisStart, thisEnd) {
- if (isInstance(target, Uint8Array)) {
- target = Buffer.from(target, target.offset, target.byteLength)
- }
- if (!Buffer.isBuffer(target)) {
- throw new TypeError(
- 'The "target" argument must be one of type Buffer or Uint8Array. ' +
- 'Received type ' + (typeof target)
- )
- }
-
- if (start === undefined) {
- start = 0
- }
- if (end === undefined) {
- end = target ? target.length : 0
- }
- if (thisStart === undefined) {
- thisStart = 0
- }
- if (thisEnd === undefined) {
- thisEnd = this.length
- }
-
- if (start < 0 || end > target.length || thisStart < 0 || thisEnd > this.length) {
- throw new RangeError('out of range index')
- }
-
- if (thisStart >= thisEnd && start >= end) {
- return 0
- }
- if (thisStart >= thisEnd) {
- return -1
- }
- if (start >= end) {
- return 1
- }
-
- start >>>= 0
- end >>>= 0
- thisStart >>>= 0
- thisEnd >>>= 0
-
- if (this === target) return 0
-
- var x = thisEnd - thisStart
- var y = end - start
- var len = Math.min(x, y)
-
- var thisCopy = this.slice(thisStart, thisEnd)
- var targetCopy = target.slice(start, end)
-
- for (var i = 0; i < len; ++i) {
- if (thisCopy[i] !== targetCopy[i]) {
- x = thisCopy[i]
- y = targetCopy[i]
- break
- }
- }
-
- if (x < y) return -1
- if (y < x) return 1
- return 0
-}
-
-// Finds either the first index of `val` in `buffer` at offset >= `byteOffset`,
-// OR the last index of `val` in `buffer` at offset <= `byteOffset`.
-//
-// Arguments:
-// - buffer - a Buffer to search
-// - val - a string, Buffer, or number
-// - byteOffset - an index into `buffer`; will be clamped to an int32
- // - encoding - an optional encoding, relevant if val is a string
-// - dir - true for indexOf, false for lastIndexOf
-function bidirectionalIndexOf (buffer, val, byteOffset, encoding, dir) {
- // Empty buffer means no match
- if (buffer.length === 0) return -1
-
- // Normalize byteOffset
- if (typeof byteOffset === 'string') {
- encoding = byteOffset
- byteOffset = 0
- } else if (byteOffset > 0x7fffffff) {
- byteOffset = 0x7fffffff
- } else if (byteOffset < -0x80000000) {
- byteOffset = -0x80000000
- }
- byteOffset = +byteOffset // Coerce to Number.
- if (numberIsNaN(byteOffset)) {
- // byteOffset: if it's undefined, null, NaN, "foo", etc, search the whole buffer
- byteOffset = dir ? 0 : (buffer.length - 1)
- }
-
- // Normalize byteOffset: negative offsets start from the end of the buffer
- if (byteOffset < 0) byteOffset = buffer.length + byteOffset
- if (byteOffset >= buffer.length) {
- if (dir) return -1
- else byteOffset = buffer.length - 1
- } else if (byteOffset < 0) {
- if (dir) byteOffset = 0
- else return -1
- }
-
- // Normalize val
- if (typeof val === 'string') {
- val = Buffer.from(val, encoding)
- }
-
- // Finally, search either indexOf (if dir is true) or lastIndexOf
- if (Buffer.isBuffer(val)) {
- // Special case: looking for empty string/buffer always fails
- if (val.length === 0) {
- return -1
- }
- return arrayIndexOf(buffer, val, byteOffset, encoding, dir)
- } else if (typeof val === 'number') {
- val = val & 0xFF // Search for a byte value [0-255]
- if (typeof Uint8Array.prototype.indexOf === 'function') {
- if (dir) {
- return Uint8Array.prototype.indexOf.call(buffer, val, byteOffset)
- } else {
- return Uint8Array.prototype.lastIndexOf.call(buffer, val, byteOffset)
- }
- }
- return arrayIndexOf(buffer, [ val ], byteOffset, encoding, dir)
- }
-
- throw new TypeError('val must be string, number or Buffer')
-}
-
-function arrayIndexOf (arr, val, byteOffset, encoding, dir) {
- var indexSize = 1
- var arrLength = arr.length
- var valLength = val.length
-
- if (encoding !== undefined) {
- encoding = String(encoding).toLowerCase()
- if (encoding === 'ucs2' || encoding === 'ucs-2' ||
- encoding === 'utf16le' || encoding === 'utf-16le') {
- if (arr.length < 2 || val.length < 2) {
- return -1
- }
- indexSize = 2
- arrLength /= 2
- valLength /= 2
- byteOffset /= 2
- }
- }
-
- function read (buf, i) {
- if (indexSize === 1) {
- return buf[i]
- } else {
- return buf.readUInt16BE(i * indexSize)
- }
- }
-
- var i
- if (dir) {
- var foundIndex = -1
- for (i = byteOffset; i < arrLength; i++) {
- if (read(arr, i) === read(val, foundIndex === -1 ? 0 : i - foundIndex)) {
- if (foundIndex === -1) foundIndex = i
- if (i - foundIndex + 1 === valLength) return foundIndex * indexSize
- } else {
- if (foundIndex !== -1) i -= i - foundIndex
- foundIndex = -1
- }
- }
- } else {
- if (byteOffset + valLength > arrLength) byteOffset = arrLength - valLength
- for (i = byteOffset; i >= 0; i--) {
- var found = true
- for (var j = 0; j < valLength; j++) {
- if (read(arr, i + j) !== read(val, j)) {
- found = false
- break
- }
- }
- if (found) return i
- }
- }
-
- return -1
-}
-
-Buffer.prototype.includes = function includes (val, byteOffset, encoding) {
- return this.indexOf(val, byteOffset, encoding) !== -1
-}
-
-Buffer.prototype.indexOf = function indexOf (val, byteOffset, encoding) {
- return bidirectionalIndexOf(this, val, byteOffset, encoding, true)
-}
-
-Buffer.prototype.lastIndexOf = function lastIndexOf (val, byteOffset, encoding) {
- return bidirectionalIndexOf(this, val, byteOffset, encoding, false)
-}
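// Editor's note (not part of the bundled source): search sketch for the
// bidirectional index-of above (variable names are illustrative).
var haystack = Buffer.from('abcabc')
haystack.indexOf('b')                   // 1, forward scan (dir = true)
haystack.lastIndexOf('b')               // 4, backward scan (dir = false)
haystack.indexOf(0x63)                  // 2, numbers are masked to a byte value
haystack.includes(Buffer.from('ca'))    // true, match starts at index 2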
-
-function hexWrite (buf, string, offset, length) {
- offset = Number(offset) || 0
- var remaining = buf.length - offset
- if (!length) {
- length = remaining
- } else {
- length = Number(length)
- if (length > remaining) {
- length = remaining
- }
- }
-
- var strLen = string.length
-
- if (length > strLen / 2) {
- length = strLen / 2
- }
- for (var i = 0; i < length; ++i) {
- var parsed = parseInt(string.substr(i * 2, 2), 16)
- if (numberIsNaN(parsed)) return i
- buf[offset + i] = parsed
- }
- return i
-}
-
-function utf8Write (buf, string, offset, length) {
- return blitBuffer(utf8ToBytes(string, buf.length - offset), buf, offset, length)
-}
-
-function asciiWrite (buf, string, offset, length) {
- return blitBuffer(asciiToBytes(string), buf, offset, length)
-}
-
-function latin1Write (buf, string, offset, length) {
- return asciiWrite(buf, string, offset, length)
-}
-
-function base64Write (buf, string, offset, length) {
- return blitBuffer(base64ToBytes(string), buf, offset, length)
-}
-
-function ucs2Write (buf, string, offset, length) {
- return blitBuffer(utf16leToBytes(string, buf.length - offset), buf, offset, length)
-}
-
-Buffer.prototype.write = function write (string, offset, length, encoding) {
- // Buffer#write(string)
- if (offset === undefined) {
- encoding = 'utf8'
- length = this.length
- offset = 0
- // Buffer#write(string, encoding)
- } else if (length === undefined && typeof offset === 'string') {
- encoding = offset
- length = this.length
- offset = 0
- // Buffer#write(string, offset[, length][, encoding])
- } else if (isFinite(offset)) {
- offset = offset >>> 0
- if (isFinite(length)) {
- length = length >>> 0
- if (encoding === undefined) encoding = 'utf8'
- } else {
- encoding = length
- length = undefined
- }
- } else {
- throw new Error(
- 'Buffer.write(string, encoding, offset[, length]) is no longer supported'
- )
- }
-
- var remaining = this.length - offset
- if (length === undefined || length > remaining) length = remaining
-
- if ((string.length > 0 && (length < 0 || offset < 0)) || offset > this.length) {
- throw new RangeError('Attempt to write outside buffer bounds')
- }
-
- if (!encoding) encoding = 'utf8'
-
- var loweredCase = false
- for (;;) {
- switch (encoding) {
- case 'hex':
- return hexWrite(this, string, offset, length)
-
- case 'utf8':
- case 'utf-8':
- return utf8Write(this, string, offset, length)
-
- case 'ascii':
- return asciiWrite(this, string, offset, length)
-
- case 'latin1':
- case 'binary':
- return latin1Write(this, string, offset, length)
-
- case 'base64':
- // Warning: maxLength not taken into account in base64Write
- return base64Write(this, string, offset, length)
-
- case 'ucs2':
- case 'ucs-2':
- case 'utf16le':
- case 'utf-16le':
- return ucs2Write(this, string, offset, length)
-
- default:
- if (loweredCase) throw new TypeError('Unknown encoding: ' + encoding)
- encoding = ('' + encoding).toLowerCase()
- loweredCase = true
- }
- }
-}
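// Editor's note (not part of the bundled source): the call shapes accepted by
// write() above (variable names are illustrative).
var out = Buffer.alloc(8)
out.write('hi')                              // write(string): utf8 at offset 0
out.write('hi', 'utf8')                      // write(string, encoding)
out.write('hi', 4)                           // write(string, offset)
var written = out.write('6869', 2, 2, 'hex') // write(string, offset, length, encoding)
// each form returns the number of bytes written (written === 2 here)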
-
-Buffer.prototype.toJSON = function toJSON () {
- return {
- type: 'Buffer',
- data: Array.prototype.slice.call(this._arr || this, 0)
- }
-}
-
-function base64Slice (buf, start, end) {
- if (start === 0 && end === buf.length) {
- return base64.fromByteArray(buf)
- } else {
- return base64.fromByteArray(buf.slice(start, end))
- }
-}
-
-function utf8Slice (buf, start, end) {
- end = Math.min(buf.length, end)
- var res = []
-
- var i = start
- while (i < end) {
- var firstByte = buf[i]
- var codePoint = null
- var bytesPerSequence = (firstByte > 0xEF) ? 4
- : (firstByte > 0xDF) ? 3
- : (firstByte > 0xBF) ? 2
- : 1
-
- if (i + bytesPerSequence <= end) {
- var secondByte, thirdByte, fourthByte, tempCodePoint
-
- switch (bytesPerSequence) {
- case 1:
- if (firstByte < 0x80) {
- codePoint = firstByte
- }
- break
- case 2:
- secondByte = buf[i + 1]
- if ((secondByte & 0xC0) === 0x80) {
- tempCodePoint = (firstByte & 0x1F) << 0x6 | (secondByte & 0x3F)
- if (tempCodePoint > 0x7F) {
- codePoint = tempCodePoint
- }
- }
- break
- case 3:
- secondByte = buf[i + 1]
- thirdByte = buf[i + 2]
- if ((secondByte & 0xC0) === 0x80 && (thirdByte & 0xC0) === 0x80) {
- tempCodePoint = (firstByte & 0xF) << 0xC | (secondByte & 0x3F) << 0x6 | (thirdByte & 0x3F)
- if (tempCodePoint > 0x7FF && (tempCodePoint < 0xD800 || tempCodePoint > 0xDFFF)) {
- codePoint = tempCodePoint
- }
- }
- break
- case 4:
- secondByte = buf[i + 1]
- thirdByte = buf[i + 2]
- fourthByte = buf[i + 3]
- if ((secondByte & 0xC0) === 0x80 && (thirdByte & 0xC0) === 0x80 && (fourthByte & 0xC0) === 0x80) {
- tempCodePoint = (firstByte & 0xF) << 0x12 | (secondByte & 0x3F) << 0xC | (thirdByte & 0x3F) << 0x6 | (fourthByte & 0x3F)
- if (tempCodePoint > 0xFFFF && tempCodePoint < 0x110000) {
- codePoint = tempCodePoint
- }
- }
- }
- }
-
- if (codePoint === null) {
- // we did not generate a valid codePoint so insert a
- // replacement char (U+FFFD) and advance only 1 byte
- codePoint = 0xFFFD
- bytesPerSequence = 1
- } else if (codePoint > 0xFFFF) {
- // encode to utf16 (surrogate pair dance)
- codePoint -= 0x10000
- res.push(codePoint >>> 10 & 0x3FF | 0xD800)
- codePoint = 0xDC00 | codePoint & 0x3FF
- }
-
- res.push(codePoint)
- i += bytesPerSequence
- }
-
- return decodeCodePointsArray(res)
-}
-
-// Based on http://stackoverflow.com/a/22747272/680742, the browser with
-// the lowest limit is Chrome, with 0x10000 args.
-// We go 1 magnitude less, for safety
-var MAX_ARGUMENTS_LENGTH = 0x1000
-
-function decodeCodePointsArray (codePoints) {
- var len = codePoints.length
- if (len <= MAX_ARGUMENTS_LENGTH) {
- return String.fromCharCode.apply(String, codePoints) // avoid extra slice()
- }
-
- // Decode in chunks to avoid "call stack size exceeded".
- var res = ''
- var i = 0
- while (i < len) {
- res += String.fromCharCode.apply(
- String,
- codePoints.slice(i, i += MAX_ARGUMENTS_LENGTH)
- )
- }
- return res
-}
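// Editor's note (not part of the bundled source): the chunking above exists
// because spreading a huge array into String.fromCharCode can exceed engine
// argument/stack limits; the decoder slices MAX_ARGUMENTS_LENGTH code points
// at a time instead.
var big = new Array(0x100000).fill(0x61)     // ~1M code points
// String.fromCharCode.apply(String, big)    // may blow the argument limit in some engines
var ok = decodeCodePointsArray(big)          // decoded safely, 0x1000 at a time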
-
-function asciiSlice (buf, start, end) {
- var ret = ''
- end = Math.min(buf.length, end)
-
- for (var i = start; i < end; ++i) {
- ret += String.fromCharCode(buf[i] & 0x7F)
- }
- return ret
-}
-
-function latin1Slice (buf, start, end) {
- var ret = ''
- end = Math.min(buf.length, end)
-
- for (var i = start; i < end; ++i) {
- ret += String.fromCharCode(buf[i])
- }
- return ret
-}
-
-function hexSlice (buf, start, end) {
- var len = buf.length
-
- if (!start || start < 0) start = 0
- if (!end || end < 0 || end > len) end = len
-
- var out = ''
- for (var i = start; i < end; ++i) {
- out += toHex(buf[i])
- }
- return out
-}
-
-function utf16leSlice (buf, start, end) {
- var bytes = buf.slice(start, end)
- var res = ''
- for (var i = 0; i < bytes.length; i += 2) {
- res += String.fromCharCode(bytes[i] + (bytes[i + 1] * 256))
- }
- return res
-}
-
-Buffer.prototype.slice = function slice (start, end) {
- var len = this.length
- start = ~~start
- end = end === undefined ? len : ~~end
-
- if (start < 0) {
- start += len
- if (start < 0) start = 0
- } else if (start > len) {
- start = len
- }
-
- if (end < 0) {
- end += len
- if (end < 0) end = 0
- } else if (end > len) {
- end = len
- }
-
- if (end < start) end = start
-
- var newBuf = this.subarray(start, end)
- // Return an augmented `Uint8Array` instance
- newBuf.__proto__ = Buffer.prototype
- return newBuf
-}
-
-/*
- * Need to make sure that buffer isn't trying to write out of bounds.
- */
-function checkOffset (offset, ext, length) {
- if ((offset % 1) !== 0 || offset < 0) throw new RangeError('offset is not uint')
- if (offset + ext > length) throw new RangeError('Trying to access beyond buffer length')
-}
-
-Buffer.prototype.readUIntLE = function readUIntLE (offset, byteLength, noAssert) {
- offset = offset >>> 0
- byteLength = byteLength >>> 0
- if (!noAssert) checkOffset(offset, byteLength, this.length)
-
- var val = this[offset]
- var mul = 1
- var i = 0
- while (++i < byteLength && (mul *= 0x100)) {
- val += this[offset + i] * mul
- }
-
- return val
-}
-
-Buffer.prototype.readUIntBE = function readUIntBE (offset, byteLength, noAssert) {
- offset = offset >>> 0
- byteLength = byteLength >>> 0
- if (!noAssert) {
- checkOffset(offset, byteLength, this.length)
- }
-
- var val = this[offset + --byteLength]
- var mul = 1
- while (byteLength > 0 && (mul *= 0x100)) {
- val += this[offset + --byteLength] * mul
- }
-
- return val
-}
-
-Buffer.prototype.readUInt8 = function readUInt8 (offset, noAssert) {
- offset = offset >>> 0
- if (!noAssert) checkOffset(offset, 1, this.length)
- return this[offset]
-}
-
-Buffer.prototype.readUInt16LE = function readUInt16LE (offset, noAssert) {
- offset = offset >>> 0
- if (!noAssert) checkOffset(offset, 2, this.length)
- return this[offset] | (this[offset + 1] << 8)
-}
-
-Buffer.prototype.readUInt16BE = function readUInt16BE (offset, noAssert) {
- offset = offset >>> 0
- if (!noAssert) checkOffset(offset, 2, this.length)
- return (this[offset] << 8) | this[offset + 1]
-}
-
-Buffer.prototype.readUInt32LE = function readUInt32LE (offset, noAssert) {
- offset = offset >>> 0
- if (!noAssert) checkOffset(offset, 4, this.length)
-
- return ((this[offset]) |
- (this[offset + 1] << 8) |
- (this[offset + 2] << 16)) +
- (this[offset + 3] * 0x1000000)
-}
-
-Buffer.prototype.readUInt32BE = function readUInt32BE (offset, noAssert) {
- offset = offset >>> 0
- if (!noAssert) checkOffset(offset, 4, this.length)
-
- return (this[offset] * 0x1000000) +
- ((this[offset + 1] << 16) |
- (this[offset + 2] << 8) |
- this[offset + 3])
-}
-
-Buffer.prototype.readIntLE = function readIntLE (offset, byteLength, noAssert) {
- offset = offset >>> 0
- byteLength = byteLength >>> 0
- if (!noAssert) checkOffset(offset, byteLength, this.length)
-
- var val = this[offset]
- var mul = 1
- var i = 0
- while (++i < byteLength && (mul *= 0x100)) {
- val += this[offset + i] * mul
- }
- mul *= 0x80
-
- if (val >= mul) val -= Math.pow(2, 8 * byteLength)
-
- return val
-}
-
-Buffer.prototype.readIntBE = function readIntBE (offset, byteLength, noAssert) {
- offset = offset >>> 0
- byteLength = byteLength >>> 0
- if (!noAssert) checkOffset(offset, byteLength, this.length)
-
- var i = byteLength
- var mul = 1
- var val = this[offset + --i]
- while (i > 0 && (mul *= 0x100)) {
- val += this[offset + --i] * mul
- }
- mul *= 0x80
-
- if (val >= mul) val -= Math.pow(2, 8 * byteLength)
-
- return val
-}
-
-Buffer.prototype.readInt8 = function readInt8 (offset, noAssert) {
- offset = offset >>> 0
- if (!noAssert) checkOffset(offset, 1, this.length)
- if (!(this[offset] & 0x80)) return (this[offset])
- return ((0xff - this[offset] + 1) * -1)
-}
-
-Buffer.prototype.readInt16LE = function readInt16LE (offset, noAssert) {
- offset = offset >>> 0
- if (!noAssert) checkOffset(offset, 2, this.length)
- var val = this[offset] | (this[offset + 1] << 8)
- return (val & 0x8000) ? val | 0xFFFF0000 : val
-}
-
-Buffer.prototype.readInt16BE = function readInt16BE (offset, noAssert) {
- offset = offset >>> 0
- if (!noAssert) checkOffset(offset, 2, this.length)
- var val = this[offset + 1] | (this[offset] << 8)
- return (val & 0x8000) ? val | 0xFFFF0000 : val
-}
-
-Buffer.prototype.readInt32LE = function readInt32LE (offset, noAssert) {
- offset = offset >>> 0
- if (!noAssert) checkOffset(offset, 4, this.length)
-
- return (this[offset]) |
- (this[offset + 1] << 8) |
- (this[offset + 2] << 16) |
- (this[offset + 3] << 24)
-}
-
-Buffer.prototype.readInt32BE = function readInt32BE (offset, noAssert) {
- offset = offset >>> 0
- if (!noAssert) checkOffset(offset, 4, this.length)
-
- return (this[offset] << 24) |
- (this[offset + 1] << 16) |
- (this[offset + 2] << 8) |
- (this[offset + 3])
-}
-
-Buffer.prototype.readFloatLE = function readFloatLE (offset, noAssert) {
- offset = offset >>> 0
- if (!noAssert) checkOffset(offset, 4, this.length)
- return ieee754.read(this, offset, true, 23, 4)
-}
-
-Buffer.prototype.readFloatBE = function readFloatBE (offset, noAssert) {
- offset = offset >>> 0
- if (!noAssert) checkOffset(offset, 4, this.length)
- return ieee754.read(this, offset, false, 23, 4)
-}
-
-Buffer.prototype.readDoubleLE = function readDoubleLE (offset, noAssert) {
- offset = offset >>> 0
- if (!noAssert) checkOffset(offset, 8, this.length)
- return ieee754.read(this, offset, true, 52, 8)
-}
-
-Buffer.prototype.readDoubleBE = function readDoubleBE (offset, noAssert) {
- offset = offset >>> 0
- if (!noAssert) checkOffset(offset, 8, this.length)
- return ieee754.read(this, offset, false, 52, 8)
-}
-
-function checkInt (buf, value, offset, ext, max, min) {
- if (!Buffer.isBuffer(buf)) throw new TypeError('"buffer" argument must be a Buffer instance')
- if (value > max || value < min) throw new RangeError('"value" argument is out of bounds')
- if (offset + ext > buf.length) throw new RangeError('Index out of range')
-}
-
-Buffer.prototype.writeUIntLE = function writeUIntLE (value, offset, byteLength, noAssert) {
- value = +value
- offset = offset >>> 0
- byteLength = byteLength >>> 0
- if (!noAssert) {
- var maxBytes = Math.pow(2, 8 * byteLength) - 1
- checkInt(this, value, offset, byteLength, maxBytes, 0)
- }
-
- var mul = 1
- var i = 0
- this[offset] = value & 0xFF
- while (++i < byteLength && (mul *= 0x100)) {
- this[offset + i] = (value / mul) & 0xFF
- }
-
- return offset + byteLength
-}
-
-Buffer.prototype.writeUIntBE = function writeUIntBE (value, offset, byteLength, noAssert) {
- value = +value
- offset = offset >>> 0
- byteLength = byteLength >>> 0
- if (!noAssert) {
- var maxBytes = Math.pow(2, 8 * byteLength) - 1
- checkInt(this, value, offset, byteLength, maxBytes, 0)
- }
-
- var i = byteLength - 1
- var mul = 1
- this[offset + i] = value & 0xFF
- while (--i >= 0 && (mul *= 0x100)) {
- this[offset + i] = (value / mul) & 0xFF
- }
-
- return offset + byteLength
-}
-
-Buffer.prototype.writeUInt8 = function writeUInt8 (value, offset, noAssert) {
- value = +value
- offset = offset >>> 0
- if (!noAssert) checkInt(this, value, offset, 1, 0xff, 0)
- this[offset] = (value & 0xff)
- return offset + 1
-}
-
-Buffer.prototype.writeUInt16LE = function writeUInt16LE (value, offset, noAssert) {
- value = +value
- offset = offset >>> 0
- if (!noAssert) checkInt(this, value, offset, 2, 0xffff, 0)
- this[offset] = (value & 0xff)
- this[offset + 1] = (value >>> 8)
- return offset + 2
-}
-
-Buffer.prototype.writeUInt16BE = function writeUInt16BE (value, offset, noAssert) {
- value = +value
- offset = offset >>> 0
- if (!noAssert) checkInt(this, value, offset, 2, 0xffff, 0)
- this[offset] = (value >>> 8)
- this[offset + 1] = (value & 0xff)
- return offset + 2
-}
-
-Buffer.prototype.writeUInt32LE = function writeUInt32LE (value, offset, noAssert) {
- value = +value
- offset = offset >>> 0
- if (!noAssert) checkInt(this, value, offset, 4, 0xffffffff, 0)
- this[offset + 3] = (value >>> 24)
- this[offset + 2] = (value >>> 16)
- this[offset + 1] = (value >>> 8)
- this[offset] = (value & 0xff)
- return offset + 4
-}
-
-Buffer.prototype.writeUInt32BE = function writeUInt32BE (value, offset, noAssert) {
- value = +value
- offset = offset >>> 0
- if (!noAssert) checkInt(this, value, offset, 4, 0xffffffff, 0)
- this[offset] = (value >>> 24)
- this[offset + 1] = (value >>> 16)
- this[offset + 2] = (value >>> 8)
- this[offset + 3] = (value & 0xff)
- return offset + 4
-}
-
-Buffer.prototype.writeIntLE = function writeIntLE (value, offset, byteLength, noAssert) {
- value = +value
- offset = offset >>> 0
- if (!noAssert) {
- var limit = Math.pow(2, (8 * byteLength) - 1)
-
- checkInt(this, value, offset, byteLength, limit - 1, -limit)
- }
-
- var i = 0
- var mul = 1
- var sub = 0
- this[offset] = value & 0xFF
- while (++i < byteLength && (mul *= 0x100)) {
- if (value < 0 && sub === 0 && this[offset + i - 1] !== 0) {
- sub = 1
- }
- this[offset + i] = ((value / mul) >> 0) - sub & 0xFF
- }
-
- return offset + byteLength
-}
-
-Buffer.prototype.writeIntBE = function writeIntBE (value, offset, byteLength, noAssert) {
- value = +value
- offset = offset >>> 0
- if (!noAssert) {
- var limit = Math.pow(2, (8 * byteLength) - 1)
-
- checkInt(this, value, offset, byteLength, limit - 1, -limit)
- }
-
- var i = byteLength - 1
- var mul = 1
- var sub = 0
- this[offset + i] = value & 0xFF
- while (--i >= 0 && (mul *= 0x100)) {
- if (value < 0 && sub === 0 && this[offset + i + 1] !== 0) {
- sub = 1
- }
- this[offset + i] = ((value / mul) >> 0) - sub & 0xFF
- }
-
- return offset + byteLength
-}
-
-Buffer.prototype.writeInt8 = function writeInt8 (value, offset, noAssert) {
- value = +value
- offset = offset >>> 0
- if (!noAssert) checkInt(this, value, offset, 1, 0x7f, -0x80)
- if (value < 0) value = 0xff + value + 1
- this[offset] = (value & 0xff)
- return offset + 1
-}
-
-Buffer.prototype.writeInt16LE = function writeInt16LE (value, offset, noAssert) {
- value = +value
- offset = offset >>> 0
- if (!noAssert) checkInt(this, value, offset, 2, 0x7fff, -0x8000)
- this[offset] = (value & 0xff)
- this[offset + 1] = (value >>> 8)
- return offset + 2
-}
-
-Buffer.prototype.writeInt16BE = function writeInt16BE (value, offset, noAssert) {
- value = +value
- offset = offset >>> 0
- if (!noAssert) checkInt(this, value, offset, 2, 0x7fff, -0x8000)
- this[offset] = (value >>> 8)
- this[offset + 1] = (value & 0xff)
- return offset + 2
-}
-
-Buffer.prototype.writeInt32LE = function writeInt32LE (value, offset, noAssert) {
- value = +value
- offset = offset >>> 0
- if (!noAssert) checkInt(this, value, offset, 4, 0x7fffffff, -0x80000000)
- this[offset] = (value & 0xff)
- this[offset + 1] = (value >>> 8)
- this[offset + 2] = (value >>> 16)
- this[offset + 3] = (value >>> 24)
- return offset + 4
-}
-
-Buffer.prototype.writeInt32BE = function writeInt32BE (value, offset, noAssert) {
- value = +value
- offset = offset >>> 0
- if (!noAssert) checkInt(this, value, offset, 4, 0x7fffffff, -0x80000000)
- if (value < 0) value = 0xffffffff + value + 1
- this[offset] = (value >>> 24)
- this[offset + 1] = (value >>> 16)
- this[offset + 2] = (value >>> 8)
- this[offset + 3] = (value & 0xff)
- return offset + 4
-}
-
-function checkIEEE754 (buf, value, offset, ext, max, min) {
- if (offset + ext > buf.length) throw new RangeError('Index out of range')
- if (offset < 0) throw new RangeError('Index out of range')
-}
-
-function writeFloat (buf, value, offset, littleEndian, noAssert) {
- value = +value
- offset = offset >>> 0
- if (!noAssert) {
- checkIEEE754(buf, value, offset, 4, 3.4028234663852886e+38, -3.4028234663852886e+38)
- }
- ieee754.write(buf, value, offset, littleEndian, 23, 4)
- return offset + 4
-}
-
-Buffer.prototype.writeFloatLE = function writeFloatLE (value, offset, noAssert) {
- return writeFloat(this, value, offset, true, noAssert)
-}
-
-Buffer.prototype.writeFloatBE = function writeFloatBE (value, offset, noAssert) {
- return writeFloat(this, value, offset, false, noAssert)
-}
-
-function writeDouble (buf, value, offset, littleEndian, noAssert) {
- value = +value
- offset = offset >>> 0
- if (!noAssert) {
- checkIEEE754(buf, value, offset, 8, 1.7976931348623157E+308, -1.7976931348623157E+308)
- }
- ieee754.write(buf, value, offset, littleEndian, 52, 8)
- return offset + 8
-}
-
-Buffer.prototype.writeDoubleLE = function writeDoubleLE (value, offset, noAssert) {
- return writeDouble(this, value, offset, true, noAssert)
-}
-
-Buffer.prototype.writeDoubleBE = function writeDoubleBE (value, offset, noAssert) {
- return writeDouble(this, value, offset, false, noAssert)
-}
-
-// copy(targetBuffer, targetStart=0, sourceStart=0, sourceEnd=buffer.length)
-Buffer.prototype.copy = function copy (target, targetStart, start, end) {
- if (!Buffer.isBuffer(target)) throw new TypeError('argument should be a Buffer')
- if (!start) start = 0
- if (!end && end !== 0) end = this.length
- if (targetStart >= target.length) targetStart = target.length
- if (!targetStart) targetStart = 0
- if (end > 0 && end < start) end = start
-
- // Copy 0 bytes; we're done
- if (end === start) return 0
- if (target.length === 0 || this.length === 0) return 0
-
- // Fatal error conditions
- if (targetStart < 0) {
- throw new RangeError('targetStart out of bounds')
- }
- if (start < 0 || start >= this.length) throw new RangeError('Index out of range')
- if (end < 0) throw new RangeError('sourceEnd out of bounds')
-
- // Are we oob?
- if (end > this.length) end = this.length
- if (target.length - targetStart < end - start) {
- end = target.length - targetStart + start
- }
-
- var len = end - start
-
- if (this === target && typeof Uint8Array.prototype.copyWithin === 'function') {
- // Use built-in when available, missing from IE11
- this.copyWithin(targetStart, start, end)
- } else if (this === target && start < targetStart && targetStart < end) {
- // descending copy from end
- for (var i = len - 1; i >= 0; --i) {
- target[i + targetStart] = this[i + start]
- }
- } else {
- Uint8Array.prototype.set.call(
- target,
- this.subarray(start, end),
- targetStart
- )
- }
-
- return len
-}
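// Editor's note (not part of the bundled source): copy() sketch matching the
// signature in the comment above (variable names are illustrative).
var src = Buffer.from('hello')
var dst = Buffer.alloc(8, 0x2e)         // eight '.' bytes
var copied = src.copy(dst, 1, 0, 4)     // copy src[0..4) into dst starting at index 1
// copied === 4 and dst now reads ".hell..."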
-
-// Usage:
-// buffer.fill(number[, offset[, end]])
-// buffer.fill(buffer[, offset[, end]])
-// buffer.fill(string[, offset[, end]][, encoding])
-Buffer.prototype.fill = function fill (val, start, end, encoding) {
- // Handle string cases:
- if (typeof val === 'string') {
- if (typeof start === 'string') {
- encoding = start
- start = 0
- end = this.length
- } else if (typeof end === 'string') {
- encoding = end
- end = this.length
- }
- if (encoding !== undefined && typeof encoding !== 'string') {
- throw new TypeError('encoding must be a string')
- }
- if (typeof encoding === 'string' && !Buffer.isEncoding(encoding)) {
- throw new TypeError('Unknown encoding: ' + encoding)
- }
- if (val.length === 1) {
- var code = val.charCodeAt(0)
- if ((encoding === 'utf8' && code < 128) ||
- encoding === 'latin1') {
- // Fast path: If `val` fits into a single byte, use that numeric value.
- val = code
- }
- }
- } else if (typeof val === 'number') {
- val = val & 255
- }
-
- // Invalid ranges are not set to a default, so we can range-check early.
- if (start < 0 || this.length < start || this.length < end) {
- throw new RangeError('Out of range index')
- }
-
- if (end <= start) {
- return this
- }
-
- start = start >>> 0
- end = end === undefined ? this.length : end >>> 0
-
- if (!val) val = 0
-
- var i
- if (typeof val === 'number') {
- for (i = start; i < end; ++i) {
- this[i] = val
- }
- } else {
- var bytes = Buffer.isBuffer(val)
- ? val
- : Buffer.from(val, encoding)
- var len = bytes.length
- if (len === 0) {
- throw new TypeError('The value "' + val +
- '" is invalid for argument "value"')
- }
- for (i = 0; i < end - start; ++i) {
- this[i + start] = bytes[i % len]
- }
- }
-
- return this
-}
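// Editor's note (not part of the bundled source): the three fill() shapes from
// the usage comment above (variable names are illustrative).
var buf = Buffer.alloc(6)
buf.fill(0x61)                          // fill(number[, offset[, end]])  -> "aaaaaa"
buf.fill('ab', 1, 5)                    // fill(string[, offset[, end]])  -> "aababa"
buf.fill(Buffer.from([1, 2]))           // fill(buffer[, offset[, end]])  -> 01 02 01 02 01 02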
-
-// HELPER FUNCTIONS
-// ================
-
-var INVALID_BASE64_RE = /[^+/0-9A-Za-z-_]/g
-
-function base64clean (str) {
- // Node takes equal signs as end of the Base64 encoding
- str = str.split('=')[0]
- // Node strips out invalid characters like \n and \t from the string, base64-js does not
- str = str.trim().replace(INVALID_BASE64_RE, '')
- // Node converts strings with length < 2 to ''
- if (str.length < 2) return ''
- // Node allows for non-padded base64 strings (missing trailing ===), base64-js does not
- while (str.length % 4 !== 0) {
- str = str + '='
- }
- return str
-}
-
-function toHex (n) {
- if (n < 16) return '0' + n.toString(16)
- return n.toString(16)
-}
-
-function utf8ToBytes (string, units) {
- units = units || Infinity
- var codePoint
- var length = string.length
- var leadSurrogate = null
- var bytes = []
-
- for (var i = 0; i < length; ++i) {
- codePoint = string.charCodeAt(i)
-
- // is surrogate component
- if (codePoint > 0xD7FF && codePoint < 0xE000) {
- // last char was a lead
- if (!leadSurrogate) {
- // no lead yet
- if (codePoint > 0xDBFF) {
- // unexpected trail
- if ((units -= 3) > -1) bytes.push(0xEF, 0xBF, 0xBD)
- continue
- } else if (i + 1 === length) {
- // unpaired lead
- if ((units -= 3) > -1) bytes.push(0xEF, 0xBF, 0xBD)
- continue
- }
-
- // valid lead
- leadSurrogate = codePoint
-
- continue
- }
-
- // 2 leads in a row
- if (codePoint < 0xDC00) {
- if ((units -= 3) > -1) bytes.push(0xEF, 0xBF, 0xBD)
- leadSurrogate = codePoint
- continue
- }
-
- // valid surrogate pair
- codePoint = (leadSurrogate - 0xD800 << 10 | codePoint - 0xDC00) + 0x10000
- } else if (leadSurrogate) {
- // valid bmp char, but last char was a lead
- if ((units -= 3) > -1) bytes.push(0xEF, 0xBF, 0xBD)
- }
-
- leadSurrogate = null
-
- // encode utf8
- if (codePoint < 0x80) {
- if ((units -= 1) < 0) break
- bytes.push(codePoint)
- } else if (codePoint < 0x800) {
- if ((units -= 2) < 0) break
- bytes.push(
- codePoint >> 0x6 | 0xC0,
- codePoint & 0x3F | 0x80
- )
- } else if (codePoint < 0x10000) {
- if ((units -= 3) < 0) break
- bytes.push(
- codePoint >> 0xC | 0xE0,
- codePoint >> 0x6 & 0x3F | 0x80,
- codePoint & 0x3F | 0x80
- )
- } else if (codePoint < 0x110000) {
- if ((units -= 4) < 0) break
- bytes.push(
- codePoint >> 0x12 | 0xF0,
- codePoint >> 0xC & 0x3F | 0x80,
- codePoint >> 0x6 & 0x3F | 0x80,
- codePoint & 0x3F | 0x80
- )
- } else {
- throw new Error('Invalid code point')
- }
- }
-
- return bytes
-}
-
-function asciiToBytes (str) {
- var byteArray = []
- for (var i = 0; i < str.length; ++i) {
- // Node's code seems to be doing this and not & 0x7F..
- byteArray.push(str.charCodeAt(i) & 0xFF)
- }
- return byteArray
-}
-
-function utf16leToBytes (str, units) {
- var c, hi, lo
- var byteArray = []
- for (var i = 0; i < str.length; ++i) {
- if ((units -= 2) < 0) break
-
- c = str.charCodeAt(i)
- hi = c >> 8
- lo = c % 256
- byteArray.push(lo)
- byteArray.push(hi)
- }
-
- return byteArray
-}
-
-function base64ToBytes (str) {
- return base64.toByteArray(base64clean(str))
-}
-
-function blitBuffer (src, dst, offset, length) {
- for (var i = 0; i < length; ++i) {
- if ((i + offset >= dst.length) || (i >= src.length)) break
- dst[i + offset] = src[i]
- }
- return i
-}
-
- // ArrayBuffer or Uint8Array objects from other contexts (e.g. iframes) do not pass
-// the `instanceof` check but they should be treated as of that type.
-// See: https://github.com/feross/buffer/issues/166
-function isInstance (obj, type) {
- return obj instanceof type ||
- (obj != null && obj.constructor != null && obj.constructor.name != null &&
- obj.constructor.name === type.name)
-}
-function numberIsNaN (obj) {
- // For IE11 support
- return obj !== obj // eslint-disable-line no-self-compare
-}
-
-}).call(this)}).call(this,require("buffer").Buffer)
-},{"base64-js":8,"buffer":10,"ieee754":22}],11:[function(require,module,exports){
-'use strict';
-
-var GetIntrinsic = require('get-intrinsic');
-
-var callBind = require('./');
-
-var $indexOf = callBind(GetIntrinsic('String.prototype.indexOf'));
-
-module.exports = function callBoundIntrinsic(name, allowMissing) {
- var intrinsic = GetIntrinsic(name, !!allowMissing);
- if (typeof intrinsic === 'function' && $indexOf(name, '.prototype.') > -1) {
- return callBind(intrinsic);
- }
- return intrinsic;
-};
-
-},{"./":12,"get-intrinsic":18}],12:[function(require,module,exports){
-'use strict';
-
-var bind = require('function-bind');
-var GetIntrinsic = require('get-intrinsic');
-
-var $apply = GetIntrinsic('%Function.prototype.apply%');
-var $call = GetIntrinsic('%Function.prototype.call%');
-var $reflectApply = GetIntrinsic('%Reflect.apply%', true) || bind.call($call, $apply);
-
-var $gOPD = GetIntrinsic('%Object.getOwnPropertyDescriptor%', true);
-var $defineProperty = GetIntrinsic('%Object.defineProperty%', true);
-var $max = GetIntrinsic('%Math.max%');
-
-if ($defineProperty) {
- try {
- $defineProperty({}, 'a', { value: 1 });
- } catch (e) {
- // IE 8 has a broken defineProperty
- $defineProperty = null;
- }
-}
-
-module.exports = function callBind(originalFunction) {
- var func = $reflectApply(bind, $call, arguments);
- if ($gOPD && $defineProperty) {
- var desc = $gOPD(func, 'length');
- if (desc.configurable) {
- // original length, plus the receiver, minus any additional arguments (after the receiver)
- $defineProperty(
- func,
- 'length',
- { value: 1 + $max(0, originalFunction.length - (arguments.length - 1)) }
- );
- }
- }
- return func;
-};
-
-var applyBind = function applyBind() {
- return $reflectApply(bind, $apply, arguments);
-};
-
-if ($defineProperty) {
- $defineProperty(module.exports, 'apply', { value: applyBind });
-} else {
- module.exports.apply = applyBind;
-}
-
-},{"function-bind":17,"get-intrinsic":18}],13:[function(require,module,exports){
-'use strict';
-
-var GetIntrinsic = require('get-intrinsic');
-
-var $gOPD = GetIntrinsic('%Object.getOwnPropertyDescriptor%');
-if ($gOPD) {
- try {
- $gOPD([], 'length');
- } catch (e) {
- // IE 8 has a broken gOPD
- $gOPD = null;
- }
-}
-
-module.exports = $gOPD;
-
-},{"get-intrinsic":18}],14:[function(require,module,exports){
-// Copyright Joyent, Inc. and other Node contributors.
-//
-// Permission is hereby granted, free of charge, to any person obtaining a
-// copy of this software and associated documentation files (the
-// "Software"), to deal in the Software without restriction, including
-// without limitation the rights to use, copy, modify, merge, publish,
-// distribute, sublicense, and/or sell copies of the Software, and to permit
-// persons to whom the Software is furnished to do so, subject to the
-// following conditions:
-//
-// The above copyright notice and this permission notice shall be included
-// in all copies or substantial portions of the Software.
-//
-// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
-// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
-// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
-// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
-// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
-// USE OR OTHER DEALINGS IN THE SOFTWARE.
-
-'use strict';
-
-var R = typeof Reflect === 'object' ? Reflect : null
-var ReflectApply = R && typeof R.apply === 'function'
- ? R.apply
- : function ReflectApply(target, receiver, args) {
- return Function.prototype.apply.call(target, receiver, args);
- }
-
-var ReflectOwnKeys
-if (R && typeof R.ownKeys === 'function') {
- ReflectOwnKeys = R.ownKeys
-} else if (Object.getOwnPropertySymbols) {
- ReflectOwnKeys = function ReflectOwnKeys(target) {
- return Object.getOwnPropertyNames(target)
- .concat(Object.getOwnPropertySymbols(target));
- };
-} else {
- ReflectOwnKeys = function ReflectOwnKeys(target) {
- return Object.getOwnPropertyNames(target);
- };
-}
-
-function ProcessEmitWarning(warning) {
- if (console && console.warn) console.warn(warning);
-}
-
-var NumberIsNaN = Number.isNaN || function NumberIsNaN(value) {
- return value !== value;
-}
-
-function EventEmitter() {
- EventEmitter.init.call(this);
-}
-module.exports = EventEmitter;
-module.exports.once = once;
-
-// Backwards-compat with node 0.10.x
-EventEmitter.EventEmitter = EventEmitter;
-
-EventEmitter.prototype._events = undefined;
-EventEmitter.prototype._eventsCount = 0;
-EventEmitter.prototype._maxListeners = undefined;
-
-// By default EventEmitters will print a warning if more than 10 listeners are
-// added to it. This is a useful default which helps finding memory leaks.
-var defaultMaxListeners = 10;
-
-function checkListener(listener) {
- if (typeof listener !== 'function') {
- throw new TypeError('The "listener" argument must be of type Function. Received type ' + typeof listener);
- }
-}
-
-Object.defineProperty(EventEmitter, 'defaultMaxListeners', {
- enumerable: true,
- get: function() {
- return defaultMaxListeners;
- },
- set: function(arg) {
- if (typeof arg !== 'number' || arg < 0 || NumberIsNaN(arg)) {
- throw new RangeError('The value of "defaultMaxListeners" is out of range. It must be a non-negative number. Received ' + arg + '.');
- }
- defaultMaxListeners = arg;
- }
-});
-
-EventEmitter.init = function() {
-
- if (this._events === undefined ||
- this._events === Object.getPrototypeOf(this)._events) {
- this._events = Object.create(null);
- this._eventsCount = 0;
- }
-
- this._maxListeners = this._maxListeners || undefined;
-};
-
-// Obviously not all Emitters should be limited to 10. This function allows
-// that to be increased. Set to zero for unlimited.
-EventEmitter.prototype.setMaxListeners = function setMaxListeners(n) {
- if (typeof n !== 'number' || n < 0 || NumberIsNaN(n)) {
- throw new RangeError('The value of "n" is out of range. It must be a non-negative number. Received ' + n + '.');
- }
- this._maxListeners = n;
- return this;
-};
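// Editor's note (not part of the bundled source): the leak warning described
// above fires once per event type when the per-instance limit is crossed.
var emitter = new EventEmitter()
emitter.setMaxListeners(2)              // per-instance limit (0 means unlimited)
emitter.on('data', function () {})
emitter.on('data', function () {})
emitter.on('data', function () {})      // third listener triggers MaxListenersExceededWarning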
-
-function _getMaxListeners(that) {
- if (that._maxListeners === undefined)
- return EventEmitter.defaultMaxListeners;
- return that._maxListeners;
-}
-
-EventEmitter.prototype.getMaxListeners = function getMaxListeners() {
- return _getMaxListeners(this);
-};
-
-EventEmitter.prototype.emit = function emit(type) {
- var args = [];
- for (var i = 1; i < arguments.length; i++) args.push(arguments[i]);
- var doError = (type === 'error');
-
- var events = this._events;
- if (events !== undefined)
- doError = (doError && events.error === undefined);
- else if (!doError)
- return false;
-
- // If there is no 'error' event listener then throw.
- if (doError) {
- var er;
- if (args.length > 0)
- er = args[0];
- if (er instanceof Error) {
- // Note: The comments on the `throw` lines are intentional, they show
- // up in Node's output if this results in an unhandled exception.
- throw er; // Unhandled 'error' event
- }
- // At least give some kind of context to the user
- var err = new Error('Unhandled error.' + (er ? ' (' + er.message + ')' : ''));
- err.context = er;
- throw err; // Unhandled 'error' event
- }
-
- var handler = events[type];
-
- if (handler === undefined)
- return false;
-
- if (typeof handler === 'function') {
- ReflectApply(handler, this, args);
- } else {
- var len = handler.length;
- var listeners = arrayClone(handler, len);
- for (var i = 0; i < len; ++i)
- ReflectApply(listeners[i], this, args);
- }
-
- return true;
-};
-
-function _addListener(target, type, listener, prepend) {
- var m;
- var events;
- var existing;
-
- checkListener(listener);
-
- events = target._events;
- if (events === undefined) {
- events = target._events = Object.create(null);
- target._eventsCount = 0;
- } else {
- // To avoid recursion in the case that type === "newListener"! Before
- // adding it to the listeners, first emit "newListener".
- if (events.newListener !== undefined) {
- target.emit('newListener', type,
- listener.listener ? listener.listener : listener);
-
- // Re-assign `events` because a newListener handler could have caused
- // this._events to be assigned to a new object
- events = target._events;
- }
- existing = events[type];
- }
-
- if (existing === undefined) {
- // Optimize the case of one listener. Don't need the extra array object.
- existing = events[type] = listener;
- ++target._eventsCount;
- } else {
- if (typeof existing === 'function') {
- // Adding the second element, need to change to array.
- existing = events[type] =
- prepend ? [listener, existing] : [existing, listener];
- // If we've already got an array, just append.
- } else if (prepend) {
- existing.unshift(listener);
- } else {
- existing.push(listener);
- }
-
- // Check for listener leak
- m = _getMaxListeners(target);
- if (m > 0 && existing.length > m && !existing.warned) {
- existing.warned = true;
- // No error code for this since it is a Warning
- // eslint-disable-next-line no-restricted-syntax
- var w = new Error('Possible EventEmitter memory leak detected. ' +
- existing.length + ' ' + String(type) + ' listeners ' +
- 'added. Use emitter.setMaxListeners() to ' +
- 'increase limit');
- w.name = 'MaxListenersExceededWarning';
- w.emitter = target;
- w.type = type;
- w.count = existing.length;
- ProcessEmitWarning(w);
- }
- }
-
- return target;
-}
-
-EventEmitter.prototype.addListener = function addListener(type, listener) {
- return _addListener(this, type, listener, false);
-};
-
-EventEmitter.prototype.on = EventEmitter.prototype.addListener;
-
-EventEmitter.prototype.prependListener =
- function prependListener(type, listener) {
- return _addListener(this, type, listener, true);
- };
-
-function onceWrapper() {
- if (!this.fired) {
- this.target.removeListener(this.type, this.wrapFn);
- this.fired = true;
- if (arguments.length === 0)
- return this.listener.call(this.target);
- return this.listener.apply(this.target, arguments);
- }
-}
-
-function _onceWrap(target, type, listener) {
- var state = { fired: false, wrapFn: undefined, target: target, type: type, listener: listener };
- var wrapped = onceWrapper.bind(state);
- wrapped.listener = listener;
- state.wrapFn = wrapped;
- return wrapped;
-}
-
-EventEmitter.prototype.once = function once(type, listener) {
- checkListener(listener);
- this.on(type, _onceWrap(this, type, listener));
- return this;
-};
-
-EventEmitter.prototype.prependOnceListener =
- function prependOnceListener(type, listener) {
- checkListener(listener);
- this.prependListener(type, _onceWrap(this, type, listener));
- return this;
- };
-
-// Emits a 'removeListener' event if and only if the listener was removed.
-EventEmitter.prototype.removeListener =
- function removeListener(type, listener) {
- var list, events, position, i, originalListener;
-
- checkListener(listener);
-
- events = this._events;
- if (events === undefined)
- return this;
-
- list = events[type];
- if (list === undefined)
- return this;
-
- if (list === listener || list.listener === listener) {
- if (--this._eventsCount === 0)
- this._events = Object.create(null);
- else {
- delete events[type];
- if (events.removeListener)
- this.emit('removeListener', type, list.listener || listener);
- }
- } else if (typeof list !== 'function') {
- position = -1;
-
- for (i = list.length - 1; i >= 0; i--) {
- if (list[i] === listener || list[i].listener === listener) {
- originalListener = list[i].listener;
- position = i;
- break;
- }
- }
-
- if (position < 0)
- return this;
-
- if (position === 0)
- list.shift();
- else {
- spliceOne(list, position);
- }
-
- if (list.length === 1)
- events[type] = list[0];
-
- if (events.removeListener !== undefined)
- this.emit('removeListener', type, originalListener || listener);
- }
-
- return this;
- };
-
-EventEmitter.prototype.off = EventEmitter.prototype.removeListener;
-
-EventEmitter.prototype.removeAllListeners =
- function removeAllListeners(type) {
- var listeners, events, i;
-
- events = this._events;
- if (events === undefined)
- return this;
-
- // not listening for removeListener, no need to emit
- if (events.removeListener === undefined) {
- if (arguments.length === 0) {
- this._events = Object.create(null);
- this._eventsCount = 0;
- } else if (events[type] !== undefined) {
- if (--this._eventsCount === 0)
- this._events = Object.create(null);
- else
- delete events[type];
- }
- return this;
- }
-
- // emit removeListener for all listeners on all events
- if (arguments.length === 0) {
- var keys = Object.keys(events);
- var key;
- for (i = 0; i < keys.length; ++i) {
- key = keys[i];
- if (key === 'removeListener') continue;
- this.removeAllListeners(key);
- }
- this.removeAllListeners('removeListener');
- this._events = Object.create(null);
- this._eventsCount = 0;
- return this;
- }
-
- listeners = events[type];
-
- if (typeof listeners === 'function') {
- this.removeListener(type, listeners);
- } else if (listeners !== undefined) {
- // LIFO order
- for (i = listeners.length - 1; i >= 0; i--) {
- this.removeListener(type, listeners[i]);
- }
- }
-
- return this;
- };
-
-function _listeners(target, type, unwrap) {
- var events = target._events;
-
- if (events === undefined)
- return [];
-
- var evlistener = events[type];
- if (evlistener === undefined)
- return [];
-
- if (typeof evlistener === 'function')
- return unwrap ? [evlistener.listener || evlistener] : [evlistener];
-
- return unwrap ?
- unwrapListeners(evlistener) : arrayClone(evlistener, evlistener.length);
-}
-
-EventEmitter.prototype.listeners = function listeners(type) {
- return _listeners(this, type, true);
-};
-
-EventEmitter.prototype.rawListeners = function rawListeners(type) {
- return _listeners(this, type, false);
-};
-
-EventEmitter.listenerCount = function(emitter, type) {
- if (typeof emitter.listenerCount === 'function') {
- return emitter.listenerCount(type);
- } else {
- return listenerCount.call(emitter, type);
- }
-};
-
-EventEmitter.prototype.listenerCount = listenerCount;
-function listenerCount(type) {
- var events = this._events;
-
- if (events !== undefined) {
- var evlistener = events[type];
-
- if (typeof evlistener === 'function') {
- return 1;
- } else if (evlistener !== undefined) {
- return evlistener.length;
- }
- }
-
- return 0;
-}
-
-EventEmitter.prototype.eventNames = function eventNames() {
- return this._eventsCount > 0 ? ReflectOwnKeys(this._events) : [];
-};
-
-function arrayClone(arr, n) {
- var copy = new Array(n);
- for (var i = 0; i < n; ++i)
- copy[i] = arr[i];
- return copy;
-}
-
-function spliceOne(list, index) {
- for (; index + 1 < list.length; index++)
- list[index] = list[index + 1];
- list.pop();
-}
-
-function unwrapListeners(arr) {
- var ret = new Array(arr.length);
- for (var i = 0; i < ret.length; ++i) {
- ret[i] = arr[i].listener || arr[i];
- }
- return ret;
-}
-
-function once(emitter, name) {
- return new Promise(function (resolve, reject) {
- function errorListener(err) {
- emitter.removeListener(name, resolver);
- reject(err);
- }
-
- function resolver() {
- if (typeof emitter.removeListener === 'function') {
- emitter.removeListener('error', errorListener);
- }
- resolve([].slice.call(arguments));
- };
-
- eventTargetAgnosticAddListener(emitter, name, resolver, { once: true });
- if (name !== 'error') {
- addErrorHandlerIfEventEmitter(emitter, errorListener, { once: true });
- }
- });
-}
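// Editor's note (not part of the bundled source): the promise form of once()
// above resolves with the emitted arguments and rejects if 'error' fires first.
var ee = new EventEmitter()
once(ee, 'ready').then(function (args) {
  console.log(args)                     // [1, 2] for the emit below
})
ee.emit('ready', 1, 2)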
-
-function addErrorHandlerIfEventEmitter(emitter, handler, flags) {
- if (typeof emitter.on === 'function') {
- eventTargetAgnosticAddListener(emitter, 'error', handler, flags);
- }
-}
-
-function eventTargetAgnosticAddListener(emitter, name, listener, flags) {
- if (typeof emitter.on === 'function') {
- if (flags.once) {
- emitter.once(name, listener);
- } else {
- emitter.on(name, listener);
- }
- } else if (typeof emitter.addEventListener === 'function') {
- // EventTarget does not have `error` event semantics like Node
- // EventEmitters, we do not listen for `error` events here.
- emitter.addEventListener(name, function wrapListener(arg) {
- // IE does not have builtin `{ once: true }` support so we
- // have to do it manually.
- if (flags.once) {
- emitter.removeEventListener(name, wrapListener);
- }
- listener(arg);
- });
- } else {
- throw new TypeError('The "emitter" argument must be of type EventEmitter. Received type ' + typeof emitter);
- }
-}
-
-},{}],15:[function(require,module,exports){
-
-var hasOwn = Object.prototype.hasOwnProperty;
-var toString = Object.prototype.toString;
-
-module.exports = function forEach (obj, fn, ctx) {
- if (toString.call(fn) !== '[object Function]') {
- throw new TypeError('iterator must be a function');
- }
- var l = obj.length;
- if (l === +l) {
- for (var i = 0; i < l; i++) {
- fn.call(ctx, obj[i], i, obj);
- }
- } else {
- for (var k in obj) {
- if (hasOwn.call(obj, k)) {
- fn.call(ctx, obj[k], k, obj);
- }
- }
- }
-};
-
-
-},{}],16:[function(require,module,exports){
-'use strict';
-
-/* eslint no-invalid-this: 1 */
-
-var ERROR_MESSAGE = 'Function.prototype.bind called on incompatible ';
-var slice = Array.prototype.slice;
-var toStr = Object.prototype.toString;
-var funcType = '[object Function]';
-
-module.exports = function bind(that) {
- var target = this;
- if (typeof target !== 'function' || toStr.call(target) !== funcType) {
- throw new TypeError(ERROR_MESSAGE + target);
- }
- var args = slice.call(arguments, 1);
-
- var bound;
- var binder = function () {
- if (this instanceof bound) {
- var result = target.apply(
- this,
- args.concat(slice.call(arguments))
- );
- if (Object(result) === result) {
- return result;
- }
- return this;
- } else {
- return target.apply(
- that,
- args.concat(slice.call(arguments))
- );
- }
- };
-
- var boundLength = Math.max(0, target.length - args.length);
- var boundArgs = [];
- for (var i = 0; i < boundLength; i++) {
- boundArgs.push('$' + i);
- }
-
- bound = Function('binder', 'return function (' + boundArgs.join(',') + '){ return binder.apply(this,arguments); }')(binder);
-
- if (target.prototype) {
- var Empty = function Empty() {};
- Empty.prototype = target.prototype;
- bound.prototype = new Empty();
- Empty.prototype = null;
- }
-
- return bound;
-};
-
-},{}],17:[function(require,module,exports){
-'use strict';
-
-var implementation = require('./implementation');
-
-module.exports = Function.prototype.bind || implementation;
-
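// Editor's sketch (not part of the original bundle): typical use of the function-bind
// shim exported just above, assuming it is loaded the same way dependents in this bundle load it.
var fnBind = require('function-bind');
function greet(greeting, name) { return greeting + ', ' + name + '!'; }
// Pre-apply 'Hello' and fix `this` to null; extra call-time arguments are appended.
var hello = fnBind.call(greet, null, 'Hello');
console.log(hello('world')); // => 'Hello, world!'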
-},{"./implementation":16}],18:[function(require,module,exports){
-'use strict';
-
-var undefined;
-
-var $SyntaxError = SyntaxError;
-var $Function = Function;
-var $TypeError = TypeError;
-
-// eslint-disable-next-line consistent-return
-var getEvalledConstructor = function (expressionSyntax) {
- try {
- return $Function('"use strict"; return (' + expressionSyntax + ').constructor;')();
- } catch (e) {}
-};
-
-var $gOPD = Object.getOwnPropertyDescriptor;
-if ($gOPD) {
- try {
- $gOPD({}, '');
- } catch (e) {
- $gOPD = null; // this is IE 8, which has a broken gOPD
- }
-}
-
-var throwTypeError = function () {
- throw new $TypeError();
-};
-var ThrowTypeError = $gOPD
- ? (function () {
- try {
- // eslint-disable-next-line no-unused-expressions, no-caller, no-restricted-properties
- arguments.callee; // IE 8 does not throw here
- return throwTypeError;
- } catch (calleeThrows) {
- try {
- // IE 8 throws on Object.getOwnPropertyDescriptor(arguments, '')
- return $gOPD(arguments, 'callee').get;
- } catch (gOPDthrows) {
- return throwTypeError;
- }
- }
- }())
- : throwTypeError;
-
-var hasSymbols = require('has-symbols')();
-
-var getProto = Object.getPrototypeOf || function (x) { return x.__proto__; }; // eslint-disable-line no-proto
-
-var needsEval = {};
-
-var TypedArray = typeof Uint8Array === 'undefined' ? undefined : getProto(Uint8Array);
-
-var INTRINSICS = {
- '%AggregateError%': typeof AggregateError === 'undefined' ? undefined : AggregateError,
- '%Array%': Array,
- '%ArrayBuffer%': typeof ArrayBuffer === 'undefined' ? undefined : ArrayBuffer,
- '%ArrayIteratorPrototype%': hasSymbols ? getProto([][Symbol.iterator]()) : undefined,
- '%AsyncFromSyncIteratorPrototype%': undefined,
- '%AsyncFunction%': needsEval,
- '%AsyncGenerator%': needsEval,
- '%AsyncGeneratorFunction%': needsEval,
- '%AsyncIteratorPrototype%': needsEval,
- '%Atomics%': typeof Atomics === 'undefined' ? undefined : Atomics,
- '%BigInt%': typeof BigInt === 'undefined' ? undefined : BigInt,
- '%Boolean%': Boolean,
- '%DataView%': typeof DataView === 'undefined' ? undefined : DataView,
- '%Date%': Date,
- '%decodeURI%': decodeURI,
- '%decodeURIComponent%': decodeURIComponent,
- '%encodeURI%': encodeURI,
- '%encodeURIComponent%': encodeURIComponent,
- '%Error%': Error,
- '%eval%': eval, // eslint-disable-line no-eval
- '%EvalError%': EvalError,
- '%Float32Array%': typeof Float32Array === 'undefined' ? undefined : Float32Array,
- '%Float64Array%': typeof Float64Array === 'undefined' ? undefined : Float64Array,
- '%FinalizationRegistry%': typeof FinalizationRegistry === 'undefined' ? undefined : FinalizationRegistry,
- '%Function%': $Function,
- '%GeneratorFunction%': needsEval,
- '%Int8Array%': typeof Int8Array === 'undefined' ? undefined : Int8Array,
- '%Int16Array%': typeof Int16Array === 'undefined' ? undefined : Int16Array,
- '%Int32Array%': typeof Int32Array === 'undefined' ? undefined : Int32Array,
- '%isFinite%': isFinite,
- '%isNaN%': isNaN,
- '%IteratorPrototype%': hasSymbols ? getProto(getProto([][Symbol.iterator]())) : undefined,
- '%JSON%': typeof JSON === 'object' ? JSON : undefined,
- '%Map%': typeof Map === 'undefined' ? undefined : Map,
- '%MapIteratorPrototype%': typeof Map === 'undefined' || !hasSymbols ? undefined : getProto(new Map()[Symbol.iterator]()),
- '%Math%': Math,
- '%Number%': Number,
- '%Object%': Object,
- '%parseFloat%': parseFloat,
- '%parseInt%': parseInt,
- '%Promise%': typeof Promise === 'undefined' ? undefined : Promise,
- '%Proxy%': typeof Proxy === 'undefined' ? undefined : Proxy,
- '%RangeError%': RangeError,
- '%ReferenceError%': ReferenceError,
- '%Reflect%': typeof Reflect === 'undefined' ? undefined : Reflect,
- '%RegExp%': RegExp,
- '%Set%': typeof Set === 'undefined' ? undefined : Set,
- '%SetIteratorPrototype%': typeof Set === 'undefined' || !hasSymbols ? undefined : getProto(new Set()[Symbol.iterator]()),
- '%SharedArrayBuffer%': typeof SharedArrayBuffer === 'undefined' ? undefined : SharedArrayBuffer,
- '%String%': String,
- '%StringIteratorPrototype%': hasSymbols ? getProto(''[Symbol.iterator]()) : undefined,
- '%Symbol%': hasSymbols ? Symbol : undefined,
- '%SyntaxError%': $SyntaxError,
- '%ThrowTypeError%': ThrowTypeError,
- '%TypedArray%': TypedArray,
- '%TypeError%': $TypeError,
- '%Uint8Array%': typeof Uint8Array === 'undefined' ? undefined : Uint8Array,
- '%Uint8ClampedArray%': typeof Uint8ClampedArray === 'undefined' ? undefined : Uint8ClampedArray,
- '%Uint16Array%': typeof Uint16Array === 'undefined' ? undefined : Uint16Array,
- '%Uint32Array%': typeof Uint32Array === 'undefined' ? undefined : Uint32Array,
- '%URIError%': URIError,
- '%WeakMap%': typeof WeakMap === 'undefined' ? undefined : WeakMap,
- '%WeakRef%': typeof WeakRef === 'undefined' ? undefined : WeakRef,
- '%WeakSet%': typeof WeakSet === 'undefined' ? undefined : WeakSet
-};
-
-var doEval = function doEval(name) {
- var value;
- if (name === '%AsyncFunction%') {
- value = getEvalledConstructor('async function () {}');
- } else if (name === '%GeneratorFunction%') {
- value = getEvalledConstructor('function* () {}');
- } else if (name === '%AsyncGeneratorFunction%') {
- value = getEvalledConstructor('async function* () {}');
- } else if (name === '%AsyncGenerator%') {
- var fn = doEval('%AsyncGeneratorFunction%');
- if (fn) {
- value = fn.prototype;
- }
- } else if (name === '%AsyncIteratorPrototype%') {
- var gen = doEval('%AsyncGenerator%');
- if (gen) {
- value = getProto(gen.prototype);
- }
- }
-
- INTRINSICS[name] = value;
-
- return value;
-};
-
-var LEGACY_ALIASES = {
- '%ArrayBufferPrototype%': ['ArrayBuffer', 'prototype'],
- '%ArrayPrototype%': ['Array', 'prototype'],
- '%ArrayProto_entries%': ['Array', 'prototype', 'entries'],
- '%ArrayProto_forEach%': ['Array', 'prototype', 'forEach'],
- '%ArrayProto_keys%': ['Array', 'prototype', 'keys'],
- '%ArrayProto_values%': ['Array', 'prototype', 'values'],
- '%AsyncFunctionPrototype%': ['AsyncFunction', 'prototype'],
- '%AsyncGenerator%': ['AsyncGeneratorFunction', 'prototype'],
- '%AsyncGeneratorPrototype%': ['AsyncGeneratorFunction', 'prototype', 'prototype'],
- '%BooleanPrototype%': ['Boolean', 'prototype'],
- '%DataViewPrototype%': ['DataView', 'prototype'],
- '%DatePrototype%': ['Date', 'prototype'],
- '%ErrorPrototype%': ['Error', 'prototype'],
- '%EvalErrorPrototype%': ['EvalError', 'prototype'],
- '%Float32ArrayPrototype%': ['Float32Array', 'prototype'],
- '%Float64ArrayPrototype%': ['Float64Array', 'prototype'],
- '%FunctionPrototype%': ['Function', 'prototype'],
- '%Generator%': ['GeneratorFunction', 'prototype'],
- '%GeneratorPrototype%': ['GeneratorFunction', 'prototype', 'prototype'],
- '%Int8ArrayPrototype%': ['Int8Array', 'prototype'],
- '%Int16ArrayPrototype%': ['Int16Array', 'prototype'],
- '%Int32ArrayPrototype%': ['Int32Array', 'prototype'],
- '%JSONParse%': ['JSON', 'parse'],
- '%JSONStringify%': ['JSON', 'stringify'],
- '%MapPrototype%': ['Map', 'prototype'],
- '%NumberPrototype%': ['Number', 'prototype'],
- '%ObjectPrototype%': ['Object', 'prototype'],
- '%ObjProto_toString%': ['Object', 'prototype', 'toString'],
- '%ObjProto_valueOf%': ['Object', 'prototype', 'valueOf'],
- '%PromisePrototype%': ['Promise', 'prototype'],
- '%PromiseProto_then%': ['Promise', 'prototype', 'then'],
- '%Promise_all%': ['Promise', 'all'],
- '%Promise_reject%': ['Promise', 'reject'],
- '%Promise_resolve%': ['Promise', 'resolve'],
- '%RangeErrorPrototype%': ['RangeError', 'prototype'],
- '%ReferenceErrorPrototype%': ['ReferenceError', 'prototype'],
- '%RegExpPrototype%': ['RegExp', 'prototype'],
- '%SetPrototype%': ['Set', 'prototype'],
- '%SharedArrayBufferPrototype%': ['SharedArrayBuffer', 'prototype'],
- '%StringPrototype%': ['String', 'prototype'],
- '%SymbolPrototype%': ['Symbol', 'prototype'],
- '%SyntaxErrorPrototype%': ['SyntaxError', 'prototype'],
- '%TypedArrayPrototype%': ['TypedArray', 'prototype'],
- '%TypeErrorPrototype%': ['TypeError', 'prototype'],
- '%Uint8ArrayPrototype%': ['Uint8Array', 'prototype'],
- '%Uint8ClampedArrayPrototype%': ['Uint8ClampedArray', 'prototype'],
- '%Uint16ArrayPrototype%': ['Uint16Array', 'prototype'],
- '%Uint32ArrayPrototype%': ['Uint32Array', 'prototype'],
- '%URIErrorPrototype%': ['URIError', 'prototype'],
- '%WeakMapPrototype%': ['WeakMap', 'prototype'],
- '%WeakSetPrototype%': ['WeakSet', 'prototype']
-};
-
-var bind = require('function-bind');
-var hasOwn = require('has');
-var $concat = bind.call(Function.call, Array.prototype.concat);
-var $spliceApply = bind.call(Function.apply, Array.prototype.splice);
-var $replace = bind.call(Function.call, String.prototype.replace);
-var $strSlice = bind.call(Function.call, String.prototype.slice);
-
-/* adapted from https://github.com/lodash/lodash/blob/4.17.15/dist/lodash.js#L6735-L6744 */
-var rePropName = /[^%.[\]]+|\[(?:(-?\d+(?:\.\d+)?)|(["'])((?:(?!\2)[^\\]|\\.)*?)\2)\]|(?=(?:\.|\[\])(?:\.|\[\]|%$))/g;
-var reEscapeChar = /\\(\\)?/g; /** Used to match backslashes in property paths. */
-var stringToPath = function stringToPath(string) {
- var first = $strSlice(string, 0, 1);
- var last = $strSlice(string, -1);
- if (first === '%' && last !== '%') {
- throw new $SyntaxError('invalid intrinsic syntax, expected closing `%`');
- } else if (last === '%' && first !== '%') {
- throw new $SyntaxError('invalid intrinsic syntax, expected opening `%`');
- }
- var result = [];
- $replace(string, rePropName, function (match, number, quote, subString) {
- result[result.length] = quote ? $replace(subString, reEscapeChar, '$1') : number || match;
- });
- return result;
-};
-/* end adaptation */
-
-var getBaseIntrinsic = function getBaseIntrinsic(name, allowMissing) {
- var intrinsicName = name;
- var alias;
- if (hasOwn(LEGACY_ALIASES, intrinsicName)) {
- alias = LEGACY_ALIASES[intrinsicName];
- intrinsicName = '%' + alias[0] + '%';
- }
-
- if (hasOwn(INTRINSICS, intrinsicName)) {
- var value = INTRINSICS[intrinsicName];
- if (value === needsEval) {
- value = doEval(intrinsicName);
- }
- if (typeof value === 'undefined' && !allowMissing) {
- throw new $TypeError('intrinsic ' + name + ' exists, but is not available. Please file an issue!');
- }
-
- return {
- alias: alias,
- name: intrinsicName,
- value: value
- };
- }
-
- throw new $SyntaxError('intrinsic ' + name + ' does not exist!');
-};
-
-module.exports = function GetIntrinsic(name, allowMissing) {
- if (typeof name !== 'string' || name.length === 0) {
- throw new $TypeError('intrinsic name must be a non-empty string');
- }
- if (arguments.length > 1 && typeof allowMissing !== 'boolean') {
- throw new $TypeError('"allowMissing" argument must be a boolean');
- }
-
- var parts = stringToPath(name);
- var intrinsicBaseName = parts.length > 0 ? parts[0] : '';
-
- var intrinsic = getBaseIntrinsic('%' + intrinsicBaseName + '%', allowMissing);
- var intrinsicRealName = intrinsic.name;
- var value = intrinsic.value;
- var skipFurtherCaching = false;
-
- var alias = intrinsic.alias;
- if (alias) {
- intrinsicBaseName = alias[0];
- $spliceApply(parts, $concat([0, 1], alias));
- }
-
- for (var i = 1, isOwn = true; i < parts.length; i += 1) {
- var part = parts[i];
- var first = $strSlice(part, 0, 1);
- var last = $strSlice(part, -1);
- if (
- (
- (first === '"' || first === "'" || first === '`')
- || (last === '"' || last === "'" || last === '`')
- )
- && first !== last
- ) {
- throw new $SyntaxError('property names with quotes must have matching quotes');
- }
- if (part === 'constructor' || !isOwn) {
- skipFurtherCaching = true;
- }
-
- intrinsicBaseName += '.' + part;
- intrinsicRealName = '%' + intrinsicBaseName + '%';
-
- if (hasOwn(INTRINSICS, intrinsicRealName)) {
- value = INTRINSICS[intrinsicRealName];
- } else if (value != null) {
- if (!(part in value)) {
- if (!allowMissing) {
- throw new $TypeError('base intrinsic for ' + name + ' exists, but the property is not available.');
- }
- return void undefined;
- }
- if ($gOPD && (i + 1) >= parts.length) {
- var desc = $gOPD(value, part);
- isOwn = !!desc;
-
- // By convention, when a data property is converted to an accessor
- // property to emulate a data property that does not suffer from
- // the override mistake, that accessor's getter is marked with
- // an `originalValue` property. Here, when we detect this, we
- // uphold the illusion by pretending to see that original data
- // property, i.e., returning the value rather than the getter
- // itself.
- if (isOwn && 'get' in desc && !('originalValue' in desc.get)) {
- value = desc.get;
- } else {
- value = value[part];
- }
- } else {
- isOwn = hasOwn(value, part);
- value = value[part];
- }
-
- if (isOwn && !skipFurtherCaching) {
- INTRINSICS[intrinsicRealName] = value;
- }
- }
- }
- return value;
-};
-
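// Editor's sketch (not part of the original bundle): how GetIntrinsic above is typically
// consumed, assuming it is required under its published name 'get-intrinsic'.
var GetIntrinsic = require('get-intrinsic');
var $push = GetIntrinsic('%Array.prototype.push%'); // walks the dotted path starting from %Array%
var $weakRef = GetIntrinsic('%WeakRef%', true);     // allowMissing: undefined (no throw) if the engine lacks WeakRef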
-},{"function-bind":17,"has":21,"has-symbols":19}],19:[function(require,module,exports){
-'use strict';
-
-var origSymbol = typeof Symbol !== 'undefined' && Symbol;
-var hasSymbolSham = require('./shams');
-
-module.exports = function hasNativeSymbols() {
- if (typeof origSymbol !== 'function') { return false; }
- if (typeof Symbol !== 'function') { return false; }
- if (typeof origSymbol('foo') !== 'symbol') { return false; }
- if (typeof Symbol('bar') !== 'symbol') { return false; }
-
- return hasSymbolSham();
-};
-
-},{"./shams":20}],20:[function(require,module,exports){
-'use strict';
-
-/* eslint complexity: [2, 18], max-statements: [2, 33] */
-module.exports = function hasSymbols() {
- if (typeof Symbol !== 'function' || typeof Object.getOwnPropertySymbols !== 'function') { return false; }
- if (typeof Symbol.iterator === 'symbol') { return true; }
-
- var obj = {};
- var sym = Symbol('test');
- var symObj = Object(sym);
- if (typeof sym === 'string') { return false; }
-
- if (Object.prototype.toString.call(sym) !== '[object Symbol]') { return false; }
- if (Object.prototype.toString.call(symObj) !== '[object Symbol]') { return false; }
-
- // temp disabled per https://github.com/ljharb/object.assign/issues/17
- // if (sym instanceof Symbol) { return false; }
- // temp disabled per https://github.com/WebReflection/get-own-property-symbols/issues/4
- // if (!(symObj instanceof Symbol)) { return false; }
-
- // if (typeof Symbol.prototype.toString !== 'function') { return false; }
- // if (String(sym) !== Symbol.prototype.toString.call(sym)) { return false; }
-
- var symVal = 42;
- obj[sym] = symVal;
- for (sym in obj) { return false; } // eslint-disable-line no-restricted-syntax, no-unreachable-loop
- if (typeof Object.keys === 'function' && Object.keys(obj).length !== 0) { return false; }
-
- if (typeof Object.getOwnPropertyNames === 'function' && Object.getOwnPropertyNames(obj).length !== 0) { return false; }
-
- var syms = Object.getOwnPropertySymbols(obj);
- if (syms.length !== 1 || syms[0] !== sym) { return false; }
-
- if (!Object.prototype.propertyIsEnumerable.call(obj, sym)) { return false; }
-
- if (typeof Object.getOwnPropertyDescriptor === 'function') {
- var descriptor = Object.getOwnPropertyDescriptor(obj, sym);
- if (descriptor.value !== symVal || descriptor.enumerable !== true) { return false; }
- }
-
- return true;
-};
-
-},{}],21:[function(require,module,exports){
-'use strict';
-
-var bind = require('function-bind');
-
-module.exports = bind.call(Function.call, Object.prototype.hasOwnProperty);
-
-},{"function-bind":17}],22:[function(require,module,exports){
-/*! ieee754. BSD-3-Clause License. Feross Aboukhadijeh */
-exports.read = function (buffer, offset, isLE, mLen, nBytes) {
- var e, m
- var eLen = (nBytes * 8) - mLen - 1
- var eMax = (1 << eLen) - 1
- var eBias = eMax >> 1
- var nBits = -7
- var i = isLE ? (nBytes - 1) : 0
- var d = isLE ? -1 : 1
- var s = buffer[offset + i]
-
- i += d
-
- e = s & ((1 << (-nBits)) - 1)
- s >>= (-nBits)
- nBits += eLen
- for (; nBits > 0; e = (e * 256) + buffer[offset + i], i += d, nBits -= 8) {}
-
- m = e & ((1 << (-nBits)) - 1)
- e >>= (-nBits)
- nBits += mLen
- for (; nBits > 0; m = (m * 256) + buffer[offset + i], i += d, nBits -= 8) {}
-
- if (e === 0) {
- e = 1 - eBias
- } else if (e === eMax) {
- return m ? NaN : ((s ? -1 : 1) * Infinity)
- } else {
- m = m + Math.pow(2, mLen)
- e = e - eBias
- }
- return (s ? -1 : 1) * m * Math.pow(2, e - mLen)
-}
-
-exports.write = function (buffer, value, offset, isLE, mLen, nBytes) {
- var e, m, c
- var eLen = (nBytes * 8) - mLen - 1
- var eMax = (1 << eLen) - 1
- var eBias = eMax >> 1
- var rt = (mLen === 23 ? Math.pow(2, -24) - Math.pow(2, -77) : 0)
- var i = isLE ? 0 : (nBytes - 1)
- var d = isLE ? 1 : -1
- var s = value < 0 || (value === 0 && 1 / value < 0) ? 1 : 0
-
- value = Math.abs(value)
-
- if (isNaN(value) || value === Infinity) {
- m = isNaN(value) ? 1 : 0
- e = eMax
- } else {
- e = Math.floor(Math.log(value) / Math.LN2)
- if (value * (c = Math.pow(2, -e)) < 1) {
- e--
- c *= 2
- }
- if (e + eBias >= 1) {
- value += rt / c
- } else {
- value += rt * Math.pow(2, 1 - eBias)
- }
- if (value * c >= 2) {
- e++
- c /= 2
- }
-
- if (e + eBias >= eMax) {
- m = 0
- e = eMax
- } else if (e + eBias >= 1) {
- m = ((value * c) - 1) * Math.pow(2, mLen)
- e = e + eBias
- } else {
- m = value * Math.pow(2, eBias - 1) * Math.pow(2, mLen)
- e = 0
- }
- }
-
- for (; mLen >= 8; buffer[offset + i] = m & 0xff, i += d, m /= 256, mLen -= 8) {}
-
- e = (e << mLen) | m
- eLen += mLen
- for (; eLen > 0; buffer[offset + i] = e & 0xff, i += d, e /= 256, eLen -= 8) {}
-
- buffer[offset + i - d] |= s * 128
-}
-
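// Editor's sketch (not part of the original bundle): round-tripping a float64 through
// the ieee754 helpers above (52-bit mantissa, 8 bytes, little-endian).
var ieee754 = require('ieee754');
var bytes = new Uint8Array(8);
ieee754.write(bytes, 3.14, 0, true, 52, 8);        // encode value at offset 0
var decoded = ieee754.read(bytes, 0, true, 52, 8); // => 3.14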
-},{}],23:[function(require,module,exports){
-if (typeof Object.create === 'function') {
- // implementation from standard node.js 'util' module
- module.exports = function inherits(ctor, superCtor) {
- if (superCtor) {
- ctor.super_ = superCtor
- ctor.prototype = Object.create(superCtor.prototype, {
- constructor: {
- value: ctor,
- enumerable: false,
- writable: true,
- configurable: true
- }
- })
- }
- };
-} else {
- // old school shim for old browsers
- module.exports = function inherits(ctor, superCtor) {
- if (superCtor) {
- ctor.super_ = superCtor
- var TempCtor = function () {}
- TempCtor.prototype = superCtor.prototype
- ctor.prototype = new TempCtor()
- ctor.prototype.constructor = ctor
- }
- }
-}
-
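// Editor's sketch (not part of the original bundle): classic constructor inheritance
// with the inherits shim above; Dog instances delegate to Animal.prototype.
var inherits = require('inherits');
function Animal(name) { this.name = name; }
Animal.prototype.speak = function () { return this.name + ' makes a sound'; };
function Dog(name) { Animal.call(this, name); }
inherits(Dog, Animal);
console.log(new Dog('Rex').speak()); // => 'Rex makes a sound'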
-},{}],24:[function(require,module,exports){
-'use strict';
-
-var hasToStringTag = typeof Symbol === 'function' && typeof Symbol.toStringTag === 'symbol';
-var callBound = require('call-bind/callBound');
-
-var $toString = callBound('Object.prototype.toString');
-
-var isStandardArguments = function isArguments(value) {
- if (hasToStringTag && value && typeof value === 'object' && Symbol.toStringTag in value) {
- return false;
- }
- return $toString(value) === '[object Arguments]';
-};
-
-var isLegacyArguments = function isArguments(value) {
- if (isStandardArguments(value)) {
- return true;
- }
- return value !== null &&
- typeof value === 'object' &&
- typeof value.length === 'number' &&
- value.length >= 0 &&
- $toString(value) !== '[object Array]' &&
- $toString(value.callee) === '[object Function]';
-};
-
-var supportsStandardArguments = (function () {
- return isStandardArguments(arguments);
-}());
-
-isStandardArguments.isLegacyArguments = isLegacyArguments; // for tests
-
-module.exports = supportsStandardArguments ? isStandardArguments : isLegacyArguments;
-
-},{"call-bind/callBound":11}],25:[function(require,module,exports){
-'use strict';
-
-var toStr = Object.prototype.toString;
-var fnToStr = Function.prototype.toString;
-var isFnRegex = /^\s*(?:function)?\*/;
-var hasToStringTag = typeof Symbol === 'function' && typeof Symbol.toStringTag === 'symbol';
-var getProto = Object.getPrototypeOf;
-var getGeneratorFunc = function () { // eslint-disable-line consistent-return
- if (!hasToStringTag) {
- return false;
- }
- try {
- return Function('return function*() {}')();
- } catch (e) {
- }
-};
-var GeneratorFunction;
-
-module.exports = function isGeneratorFunction(fn) {
- if (typeof fn !== 'function') {
- return false;
- }
- if (isFnRegex.test(fnToStr.call(fn))) {
- return true;
- }
- if (!hasToStringTag) {
- var str = toStr.call(fn);
- return str === '[object GeneratorFunction]';
- }
- if (!getProto) {
- return false;
- }
- if (typeof GeneratorFunction === 'undefined') {
- var generatorFunc = getGeneratorFunc();
- GeneratorFunction = generatorFunc ? getProto(generatorFunc) : false;
- }
- return getProto(fn) === GeneratorFunction;
-};
-
-},{}],26:[function(require,module,exports){
-(function (global){(function (){
-'use strict';
-
-var forEach = require('foreach');
-var availableTypedArrays = require('available-typed-arrays');
-var callBound = require('call-bind/callBound');
-
-var $toString = callBound('Object.prototype.toString');
-var hasSymbols = require('has-symbols')();
-var hasToStringTag = hasSymbols && typeof Symbol.toStringTag === 'symbol';
-
-var typedArrays = availableTypedArrays();
-
-var $indexOf = callBound('Array.prototype.indexOf', true) || function indexOf(array, value) {
- for (var i = 0; i < array.length; i += 1) {
- if (array[i] === value) {
- return i;
- }
- }
- return -1;
-};
-var $slice = callBound('String.prototype.slice');
-var toStrTags = {};
-var gOPD = require('es-abstract/helpers/getOwnPropertyDescriptor');
-var getPrototypeOf = Object.getPrototypeOf; // require('getprototypeof');
-if (hasToStringTag && gOPD && getPrototypeOf) {
- forEach(typedArrays, function (typedArray) {
- var arr = new global[typedArray]();
- if (!(Symbol.toStringTag in arr)) {
- throw new EvalError('this engine has support for Symbol.toStringTag, but ' + typedArray + ' does not have the property! Please report this.');
- }
- var proto = getPrototypeOf(arr);
- var descriptor = gOPD(proto, Symbol.toStringTag);
- if (!descriptor) {
- var superProto = getPrototypeOf(proto);
- descriptor = gOPD(superProto, Symbol.toStringTag);
- }
- toStrTags[typedArray] = descriptor.get;
- });
-}
-
-var tryTypedArrays = function tryAllTypedArrays(value) {
- var anyTrue = false;
- forEach(toStrTags, function (getter, typedArray) {
- if (!anyTrue) {
- try {
- anyTrue = getter.call(value) === typedArray;
- } catch (e) { /**/ }
- }
- });
- return anyTrue;
-};
-
-module.exports = function isTypedArray(value) {
- if (!value || typeof value !== 'object') { return false; }
- if (!hasToStringTag) {
- var tag = $slice($toString(value), 8, -1);
- return $indexOf(typedArrays, tag) > -1;
- }
- if (!gOPD) { return false; }
- return tryTypedArrays(value);
-};
-
-}).call(this)}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
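// Editor's sketch (not part of the original bundle): is-typed-array relies on the
// Symbol.toStringTag getters gathered above, so the brand check is not fooled by a spoofed tag.
var isTypedArray = require('is-typed-array');
isTypedArray(new Float64Array(2)); // true
isTypedArray([]);                  // false
isTypedArray({ length: 0 });       // false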
-},{"available-typed-arrays":7,"call-bind/callBound":11,"es-abstract/helpers/getOwnPropertyDescriptor":13,"foreach":15,"has-symbols":19}],27:[function(require,module,exports){
-// shim for using process in browser
-var process = module.exports = {};
-
-// cached from whatever global is present so that test runners that stub it
-// don't break things. But we need to wrap it in a try catch in case it is
-// wrapped in strict mode code which doesn't define any globals. It's inside a
-// function because try/catches deoptimize in certain engines.
-
-var cachedSetTimeout;
-var cachedClearTimeout;
-
-function defaultSetTimout() {
- throw new Error('setTimeout has not been defined');
-}
-function defaultClearTimeout () {
- throw new Error('clearTimeout has not been defined');
-}
-(function () {
- try {
- if (typeof setTimeout === 'function') {
- cachedSetTimeout = setTimeout;
- } else {
- cachedSetTimeout = defaultSetTimout;
- }
- } catch (e) {
- cachedSetTimeout = defaultSetTimout;
- }
- try {
- if (typeof clearTimeout === 'function') {
- cachedClearTimeout = clearTimeout;
- } else {
- cachedClearTimeout = defaultClearTimeout;
- }
- } catch (e) {
- cachedClearTimeout = defaultClearTimeout;
- }
-} ())
-function runTimeout(fun) {
- if (cachedSetTimeout === setTimeout) {
-        // normal environments in sane situations
- return setTimeout(fun, 0);
- }
-    // if setTimeout wasn't available but was later defined
- if ((cachedSetTimeout === defaultSetTimout || !cachedSetTimeout) && setTimeout) {
- cachedSetTimeout = setTimeout;
- return setTimeout(fun, 0);
- }
- try {
-        // when somebody has screwed with setTimeout, but without I.E. madness
- return cachedSetTimeout(fun, 0);
- } catch(e){
- try {
- // When we are in I.E. but the script has been evaled so I.E. doesn't trust the global object when called normally
- return cachedSetTimeout.call(null, fun, 0);
- } catch(e){
-            // same as above, but for a version of I.E. that requires the global object as 'this'; hopefully our context is correct, otherwise it will throw a global error
- return cachedSetTimeout.call(this, fun, 0);
- }
- }
-
-
-}
-function runClearTimeout(marker) {
- if (cachedClearTimeout === clearTimeout) {
-        // normal environments in sane situations
- return clearTimeout(marker);
- }
-    // if clearTimeout wasn't available but was later defined
- if ((cachedClearTimeout === defaultClearTimeout || !cachedClearTimeout) && clearTimeout) {
- cachedClearTimeout = clearTimeout;
- return clearTimeout(marker);
- }
- try {
-        // when somebody has screwed with setTimeout, but without I.E. madness
- return cachedClearTimeout(marker);
- } catch (e){
- try {
- // When we are in I.E. but the script has been evaled so I.E. doesn't trust the global object when called normally
- return cachedClearTimeout.call(null, marker);
- } catch (e){
-            // same as above, but for a version of I.E. that requires the global object as 'this'; hopefully our context is correct, otherwise it will throw a global error.
- // Some versions of I.E. have different rules for clearTimeout vs setTimeout
- return cachedClearTimeout.call(this, marker);
- }
- }
-
-
-
-}
-var queue = [];
-var draining = false;
-var currentQueue;
-var queueIndex = -1;
-
-function cleanUpNextTick() {
- if (!draining || !currentQueue) {
- return;
- }
- draining = false;
- if (currentQueue.length) {
- queue = currentQueue.concat(queue);
- } else {
- queueIndex = -1;
- }
- if (queue.length) {
- drainQueue();
- }
-}
-
-function drainQueue() {
- if (draining) {
- return;
- }
- var timeout = runTimeout(cleanUpNextTick);
- draining = true;
-
- var len = queue.length;
- while(len) {
- currentQueue = queue;
- queue = [];
- while (++queueIndex < len) {
- if (currentQueue) {
- currentQueue[queueIndex].run();
- }
- }
- queueIndex = -1;
- len = queue.length;
- }
- currentQueue = null;
- draining = false;
- runClearTimeout(timeout);
-}
-
-process.nextTick = function (fun) {
- var args = new Array(arguments.length - 1);
- if (arguments.length > 1) {
- for (var i = 1; i < arguments.length; i++) {
- args[i - 1] = arguments[i];
- }
- }
- queue.push(new Item(fun, args));
- if (queue.length === 1 && !draining) {
- runTimeout(drainQueue);
- }
-};
-
-// v8 likes predictable objects
-function Item(fun, array) {
- this.fun = fun;
- this.array = array;
-}
-Item.prototype.run = function () {
- this.fun.apply(null, this.array);
-};
-process.title = 'browser';
-process.browser = true;
-process.env = {};
-process.argv = [];
-process.version = ''; // empty string to avoid regexp issues
-process.versions = {};
-
-function noop() {}
-
-process.on = noop;
-process.addListener = noop;
-process.once = noop;
-process.off = noop;
-process.removeListener = noop;
-process.removeAllListeners = noop;
-process.emit = noop;
-process.prependListener = noop;
-process.prependOnceListener = noop;
-
-process.listeners = function (name) { return [] }
-
-process.binding = function (name) {
- throw new Error('process.binding is not supported');
-};
-
-process.cwd = function () { return '/' };
-process.chdir = function (dir) {
- throw new Error('process.chdir is not supported');
-};
-process.umask = function() { return 0; };
-
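// Editor's sketch (not part of the original bundle): the shim above queues nextTick
// callbacks and drains them on a zero-delay timeout; extra arguments are forwarded.
process.nextTick(function (msg) { console.log(msg); }, 'runs after the current turn');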
-},{}],28:[function(require,module,exports){
-/*! safe-buffer. MIT License. Feross Aboukhadijeh */
-/* eslint-disable node/no-deprecated-api */
-var buffer = require('buffer')
-var Buffer = buffer.Buffer
-
-// alternative to using Object.keys for old browsers
-function copyProps (src, dst) {
- for (var key in src) {
- dst[key] = src[key]
- }
-}
-if (Buffer.from && Buffer.alloc && Buffer.allocUnsafe && Buffer.allocUnsafeSlow) {
- module.exports = buffer
-} else {
- // Copy properties from require('buffer')
- copyProps(buffer, exports)
- exports.Buffer = SafeBuffer
-}
-
-function SafeBuffer (arg, encodingOrOffset, length) {
- return Buffer(arg, encodingOrOffset, length)
-}
-
-SafeBuffer.prototype = Object.create(Buffer.prototype)
-
-// Copy static methods from Buffer
-copyProps(Buffer, SafeBuffer)
-
-SafeBuffer.from = function (arg, encodingOrOffset, length) {
- if (typeof arg === 'number') {
- throw new TypeError('Argument must not be a number')
- }
- return Buffer(arg, encodingOrOffset, length)
-}
-
-SafeBuffer.alloc = function (size, fill, encoding) {
- if (typeof size !== 'number') {
- throw new TypeError('Argument must be a number')
- }
- var buf = Buffer(size)
- if (fill !== undefined) {
- if (typeof encoding === 'string') {
- buf.fill(fill, encoding)
- } else {
- buf.fill(fill)
- }
- } else {
- buf.fill(0)
- }
- return buf
-}
-
-SafeBuffer.allocUnsafe = function (size) {
- if (typeof size !== 'number') {
- throw new TypeError('Argument must be a number')
- }
- return Buffer(size)
-}
-
-SafeBuffer.allocUnsafeSlow = function (size) {
- if (typeof size !== 'number') {
- throw new TypeError('Argument must be a number')
- }
- return buffer.SlowBuffer(size)
-}
-
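// Editor's sketch (not part of the original bundle): safe-buffer mirrors the modern
// Buffer factory methods while rejecting the unsafe numeric constructor form.
var SafeBuf = require('safe-buffer').Buffer;
SafeBuf.from('abc', 'utf8'); // <Buffer 61 62 63>
SafeBuf.alloc(4);            // zero-filled, 4 bytes
SafeBuf.allocUnsafe(4);      // uninitialized, 4 bytes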
-},{"buffer":10}],29:[function(require,module,exports){
-// Copyright Joyent, Inc. and other Node contributors.
-//
-// Permission is hereby granted, free of charge, to any person obtaining a
-// copy of this software and associated documentation files (the
-// "Software"), to deal in the Software without restriction, including
-// without limitation the rights to use, copy, modify, merge, publish,
-// distribute, sublicense, and/or sell copies of the Software, and to permit
-// persons to whom the Software is furnished to do so, subject to the
-// following conditions:
-//
-// The above copyright notice and this permission notice shall be included
-// in all copies or substantial portions of the Software.
-//
-// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
-// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
-// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
-// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
-// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
-// USE OR OTHER DEALINGS IN THE SOFTWARE.
-
-module.exports = Stream;
-
-var EE = require('events').EventEmitter;
-var inherits = require('inherits');
-
-inherits(Stream, EE);
-Stream.Readable = require('readable-stream/lib/_stream_readable.js');
-Stream.Writable = require('readable-stream/lib/_stream_writable.js');
-Stream.Duplex = require('readable-stream/lib/_stream_duplex.js');
-Stream.Transform = require('readable-stream/lib/_stream_transform.js');
-Stream.PassThrough = require('readable-stream/lib/_stream_passthrough.js');
-Stream.finished = require('readable-stream/lib/internal/streams/end-of-stream.js')
-Stream.pipeline = require('readable-stream/lib/internal/streams/pipeline.js')
-
-// Backwards-compat with node 0.4.x
-Stream.Stream = Stream;
-
-
-
-// old-style streams. Note that the pipe method (the only relevant
-// part of this class) is overridden in the Readable class.
-
-function Stream() {
- EE.call(this);
-}
-
-Stream.prototype.pipe = function(dest, options) {
- var source = this;
-
- function ondata(chunk) {
- if (dest.writable) {
- if (false === dest.write(chunk) && source.pause) {
- source.pause();
- }
- }
- }
-
- source.on('data', ondata);
-
- function ondrain() {
- if (source.readable && source.resume) {
- source.resume();
- }
- }
-
- dest.on('drain', ondrain);
-
- // If the 'end' option is not supplied, dest.end() will be called when
- // source gets the 'end' or 'close' events. Only dest.end() once.
- if (!dest._isStdio && (!options || options.end !== false)) {
- source.on('end', onend);
- source.on('close', onclose);
- }
-
- var didOnEnd = false;
- function onend() {
- if (didOnEnd) return;
- didOnEnd = true;
-
- dest.end();
- }
-
-
- function onclose() {
- if (didOnEnd) return;
- didOnEnd = true;
-
- if (typeof dest.destroy === 'function') dest.destroy();
- }
-
- // don't leave dangling pipes when there are errors.
- function onerror(er) {
- cleanup();
- if (EE.listenerCount(this, 'error') === 0) {
- throw er; // Unhandled stream error in pipe.
- }
- }
-
- source.on('error', onerror);
- dest.on('error', onerror);
-
- // remove all the event listeners that were added.
- function cleanup() {
- source.removeListener('data', ondata);
- dest.removeListener('drain', ondrain);
-
- source.removeListener('end', onend);
- source.removeListener('close', onclose);
-
- source.removeListener('error', onerror);
- dest.removeListener('error', onerror);
-
- source.removeListener('end', cleanup);
- source.removeListener('close', cleanup);
-
- dest.removeListener('close', cleanup);
- }
-
- source.on('end', cleanup);
- source.on('close', cleanup);
-
- dest.on('close', cleanup);
-
- dest.emit('pipe', source);
-
- // Allow for unix-like usage: A.pipe(B).pipe(C)
- return dest;
-};
-
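// Editor's sketch (not part of the original bundle): old-style streams use the legacy
// pipe() above, pausing the source on backpressure and calling dest.end() on 'end'.
var stream = require('stream');
var source = new stream.Stream();              // bare old-style stream
source.readable = true;
var sink = new stream.Writable({ write: function (chunk, enc, cb) { cb(); } });
source.pipe(sink);
source.emit('data', 'a,b,c\n');
source.emit('end');                            // triggers sink.end() via onend()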
-},{"events":14,"inherits":23,"readable-stream/lib/_stream_duplex.js":31,"readable-stream/lib/_stream_passthrough.js":32,"readable-stream/lib/_stream_readable.js":33,"readable-stream/lib/_stream_transform.js":34,"readable-stream/lib/_stream_writable.js":35,"readable-stream/lib/internal/streams/end-of-stream.js":39,"readable-stream/lib/internal/streams/pipeline.js":41}],30:[function(require,module,exports){
-'use strict';
-
-function _inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; subClass.__proto__ = superClass; }
-
-var codes = {};
-
-function createErrorType(code, message, Base) {
- if (!Base) {
- Base = Error;
- }
-
- function getMessage(arg1, arg2, arg3) {
- if (typeof message === 'string') {
- return message;
- } else {
- return message(arg1, arg2, arg3);
- }
- }
-
- var NodeError =
- /*#__PURE__*/
- function (_Base) {
- _inheritsLoose(NodeError, _Base);
-
- function NodeError(arg1, arg2, arg3) {
- return _Base.call(this, getMessage(arg1, arg2, arg3)) || this;
- }
-
- return NodeError;
- }(Base);
-
- NodeError.prototype.name = Base.name;
- NodeError.prototype.code = code;
- codes[code] = NodeError;
-} // https://github.com/nodejs/node/blob/v10.8.0/lib/internal/errors.js
-
-
-function oneOf(expected, thing) {
- if (Array.isArray(expected)) {
- var len = expected.length;
- expected = expected.map(function (i) {
- return String(i);
- });
-
- if (len > 2) {
- return "one of ".concat(thing, " ").concat(expected.slice(0, len - 1).join(', '), ", or ") + expected[len - 1];
- } else if (len === 2) {
- return "one of ".concat(thing, " ").concat(expected[0], " or ").concat(expected[1]);
- } else {
- return "of ".concat(thing, " ").concat(expected[0]);
- }
- } else {
- return "of ".concat(thing, " ").concat(String(expected));
- }
-} // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/startsWith
-
-
-function startsWith(str, search, pos) {
- return str.substr(!pos || pos < 0 ? 0 : +pos, search.length) === search;
-} // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith
-
-
-function endsWith(str, search, this_len) {
- if (this_len === undefined || this_len > str.length) {
- this_len = str.length;
- }
-
- return str.substring(this_len - search.length, this_len) === search;
-} // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/includes
-
-
-function includes(str, search, start) {
- if (typeof start !== 'number') {
- start = 0;
- }
-
- if (start + search.length > str.length) {
- return false;
- } else {
- return str.indexOf(search, start) !== -1;
- }
-}
-
-createErrorType('ERR_INVALID_OPT_VALUE', function (name, value) {
- return 'The value "' + value + '" is invalid for option "' + name + '"';
-}, TypeError);
-createErrorType('ERR_INVALID_ARG_TYPE', function (name, expected, actual) {
- // determiner: 'must be' or 'must not be'
- var determiner;
-
- if (typeof expected === 'string' && startsWith(expected, 'not ')) {
- determiner = 'must not be';
- expected = expected.replace(/^not /, '');
- } else {
- determiner = 'must be';
- }
-
- var msg;
-
- if (endsWith(name, ' argument')) {
- // For cases like 'first argument'
- msg = "The ".concat(name, " ").concat(determiner, " ").concat(oneOf(expected, 'type'));
- } else {
- var type = includes(name, '.') ? 'property' : 'argument';
- msg = "The \"".concat(name, "\" ").concat(type, " ").concat(determiner, " ").concat(oneOf(expected, 'type'));
- }
-
- msg += ". Received type ".concat(typeof actual);
- return msg;
-}, TypeError);
-createErrorType('ERR_STREAM_PUSH_AFTER_EOF', 'stream.push() after EOF');
-createErrorType('ERR_METHOD_NOT_IMPLEMENTED', function (name) {
- return 'The ' + name + ' method is not implemented';
-});
-createErrorType('ERR_STREAM_PREMATURE_CLOSE', 'Premature close');
-createErrorType('ERR_STREAM_DESTROYED', function (name) {
- return 'Cannot call ' + name + ' after a stream was destroyed';
-});
-createErrorType('ERR_MULTIPLE_CALLBACK', 'Callback called multiple times');
-createErrorType('ERR_STREAM_CANNOT_PIPE', 'Cannot pipe, not readable');
-createErrorType('ERR_STREAM_WRITE_AFTER_END', 'write after end');
-createErrorType('ERR_STREAM_NULL_VALUES', 'May not write null values to stream', TypeError);
-createErrorType('ERR_UNKNOWN_ENCODING', function (arg) {
- return 'Unknown encoding: ' + arg;
-}, TypeError);
-createErrorType('ERR_STREAM_UNSHIFT_AFTER_END_EVENT', 'stream.unshift() after end event');
-module.exports.codes = codes;
-
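// Editor's sketch (not part of the original bundle): the classes registered above are
// looked up by code, and their messages are rendered lazily from the templates.
var ERR_INVALID_ARG_TYPE = codes.ERR_INVALID_ARG_TYPE;
var err = new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer', 'Uint8Array'], 42);
err.code;    // 'ERR_INVALID_ARG_TYPE'
err.message; // 'The "chunk" argument must be one of type string, Buffer, or Uint8Array. Received type number'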
-},{}],31:[function(require,module,exports){
-(function (process){(function (){
-// Copyright Joyent, Inc. and other Node contributors.
-//
-// Permission is hereby granted, free of charge, to any person obtaining a
-// copy of this software and associated documentation files (the
-// "Software"), to deal in the Software without restriction, including
-// without limitation the rights to use, copy, modify, merge, publish,
-// distribute, sublicense, and/or sell copies of the Software, and to permit
-// persons to whom the Software is furnished to do so, subject to the
-// following conditions:
-//
-// The above copyright notice and this permission notice shall be included
-// in all copies or substantial portions of the Software.
-//
-// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
-// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
-// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
-// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
-// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
-// USE OR OTHER DEALINGS IN THE SOFTWARE.
-// a duplex stream is just a stream that is both readable and writable.
-// Since JS doesn't have multiple prototypal inheritance, this class
-// prototypally inherits from Readable, and then parasitically from
-// Writable.
-'use strict';
-/**/
-
-var objectKeys = Object.keys || function (obj) {
- var keys = [];
-
- for (var key in obj) {
- keys.push(key);
- }
-
- return keys;
-};
-/**/
-
-
-module.exports = Duplex;
-
-var Readable = require('./_stream_readable');
-
-var Writable = require('./_stream_writable');
-
-require('inherits')(Duplex, Readable);
-
-{
- // Allow the keys array to be GC'ed.
- var keys = objectKeys(Writable.prototype);
-
- for (var v = 0; v < keys.length; v++) {
- var method = keys[v];
- if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method];
- }
-}
-
-function Duplex(options) {
- if (!(this instanceof Duplex)) return new Duplex(options);
- Readable.call(this, options);
- Writable.call(this, options);
- this.allowHalfOpen = true;
-
- if (options) {
- if (options.readable === false) this.readable = false;
- if (options.writable === false) this.writable = false;
-
- if (options.allowHalfOpen === false) {
- this.allowHalfOpen = false;
- this.once('end', onend);
- }
- }
-}
-
-Object.defineProperty(Duplex.prototype, 'writableHighWaterMark', {
- // making it explicit this property is not enumerable
- // because otherwise some prototype manipulation in
- // userland will fail
- enumerable: false,
- get: function get() {
- return this._writableState.highWaterMark;
- }
-});
-Object.defineProperty(Duplex.prototype, 'writableBuffer', {
- // making it explicit this property is not enumerable
- // because otherwise some prototype manipulation in
- // userland will fail
- enumerable: false,
- get: function get() {
- return this._writableState && this._writableState.getBuffer();
- }
-});
-Object.defineProperty(Duplex.prototype, 'writableLength', {
- // making it explicit this property is not enumerable
- // because otherwise some prototype manipulation in
- // userland will fail
- enumerable: false,
- get: function get() {
- return this._writableState.length;
- }
-}); // the no-half-open enforcer
-
-function onend() {
- // If the writable side ended, then we're ok.
- if (this._writableState.ended) return; // no more data can be written.
- // But allow more writes to happen in this tick.
-
- process.nextTick(onEndNT, this);
-}
-
-function onEndNT(self) {
- self.end();
-}
-
-Object.defineProperty(Duplex.prototype, 'destroyed', {
- // making it explicit this property is not enumerable
- // because otherwise some prototype manipulation in
- // userland will fail
- enumerable: false,
- get: function get() {
- if (this._readableState === undefined || this._writableState === undefined) {
- return false;
- }
-
- return this._readableState.destroyed && this._writableState.destroyed;
- },
- set: function set(value) {
- // we ignore the value if the stream
- // has not been initialized yet
- if (this._readableState === undefined || this._writableState === undefined) {
- return;
- } // backward compatibility, the user is explicitly
- // managing destroyed
-
-
- this._readableState.destroyed = value;
- this._writableState.destroyed = value;
- }
-});
-}).call(this)}).call(this,require('_process'))
-},{"./_stream_readable":33,"./_stream_writable":35,"_process":27,"inherits":23}],32:[function(require,module,exports){
-// Copyright Joyent, Inc. and other Node contributors.
-//
-// Permission is hereby granted, free of charge, to any person obtaining a
-// copy of this software and associated documentation files (the
-// "Software"), to deal in the Software without restriction, including
-// without limitation the rights to use, copy, modify, merge, publish,
-// distribute, sublicense, and/or sell copies of the Software, and to permit
-// persons to whom the Software is furnished to do so, subject to the
-// following conditions:
-//
-// The above copyright notice and this permission notice shall be included
-// in all copies or substantial portions of the Software.
-//
-// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
-// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
-// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
-// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
-// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
-// USE OR OTHER DEALINGS IN THE SOFTWARE.
-// a passthrough stream.
-// basically just the most minimal sort of Transform stream.
-// Every written chunk gets output as-is.
-'use strict';
-
-module.exports = PassThrough;
-
-var Transform = require('./_stream_transform');
-
-require('inherits')(PassThrough, Transform);
-
-function PassThrough(options) {
- if (!(this instanceof PassThrough)) return new PassThrough(options);
- Transform.call(this, options);
-}
-
-PassThrough.prototype._transform = function (chunk, encoding, cb) {
- cb(null, chunk);
-};
-},{"./_stream_transform":34,"inherits":23}],33:[function(require,module,exports){
-(function (process,global){(function (){
-// Copyright Joyent, Inc. and other Node contributors.
-//
-// Permission is hereby granted, free of charge, to any person obtaining a
-// copy of this software and associated documentation files (the
-// "Software"), to deal in the Software without restriction, including
-// without limitation the rights to use, copy, modify, merge, publish,
-// distribute, sublicense, and/or sell copies of the Software, and to permit
-// persons to whom the Software is furnished to do so, subject to the
-// following conditions:
-//
-// The above copyright notice and this permission notice shall be included
-// in all copies or substantial portions of the Software.
-//
-// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
-// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
-// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
-// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
-// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
-// USE OR OTHER DEALINGS IN THE SOFTWARE.
-'use strict';
-
-module.exports = Readable;
-/**/
-
-var Duplex;
-/**/
-
-Readable.ReadableState = ReadableState;
-/**/
-
-var EE = require('events').EventEmitter;
-
-var EElistenerCount = function EElistenerCount(emitter, type) {
- return emitter.listeners(type).length;
-};
-/**/
-
-/**/
-
-
-var Stream = require('./internal/streams/stream');
-/**/
-
-
-var Buffer = require('buffer').Buffer;
-
-var OurUint8Array = global.Uint8Array || function () {};
-
-function _uint8ArrayToBuffer(chunk) {
- return Buffer.from(chunk);
-}
-
-function _isUint8Array(obj) {
- return Buffer.isBuffer(obj) || obj instanceof OurUint8Array;
-}
-/**/
-
-
-var debugUtil = require('util');
-
-var debug;
-
-if (debugUtil && debugUtil.debuglog) {
- debug = debugUtil.debuglog('stream');
-} else {
- debug = function debug() {};
-}
-/**/
-
-
-var BufferList = require('./internal/streams/buffer_list');
-
-var destroyImpl = require('./internal/streams/destroy');
-
-var _require = require('./internal/streams/state'),
- getHighWaterMark = _require.getHighWaterMark;
-
-var _require$codes = require('../errors').codes,
- ERR_INVALID_ARG_TYPE = _require$codes.ERR_INVALID_ARG_TYPE,
- ERR_STREAM_PUSH_AFTER_EOF = _require$codes.ERR_STREAM_PUSH_AFTER_EOF,
- ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED,
- ERR_STREAM_UNSHIFT_AFTER_END_EVENT = _require$codes.ERR_STREAM_UNSHIFT_AFTER_END_EVENT; // Lazy loaded to improve the startup performance.
-
-
-var StringDecoder;
-var createReadableStreamAsyncIterator;
-var from;
-
-require('inherits')(Readable, Stream);
-
-var errorOrDestroy = destroyImpl.errorOrDestroy;
-var kProxyEvents = ['error', 'close', 'destroy', 'pause', 'resume'];
-
-function prependListener(emitter, event, fn) {
- // Sadly this is not cacheable as some libraries bundle their own
- // event emitter implementation with them.
- if (typeof emitter.prependListener === 'function') return emitter.prependListener(event, fn); // This is a hack to make sure that our error handler is attached before any
- // userland ones. NEVER DO THIS. This is here only because this code needs
- // to continue to work with older versions of Node.js that do not include
- // the prependListener() method. The goal is to eventually remove this hack.
-
- if (!emitter._events || !emitter._events[event]) emitter.on(event, fn);else if (Array.isArray(emitter._events[event])) emitter._events[event].unshift(fn);else emitter._events[event] = [fn, emitter._events[event]];
-}
-
-function ReadableState(options, stream, isDuplex) {
- Duplex = Duplex || require('./_stream_duplex');
- options = options || {}; // Duplex streams are both readable and writable, but share
- // the same options object.
- // However, some cases require setting options to different
- // values for the readable and the writable sides of the duplex stream.
- // These options can be provided separately as readableXXX and writableXXX.
-
- if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof Duplex; // object stream flag. Used to make read(n) ignore n and to
- // make all the buffer merging and length checks go away
-
- this.objectMode = !!options.objectMode;
- if (isDuplex) this.objectMode = this.objectMode || !!options.readableObjectMode; // the point at which it stops calling _read() to fill the buffer
- // Note: 0 is a valid value, means "don't call _read preemptively ever"
-
- this.highWaterMark = getHighWaterMark(this, options, 'readableHighWaterMark', isDuplex); // A linked list is used to store data chunks instead of an array because the
- // linked list can remove elements from the beginning faster than
- // array.shift()
-
- this.buffer = new BufferList();
- this.length = 0;
- this.pipes = null;
- this.pipesCount = 0;
- this.flowing = null;
- this.ended = false;
- this.endEmitted = false;
- this.reading = false; // a flag to be able to tell if the event 'readable'/'data' is emitted
- // immediately, or on a later tick. We set this to true at first, because
- // any actions that shouldn't happen until "later" should generally also
- // not happen before the first read call.
-
- this.sync = true; // whenever we return null, then we set a flag to say
- // that we're awaiting a 'readable' event emission.
-
- this.needReadable = false;
- this.emittedReadable = false;
- this.readableListening = false;
- this.resumeScheduled = false;
- this.paused = true; // Should close be emitted on destroy. Defaults to true.
-
- this.emitClose = options.emitClose !== false; // Should .destroy() be called after 'end' (and potentially 'finish')
-
- this.autoDestroy = !!options.autoDestroy; // has it been destroyed
-
- this.destroyed = false; // Crypto is kind of old and crusty. Historically, its default string
- // encoding is 'binary' so we have to make this configurable.
- // Everything else in the universe uses 'utf8', though.
-
- this.defaultEncoding = options.defaultEncoding || 'utf8'; // the number of writers that are awaiting a drain event in .pipe()s
-
- this.awaitDrain = 0; // if true, a maybeReadMore has been scheduled
-
- this.readingMore = false;
- this.decoder = null;
- this.encoding = null;
-
- if (options.encoding) {
- if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder;
- this.decoder = new StringDecoder(options.encoding);
- this.encoding = options.encoding;
- }
-}
-
-function Readable(options) {
- Duplex = Duplex || require('./_stream_duplex');
- if (!(this instanceof Readable)) return new Readable(options); // Checking for a Stream.Duplex instance is faster here instead of inside
- // the ReadableState constructor, at least with V8 6.5
-
- var isDuplex = this instanceof Duplex;
- this._readableState = new ReadableState(options, this, isDuplex); // legacy
-
- this.readable = true;
-
- if (options) {
- if (typeof options.read === 'function') this._read = options.read;
- if (typeof options.destroy === 'function') this._destroy = options.destroy;
- }
-
- Stream.call(this);
-}
-
-Object.defineProperty(Readable.prototype, 'destroyed', {
- // making it explicit this property is not enumerable
- // because otherwise some prototype manipulation in
- // userland will fail
- enumerable: false,
- get: function get() {
- if (this._readableState === undefined) {
- return false;
- }
-
- return this._readableState.destroyed;
- },
- set: function set(value) {
- // we ignore the value if the stream
- // has not been initialized yet
- if (!this._readableState) {
- return;
- } // backward compatibility, the user is explicitly
- // managing destroyed
-
-
- this._readableState.destroyed = value;
- }
-});
-Readable.prototype.destroy = destroyImpl.destroy;
-Readable.prototype._undestroy = destroyImpl.undestroy;
-
-Readable.prototype._destroy = function (err, cb) {
- cb(err);
-}; // Manually shove something into the read() buffer.
-// This returns true if the highWaterMark has not been hit yet,
-// similar to how Writable.write() returns true if you should
-// write() some more.
-
-
-Readable.prototype.push = function (chunk, encoding) {
- var state = this._readableState;
- var skipChunkCheck;
-
- if (!state.objectMode) {
- if (typeof chunk === 'string') {
- encoding = encoding || state.defaultEncoding;
-
- if (encoding !== state.encoding) {
- chunk = Buffer.from(chunk, encoding);
- encoding = '';
- }
-
- skipChunkCheck = true;
- }
- } else {
- skipChunkCheck = true;
- }
-
- return readableAddChunk(this, chunk, encoding, false, skipChunkCheck);
-}; // Unshift should *always* be something directly out of read()
-
-
-Readable.prototype.unshift = function (chunk) {
- return readableAddChunk(this, chunk, null, true, false);
-};
-
-function readableAddChunk(stream, chunk, encoding, addToFront, skipChunkCheck) {
- debug('readableAddChunk', chunk);
- var state = stream._readableState;
-
- if (chunk === null) {
- state.reading = false;
- onEofChunk(stream, state);
- } else {
- var er;
- if (!skipChunkCheck) er = chunkInvalid(state, chunk);
-
- if (er) {
- errorOrDestroy(stream, er);
- } else if (state.objectMode || chunk && chunk.length > 0) {
- if (typeof chunk !== 'string' && !state.objectMode && Object.getPrototypeOf(chunk) !== Buffer.prototype) {
- chunk = _uint8ArrayToBuffer(chunk);
- }
-
- if (addToFront) {
- if (state.endEmitted) errorOrDestroy(stream, new ERR_STREAM_UNSHIFT_AFTER_END_EVENT());else addChunk(stream, state, chunk, true);
- } else if (state.ended) {
- errorOrDestroy(stream, new ERR_STREAM_PUSH_AFTER_EOF());
- } else if (state.destroyed) {
- return false;
- } else {
- state.reading = false;
-
- if (state.decoder && !encoding) {
- chunk = state.decoder.write(chunk);
- if (state.objectMode || chunk.length !== 0) addChunk(stream, state, chunk, false);else maybeReadMore(stream, state);
- } else {
- addChunk(stream, state, chunk, false);
- }
- }
- } else if (!addToFront) {
- state.reading = false;
- maybeReadMore(stream, state);
- }
- } // We can push more data if we are below the highWaterMark.
- // Also, if we have no data yet, we can stand some more bytes.
- // This is to work around cases where hwm=0, such as the repl.
-
-
- return !state.ended && (state.length < state.highWaterMark || state.length === 0);
-}
-
-function addChunk(stream, state, chunk, addToFront) {
- if (state.flowing && state.length === 0 && !state.sync) {
- state.awaitDrain = 0;
- stream.emit('data', chunk);
- } else {
- // update the buffer info.
- state.length += state.objectMode ? 1 : chunk.length;
- if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk);
- if (state.needReadable) emitReadable(stream);
- }
-
- maybeReadMore(stream, state);
-}
-
-function chunkInvalid(state, chunk) {
- var er;
-
- if (!_isUint8Array(chunk) && typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) {
- er = new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer', 'Uint8Array'], chunk);
- }
-
- return er;
-}
-
-Readable.prototype.isPaused = function () {
- return this._readableState.flowing === false;
-}; // backwards compatibility.
-
-
-Readable.prototype.setEncoding = function (enc) {
- if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder;
- var decoder = new StringDecoder(enc);
- this._readableState.decoder = decoder; // If setEncoding(null), decoder.encoding equals utf8
-
- this._readableState.encoding = this._readableState.decoder.encoding; // Iterate over current buffer to convert already stored Buffers:
-
- var p = this._readableState.buffer.head;
- var content = '';
-
- while (p !== null) {
- content += decoder.write(p.data);
- p = p.next;
- }
-
- this._readableState.buffer.clear();
-
- if (content !== '') this._readableState.buffer.push(content);
- this._readableState.length = content.length;
- return this;
-}; // Don't raise the hwm > 1GB
-
-
-var MAX_HWM = 0x40000000;
-
-function computeNewHighWaterMark(n) {
- if (n >= MAX_HWM) {
- // TODO(ronag): Throw ERR_VALUE_OUT_OF_RANGE.
- n = MAX_HWM;
- } else {
- // Get the next highest power of 2 to prevent increasing hwm excessively in
- // tiny amounts
- n--;
- n |= n >>> 1;
- n |= n >>> 2;
- n |= n >>> 4;
- n |= n >>> 8;
- n |= n >>> 16;
- n++;
- }
-
- return n;
-} // This function is designed to be inlinable, so please take care when making
-// changes to the function body.
-
-
-function howMuchToRead(n, state) {
- if (n <= 0 || state.length === 0 && state.ended) return 0;
- if (state.objectMode) return 1;
-
- if (n !== n) {
- // Only flow one buffer at a time
- if (state.flowing && state.length) return state.buffer.head.data.length;else return state.length;
- } // If we're asking for more than the current hwm, then raise the hwm.
-
-
- if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n);
- if (n <= state.length) return n; // Don't have enough
-
- if (!state.ended) {
- state.needReadable = true;
- return 0;
- }
-
- return state.length;
-} // you can override either this method, or the async _read(n) below.
-
-
-Readable.prototype.read = function (n) {
- debug('read', n);
- n = parseInt(n, 10);
- var state = this._readableState;
- var nOrig = n;
- if (n !== 0) state.emittedReadable = false; // if we're doing read(0) to trigger a readable event, but we
- // already have a bunch of data in the buffer, then just trigger
- // the 'readable' event and move on.
-
- if (n === 0 && state.needReadable && ((state.highWaterMark !== 0 ? state.length >= state.highWaterMark : state.length > 0) || state.ended)) {
- debug('read: emitReadable', state.length, state.ended);
- if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this);
- return null;
- }
-
- n = howMuchToRead(n, state); // if we've ended, and we're now clear, then finish it up.
-
- if (n === 0 && state.ended) {
- if (state.length === 0) endReadable(this);
- return null;
- } // All the actual chunk generation logic needs to be
- // *below* the call to _read. The reason is that in certain
- // synthetic stream cases, such as passthrough streams, _read
- // may be a completely synchronous operation which may change
- // the state of the read buffer, providing enough data when
- // before there was *not* enough.
- //
- // So, the steps are:
- // 1. Figure out what the state of things will be after we do
- // a read from the buffer.
- //
- // 2. If that resulting state will trigger a _read, then call _read.
- // Note that this may be asynchronous, or synchronous. Yes, it is
- // deeply ugly to write APIs this way, but that still doesn't mean
- // that the Readable class should behave improperly, as streams are
- // designed to be sync/async agnostic.
- // Take note if the _read call is sync or async (ie, if the read call
- // has returned yet), so that we know whether or not it's safe to emit
- // 'readable' etc.
- //
- // 3. Actually pull the requested chunks out of the buffer and return.
- // if we need a readable event, then we need to do some reading.
-
-
- var doRead = state.needReadable;
- debug('need readable', doRead); // if we currently have less than the highWaterMark, then also read some
-
- if (state.length === 0 || state.length - n < state.highWaterMark) {
- doRead = true;
- debug('length less than watermark', doRead);
- } // however, if we've ended, then there's no point, and if we're already
- // reading, then it's unnecessary.
-
-
- if (state.ended || state.reading) {
- doRead = false;
- debug('reading or ended', doRead);
- } else if (doRead) {
- debug('do read');
- state.reading = true;
- state.sync = true; // if the length is currently zero, then we *need* a readable event.
-
- if (state.length === 0) state.needReadable = true; // call internal read method
-
- this._read(state.highWaterMark);
-
- state.sync = false; // If _read pushed data synchronously, then `reading` will be false,
- // and we need to re-evaluate how much data we can return to the user.
-
- if (!state.reading) n = howMuchToRead(nOrig, state);
- }
-
- var ret;
- if (n > 0) ret = fromList(n, state);else ret = null;
-
- if (ret === null) {
- state.needReadable = state.length <= state.highWaterMark;
- n = 0;
- } else {
- state.length -= n;
- state.awaitDrain = 0;
- }
-
- if (state.length === 0) {
- // If we have nothing in the buffer, then we want to know
- // as soon as we *do* get something into the buffer.
- if (!state.ended) state.needReadable = true; // If we tried to read() past the EOF, then emit end on the next tick.
-
- if (nOrig !== n && state.ended) endReadable(this);
- }
-
- if (ret !== null) this.emit('data', ret);
- return ret;
-};
-
-function onEofChunk(stream, state) {
- debug('onEofChunk');
- if (state.ended) return;
-
- if (state.decoder) {
- var chunk = state.decoder.end();
-
- if (chunk && chunk.length) {
- state.buffer.push(chunk);
- state.length += state.objectMode ? 1 : chunk.length;
- }
- }
-
- state.ended = true;
-
- if (state.sync) {
- // if we are sync, wait until next tick to emit the data.
- // Otherwise we risk emitting data in the flow()
- // the readable code triggers during a read() call
- emitReadable(stream);
- } else {
- // emit 'readable' now to make sure it gets picked up.
- state.needReadable = false;
-
- if (!state.emittedReadable) {
- state.emittedReadable = true;
- emitReadable_(stream);
- }
- }
-} // Don't emit readable right away in sync mode, because this can trigger
-// another read() call => stack overflow. This way, it might trigger
-// a nextTick recursion warning, but that's not so bad.
-
-
-function emitReadable(stream) {
- var state = stream._readableState;
- debug('emitReadable', state.needReadable, state.emittedReadable);
- state.needReadable = false;
-
- if (!state.emittedReadable) {
- debug('emitReadable', state.flowing);
- state.emittedReadable = true;
- process.nextTick(emitReadable_, stream);
- }
-}
-
-function emitReadable_(stream) {
- var state = stream._readableState;
- debug('emitReadable_', state.destroyed, state.length, state.ended);
-
- if (!state.destroyed && (state.length || state.ended)) {
- stream.emit('readable');
- state.emittedReadable = false;
- } // The stream needs another readable event if
- // 1. It is not flowing, as the flow mechanism will take
- // care of it.
- // 2. It is not ended.
- // 3. It is below the highWaterMark, so we can schedule
- // another readable later.
-
-
- state.needReadable = !state.flowing && !state.ended && state.length <= state.highWaterMark;
- flow(stream);
-} // at this point, the user has presumably seen the 'readable' event,
-// and called read() to consume some data. that may have triggered
-// in turn another _read(n) call, in which case reading = true if
-// it's in progress.
-// However, if we're not ended, or reading, and the length < hwm,
-// then go ahead and try to read some more preemptively.
-
-
-function maybeReadMore(stream, state) {
- if (!state.readingMore) {
- state.readingMore = true;
- process.nextTick(maybeReadMore_, stream, state);
- }
-}
-
-function maybeReadMore_(stream, state) {
- // Attempt to read more data if we should.
- //
- // The conditions for reading more data are (one of):
- // - Not enough data buffered (state.length < state.highWaterMark). The loop
- // is responsible for filling the buffer with enough data if such data
- // is available. If highWaterMark is 0 and we are not in the flowing mode
- // we should _not_ attempt to buffer any extra data. We'll get more data
- // when the stream consumer calls read() instead.
- // - No data in the buffer, and the stream is in flowing mode. In this mode
- // the loop below is responsible for ensuring read() is called. Failing to
- // call read here would abort the flow and there's no other mechanism for
- // continuing the flow if the stream consumer has just subscribed to the
- // 'data' event.
- //
- // In addition to the above conditions to keep reading data, the following
- // conditions prevent the data from being read:
- // - The stream has ended (state.ended).
- // - There is already a pending 'read' operation (state.reading). This is a
-  //   case where the stream has called the implementation defined _read()
- // method, but they are processing the call asynchronously and have _not_
- // called push() with new data. In this case we skip performing more
- // read()s. The execution ends in this method again after the _read() ends
- // up calling push() with more data.
- while (!state.reading && !state.ended && (state.length < state.highWaterMark || state.flowing && state.length === 0)) {
- var len = state.length;
- debug('maybeReadMore read 0');
- stream.read(0);
- if (len === state.length) // didn't get any data, stop spinning.
- break;
- }
-
- state.readingMore = false;
-} // abstract method. to be overridden in specific implementation classes.
-// call cb(er, data) where data is <= n in length.
-// for virtual (non-string, non-buffer) streams, "length" is somewhat
-// arbitrary, and perhaps not very meaningful.
-
-
-Readable.prototype._read = function (n) {
- errorOrDestroy(this, new ERR_METHOD_NOT_IMPLEMENTED('_read()'));
-};
-
-Readable.prototype.pipe = function (dest, pipeOpts) {
- var src = this;
- var state = this._readableState;
-
- switch (state.pipesCount) {
- case 0:
- state.pipes = dest;
- break;
-
- case 1:
- state.pipes = [state.pipes, dest];
- break;
-
- default:
- state.pipes.push(dest);
- break;
- }
-
- state.pipesCount += 1;
- debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts);
- var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr;
- var endFn = doEnd ? onend : unpipe;
- if (state.endEmitted) process.nextTick(endFn);else src.once('end', endFn);
- dest.on('unpipe', onunpipe);
-
- function onunpipe(readable, unpipeInfo) {
- debug('onunpipe');
-
- if (readable === src) {
- if (unpipeInfo && unpipeInfo.hasUnpiped === false) {
- unpipeInfo.hasUnpiped = true;
- cleanup();
- }
- }
- }
-
- function onend() {
- debug('onend');
- dest.end();
- } // when the dest drains, it reduces the awaitDrain counter
- // on the source. This would be more elegant with a .once()
- // handler in flow(), but adding and removing repeatedly is
- // too slow.
-
-
- var ondrain = pipeOnDrain(src);
- dest.on('drain', ondrain);
- var cleanedUp = false;
-
- function cleanup() {
- debug('cleanup'); // cleanup event handlers once the pipe is broken
-
- dest.removeListener('close', onclose);
- dest.removeListener('finish', onfinish);
- dest.removeListener('drain', ondrain);
- dest.removeListener('error', onerror);
- dest.removeListener('unpipe', onunpipe);
- src.removeListener('end', onend);
- src.removeListener('end', unpipe);
- src.removeListener('data', ondata);
- cleanedUp = true; // if the reader is waiting for a drain event from this
- // specific writer, then it would cause it to never start
- // flowing again.
- // So, if this is awaiting a drain, then we just call it now.
- // If we don't know, then assume that we are waiting for one.
-
- if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain();
- }
-
- src.on('data', ondata);
-
- function ondata(chunk) {
- debug('ondata');
- var ret = dest.write(chunk);
- debug('dest.write', ret);
-
- if (ret === false) {
- // If the user unpiped during `dest.write()`, it is possible
- // to get stuck in a permanently paused state if that write
- // also returned false.
- // => Check whether `dest` is still a piping destination.
- if ((state.pipesCount === 1 && state.pipes === dest || state.pipesCount > 1 && indexOf(state.pipes, dest) !== -1) && !cleanedUp) {
- debug('false write response, pause', state.awaitDrain);
- state.awaitDrain++;
- }
-
- src.pause();
- }
- } // if the dest has an error, then stop piping into it.
- // however, don't suppress the throwing behavior for this.
-
-
- function onerror(er) {
- debug('onerror', er);
- unpipe();
- dest.removeListener('error', onerror);
- if (EElistenerCount(dest, 'error') === 0) errorOrDestroy(dest, er);
- } // Make sure our error handler is attached before userland ones.
-
-
- prependListener(dest, 'error', onerror); // Both close and finish should trigger unpipe, but only once.
-
- function onclose() {
- dest.removeListener('finish', onfinish);
- unpipe();
- }
-
- dest.once('close', onclose);
-
- function onfinish() {
- debug('onfinish');
- dest.removeListener('close', onclose);
- unpipe();
- }
-
- dest.once('finish', onfinish);
-
- function unpipe() {
- debug('unpipe');
- src.unpipe(dest);
- } // tell the dest that it's being piped to
-
-
- dest.emit('pipe', src); // start the flow if it hasn't been started already.
-
- if (!state.flowing) {
- debug('pipe resume');
- src.resume();
- }
-
- return dest;
-};
-
-function pipeOnDrain(src) {
- return function pipeOnDrainFunctionResult() {
- var state = src._readableState;
- debug('pipeOnDrain', state.awaitDrain);
- if (state.awaitDrain) state.awaitDrain--;
-
- if (state.awaitDrain === 0 && EElistenerCount(src, 'data')) {
- state.flowing = true;
- flow(src);
- }
- };
-}
-
-Readable.prototype.unpipe = function (dest) {
- var state = this._readableState;
- var unpipeInfo = {
- hasUnpiped: false
- }; // if we're not piping anywhere, then do nothing.
-
- if (state.pipesCount === 0) return this; // just one destination. most common case.
-
- if (state.pipesCount === 1) {
- // passed in one, but it's not the right one.
- if (dest && dest !== state.pipes) return this;
- if (!dest) dest = state.pipes; // got a match.
-
- state.pipes = null;
- state.pipesCount = 0;
- state.flowing = false;
- if (dest) dest.emit('unpipe', this, unpipeInfo);
- return this;
- } // slow case. multiple pipe destinations.
-
-
- if (!dest) {
- // remove all.
- var dests = state.pipes;
- var len = state.pipesCount;
- state.pipes = null;
- state.pipesCount = 0;
- state.flowing = false;
-
- for (var i = 0; i < len; i++) {
- dests[i].emit('unpipe', this, {
- hasUnpiped: false
- });
- }
-
- return this;
- } // try to find the right one.
-
-
- var index = indexOf(state.pipes, dest);
- if (index === -1) return this;
- state.pipes.splice(index, 1);
- state.pipesCount -= 1;
- if (state.pipesCount === 1) state.pipes = state.pipes[0];
- dest.emit('unpipe', this, unpipeInfo);
- return this;
-}; // set up data events if they are asked for
-// Ensure readable listeners eventually get something
-
-
-Readable.prototype.on = function (ev, fn) {
- var res = Stream.prototype.on.call(this, ev, fn);
- var state = this._readableState;
-
- if (ev === 'data') {
- // update readableListening so that resume() may be a no-op
- // a few lines down. This is needed to support once('readable').
- state.readableListening = this.listenerCount('readable') > 0; // Try start flowing on next tick if stream isn't explicitly paused
-
- if (state.flowing !== false) this.resume();
- } else if (ev === 'readable') {
- if (!state.endEmitted && !state.readableListening) {
- state.readableListening = state.needReadable = true;
- state.flowing = false;
- state.emittedReadable = false;
- debug('on readable', state.length, state.reading);
-
- if (state.length) {
- emitReadable(this);
- } else if (!state.reading) {
- process.nextTick(nReadingNextTick, this);
- }
- }
- }
-
- return res;
-};
-
-Readable.prototype.addListener = Readable.prototype.on;
-
-Readable.prototype.removeListener = function (ev, fn) {
- var res = Stream.prototype.removeListener.call(this, ev, fn);
-
- if (ev === 'readable') {
- // We need to check if there is someone still listening to
- // readable and reset the state. However this needs to happen
- // after readable has been emitted but before I/O (nextTick) to
- // support once('readable', fn) cycles. This means that calling
- // resume within the same tick will have no
- // effect.
- process.nextTick(updateReadableListening, this);
- }
-
- return res;
-};
-
-Readable.prototype.removeAllListeners = function (ev) {
- var res = Stream.prototype.removeAllListeners.apply(this, arguments);
-
- if (ev === 'readable' || ev === undefined) {
- // We need to check if there is someone still listening to
- // readable and reset the state. However this needs to happen
- // after readable has been emitted but before I/O (nextTick) to
- // support once('readable', fn) cycles. This means that calling
- // resume within the same tick will have no
- // effect.
- process.nextTick(updateReadableListening, this);
- }
-
- return res;
-};
-
-function updateReadableListening(self) {
- var state = self._readableState;
- state.readableListening = self.listenerCount('readable') > 0;
-
- if (state.resumeScheduled && !state.paused) {
- // flowing needs to be set to true now, otherwise
- // the upcoming resume will not flow.
- state.flowing = true; // crude way to check if we should resume
- } else if (self.listenerCount('data') > 0) {
- self.resume();
- }
-}
-
-function nReadingNextTick(self) {
- debug('readable nexttick read 0');
- self.read(0);
-} // pause() and resume() are remnants of the legacy readable stream API
-// If the user uses them, then switch into old mode.
-
-
-Readable.prototype.resume = function () {
- var state = this._readableState;
-
- if (!state.flowing) {
- debug('resume'); // we flow only if there is no one listening
- // for readable, but we still have to call
- // resume()
-
- state.flowing = !state.readableListening;
- resume(this, state);
- }
-
- state.paused = false;
- return this;
-};
-
-function resume(stream, state) {
- if (!state.resumeScheduled) {
- state.resumeScheduled = true;
- process.nextTick(resume_, stream, state);
- }
-}
-
-function resume_(stream, state) {
- debug('resume', state.reading);
-
- if (!state.reading) {
- stream.read(0);
- }
-
- state.resumeScheduled = false;
- stream.emit('resume');
- flow(stream);
- if (state.flowing && !state.reading) stream.read(0);
-}
-
-Readable.prototype.pause = function () {
- debug('call pause flowing=%j', this._readableState.flowing);
-
- if (this._readableState.flowing !== false) {
- debug('pause');
- this._readableState.flowing = false;
- this.emit('pause');
- }
-
- this._readableState.paused = true;
- return this;
-};
-
-function flow(stream) {
- var state = stream._readableState;
- debug('flow', state.flowing);
-
- while (state.flowing && stream.read() !== null) {
- ;
- }
-} // wrap an old-style stream as the async data source.
-// This is *not* part of the readable stream interface.
-// It is an ugly unfortunate mess of history.
-
-
-Readable.prototype.wrap = function (stream) {
- var _this = this;
-
- var state = this._readableState;
- var paused = false;
- stream.on('end', function () {
- debug('wrapped end');
-
- if (state.decoder && !state.ended) {
- var chunk = state.decoder.end();
- if (chunk && chunk.length) _this.push(chunk);
- }
-
- _this.push(null);
- });
- stream.on('data', function (chunk) {
- debug('wrapped data');
- if (state.decoder) chunk = state.decoder.write(chunk); // don't skip over falsy values in objectMode
-
- if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return;
-
- var ret = _this.push(chunk);
-
- if (!ret) {
- paused = true;
- stream.pause();
- }
- }); // proxy all the other methods.
- // important when wrapping filters and duplexes.
-
- for (var i in stream) {
- if (this[i] === undefined && typeof stream[i] === 'function') {
- this[i] = function methodWrap(method) {
- return function methodWrapReturnFunction() {
- return stream[method].apply(stream, arguments);
- };
- }(i);
- }
- } // proxy certain important events.
-
-
- for (var n = 0; n < kProxyEvents.length; n++) {
- stream.on(kProxyEvents[n], this.emit.bind(this, kProxyEvents[n]));
- } // when we try to consume some more bytes, simply unpause the
- // underlying stream.
-
-
- this._read = function (n) {
- debug('wrapped _read', n);
-
- if (paused) {
- paused = false;
- stream.resume();
- }
- };
-
- return this;
-};
-
-if (typeof Symbol === 'function') {
- Readable.prototype[Symbol.asyncIterator] = function () {
- if (createReadableStreamAsyncIterator === undefined) {
- createReadableStreamAsyncIterator = require('./internal/streams/async_iterator');
- }
-
- return createReadableStreamAsyncIterator(this);
- };
-}
-
-Object.defineProperty(Readable.prototype, 'readableHighWaterMark', {
- // making it explicit this property is not enumerable
- // because otherwise some prototype manipulation in
- // userland will fail
- enumerable: false,
- get: function get() {
- return this._readableState.highWaterMark;
- }
-});
-Object.defineProperty(Readable.prototype, 'readableBuffer', {
- // making it explicit this property is not enumerable
- // because otherwise some prototype manipulation in
- // userland will fail
- enumerable: false,
- get: function get() {
- return this._readableState && this._readableState.buffer;
- }
-});
-Object.defineProperty(Readable.prototype, 'readableFlowing', {
- // making it explicit this property is not enumerable
- // because otherwise some prototype manipulation in
- // userland will fail
- enumerable: false,
- get: function get() {
- return this._readableState.flowing;
- },
- set: function set(state) {
- if (this._readableState) {
- this._readableState.flowing = state;
- }
- }
-}); // exposed for testing purposes only.
-
-Readable._fromList = fromList;
-Object.defineProperty(Readable.prototype, 'readableLength', {
- // making it explicit this property is not enumerable
- // because otherwise some prototype manipulation in
- // userland will fail
- enumerable: false,
- get: function get() {
- return this._readableState.length;
- }
-}); // Pluck off n bytes from an array of buffers.
-// Length is the combined lengths of all the buffers in the list.
-// This function is designed to be inlinable, so please take care when making
-// changes to the function body.
-
-function fromList(n, state) {
- // nothing buffered
- if (state.length === 0) return null;
- var ret;
- if (state.objectMode) ret = state.buffer.shift();else if (!n || n >= state.length) {
- // read it all, truncate the list
- if (state.decoder) ret = state.buffer.join('');else if (state.buffer.length === 1) ret = state.buffer.first();else ret = state.buffer.concat(state.length);
- state.buffer.clear();
- } else {
- // read part of list
- ret = state.buffer.consume(n, state.decoder);
- }
- return ret;
-}
-
-function endReadable(stream) {
- var state = stream._readableState;
- debug('endReadable', state.endEmitted);
-
- if (!state.endEmitted) {
- state.ended = true;
- process.nextTick(endReadableNT, state, stream);
- }
-}
-
-function endReadableNT(state, stream) {
- debug('endReadableNT', state.endEmitted, state.length); // Check that we didn't get one last unshift.
-
- if (!state.endEmitted && state.length === 0) {
- state.endEmitted = true;
- stream.readable = false;
- stream.emit('end');
-
- if (state.autoDestroy) {
- // In case of duplex streams we need a way to detect
- // if the writable side is ready for autoDestroy as well
- var wState = stream._writableState;
-
- if (!wState || wState.autoDestroy && wState.finished) {
- stream.destroy();
- }
- }
- }
-}
-
-if (typeof Symbol === 'function') {
- Readable.from = function (iterable, opts) {
- if (from === undefined) {
- from = require('./internal/streams/from');
- }
-
- return from(Readable, iterable, opts);
- };
-}
-
-function indexOf(xs, x) {
- for (var i = 0, l = xs.length; i < l; i++) {
- if (xs[i] === x) return i;
- }
-
- return -1;
-}
-}).call(this)}).call(this,require('_process'),typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
-},{"../errors":30,"./_stream_duplex":31,"./internal/streams/async_iterator":36,"./internal/streams/buffer_list":37,"./internal/streams/destroy":38,"./internal/streams/from":40,"./internal/streams/state":42,"./internal/streams/stream":43,"_process":27,"buffer":10,"events":14,"inherits":23,"string_decoder/":44,"util":9}],34:[function(require,module,exports){
-// Copyright Joyent, Inc. and other Node contributors.
-//
-// Permission is hereby granted, free of charge, to any person obtaining a
-// copy of this software and associated documentation files (the
-// "Software"), to deal in the Software without restriction, including
-// without limitation the rights to use, copy, modify, merge, publish,
-// distribute, sublicense, and/or sell copies of the Software, and to permit
-// persons to whom the Software is furnished to do so, subject to the
-// following conditions:
-//
-// The above copyright notice and this permission notice shall be included
-// in all copies or substantial portions of the Software.
-//
-// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
-// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
-// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
-// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
-// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
-// USE OR OTHER DEALINGS IN THE SOFTWARE.
-// a transform stream is a readable/writable stream where you do
-// something with the data. Sometimes it's called a "filter",
-// but that's not a great name for it, since that implies a thing where
-// some bits pass through, and others are simply ignored. (That would
-// be a valid example of a transform, of course.)
-//
-// While the output is causally related to the input, it's not a
-// necessarily symmetric or synchronous transformation. For example,
-// a zlib stream might take multiple plain-text writes(), and then
-// emit a single compressed chunk some time in the future.
-//
-// Here's how this works:
-//
-// The Transform stream has all the aspects of the readable and writable
-// stream classes. When you write(chunk), that calls _write(chunk,cb)
-// internally, and returns false if there's a lot of pending writes
-// buffered up. When you call read(), that calls _read(n) until
-// there's enough pending readable data buffered up.
-//
-// In a transform stream, the written data is placed in a buffer. When
-// _read(n) is called, it transforms the queued up data, calling the
-// buffered _write cb's as it consumes chunks. If consuming a single
-// written chunk would result in multiple output chunks, then the first
-// outputted bit calls the readcb, and subsequent chunks just go into
-// the read buffer, and will cause it to emit 'readable' if necessary.
-//
-// This way, back-pressure is actually determined by the reading side,
-// since _read has to be called to start processing a new chunk. However,
-// a pathological inflate type of transform can cause excessive buffering
-// here. For example, imagine a stream where every byte of input is
-// interpreted as an integer from 0-255, and then results in that many
-// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in
-// 1kb of data being output. In this case, you could write a very small
-// amount of input, and end up with a very large amount of output. In
-// such a pathological inflating mechanism, there'd be no way to tell
-// the system to stop doing the transform. A single 4MB write could
-// cause the system to run out of memory.
-//
-// However, even in such a pathological case, only a single written chunk
-// would be consumed, and then the rest would wait (un-transformed) until
-// the results of the previous transformed chunk were consumed.
-'use strict';
-
-module.exports = Transform;
-
-var _require$codes = require('../errors').codes,
- ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED,
- ERR_MULTIPLE_CALLBACK = _require$codes.ERR_MULTIPLE_CALLBACK,
- ERR_TRANSFORM_ALREADY_TRANSFORMING = _require$codes.ERR_TRANSFORM_ALREADY_TRANSFORMING,
- ERR_TRANSFORM_WITH_LENGTH_0 = _require$codes.ERR_TRANSFORM_WITH_LENGTH_0;
-
-var Duplex = require('./_stream_duplex');
-
-require('inherits')(Transform, Duplex);
-
-function afterTransform(er, data) {
- var ts = this._transformState;
- ts.transforming = false;
- var cb = ts.writecb;
-
- if (cb === null) {
- return this.emit('error', new ERR_MULTIPLE_CALLBACK());
- }
-
- ts.writechunk = null;
- ts.writecb = null;
- if (data != null) // single equals check for both `null` and `undefined`
- this.push(data);
- cb(er);
- var rs = this._readableState;
- rs.reading = false;
-
- if (rs.needReadable || rs.length < rs.highWaterMark) {
- this._read(rs.highWaterMark);
- }
-}
-
-function Transform(options) {
- if (!(this instanceof Transform)) return new Transform(options);
- Duplex.call(this, options);
- this._transformState = {
- afterTransform: afterTransform.bind(this),
- needTransform: false,
- transforming: false,
- writecb: null,
- writechunk: null,
- writeencoding: null
- }; // start out asking for a readable event once data is transformed.
-
- this._readableState.needReadable = true; // we have implemented the _read method, and done the other things
- // that Readable wants before the first _read call, so unset the
- // sync guard flag.
-
- this._readableState.sync = false;
-
- if (options) {
- if (typeof options.transform === 'function') this._transform = options.transform;
- if (typeof options.flush === 'function') this._flush = options.flush;
- } // When the writable side finishes, then flush out anything remaining.
-
-
- this.on('prefinish', prefinish);
-}
-
-function prefinish() {
- var _this = this;
-
- if (typeof this._flush === 'function' && !this._readableState.destroyed) {
- this._flush(function (er, data) {
- done(_this, er, data);
- });
- } else {
- done(this, null, null);
- }
-}
-
-Transform.prototype.push = function (chunk, encoding) {
- this._transformState.needTransform = false;
- return Duplex.prototype.push.call(this, chunk, encoding);
-}; // This is the part where you do stuff!
-// override this function in implementation classes.
-// 'chunk' is an input chunk.
-//
-// Call `push(newChunk)` to pass along transformed output
-// to the readable side. You may call 'push' zero or more times.
-//
-// Call `cb(err)` when you are done with this chunk. If you pass
-// an error, then that'll put the hurt on the whole operation. If you
-// never call cb(), then you'll never get another chunk.
-
-
-Transform.prototype._transform = function (chunk, encoding, cb) {
- cb(new ERR_METHOD_NOT_IMPLEMENTED('_transform()'));
-};
-
-Transform.prototype._write = function (chunk, encoding, cb) {
- var ts = this._transformState;
- ts.writecb = cb;
- ts.writechunk = chunk;
- ts.writeencoding = encoding;
-
- if (!ts.transforming) {
- var rs = this._readableState;
- if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark);
- }
-}; // Doesn't matter what the args are here.
-// _transform does all the work.
-// That we got here means that the readable side wants more data.
-
-
-Transform.prototype._read = function (n) {
- var ts = this._transformState;
-
- if (ts.writechunk !== null && !ts.transforming) {
- ts.transforming = true;
-
- this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform);
- } else {
- // mark that we need a transform, so that any data that comes in
- // will get processed, now that we've asked for it.
- ts.needTransform = true;
- }
-};
-
-Transform.prototype._destroy = function (err, cb) {
- Duplex.prototype._destroy.call(this, err, function (err2) {
- cb(err2);
- });
-};
-
-function done(stream, er, data) {
- if (er) return stream.emit('error', er);
- if (data != null) // single equals check for both `null` and `undefined`
- stream.push(data); // TODO(BridgeAR): Write a test for these two error cases
- // if there's nothing in the write buffer, then that means
- // that nothing more will ever be provided
-
- if (stream._writableState.length) throw new ERR_TRANSFORM_WITH_LENGTH_0();
- if (stream._transformState.transforming) throw new ERR_TRANSFORM_ALREADY_TRANSFORMING();
- return stream.push(null);
-}
-},{"../errors":30,"./_stream_duplex":31,"inherits":23}],35:[function(require,module,exports){
-(function (process,global){(function (){
-// Copyright Joyent, Inc. and other Node contributors.
-//
-// Permission is hereby granted, free of charge, to any person obtaining a
-// copy of this software and associated documentation files (the
-// "Software"), to deal in the Software without restriction, including
-// without limitation the rights to use, copy, modify, merge, publish,
-// distribute, sublicense, and/or sell copies of the Software, and to permit
-// persons to whom the Software is furnished to do so, subject to the
-// following conditions:
-//
-// The above copyright notice and this permission notice shall be included
-// in all copies or substantial portions of the Software.
-//
-// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
-// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
-// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
-// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
-// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
-// USE OR OTHER DEALINGS IN THE SOFTWARE.
-// A bit simpler than readable streams.
-// Implement an async ._write(chunk, encoding, cb), and it'll handle all
-// the drain event emission and buffering.
-'use strict';
-
-module.exports = Writable;
-/* */
-
-function WriteReq(chunk, encoding, cb) {
- this.chunk = chunk;
- this.encoding = encoding;
- this.callback = cb;
- this.next = null;
-} // This looks like a linked list, but it is not:
-// there will only ever be 2 of these for each stream
-
-
-function CorkedRequest(state) {
- var _this = this;
-
- this.next = null;
- this.entry = null;
-
- this.finish = function () {
- onCorkedFinish(_this, state);
- };
-}
-/* */
-
-/**/
-
-
-var Duplex;
-/**/
-
-Writable.WritableState = WritableState;
-/**/
-
-var internalUtil = {
- deprecate: require('util-deprecate')
-};
-/**/
-
-/**/
-
-var Stream = require('./internal/streams/stream');
-/**/
-
-
-var Buffer = require('buffer').Buffer;
-
-var OurUint8Array = global.Uint8Array || function () {};
-
-function _uint8ArrayToBuffer(chunk) {
- return Buffer.from(chunk);
-}
-
-function _isUint8Array(obj) {
- return Buffer.isBuffer(obj) || obj instanceof OurUint8Array;
-}
-
-var destroyImpl = require('./internal/streams/destroy');
-
-var _require = require('./internal/streams/state'),
- getHighWaterMark = _require.getHighWaterMark;
-
-var _require$codes = require('../errors').codes,
- ERR_INVALID_ARG_TYPE = _require$codes.ERR_INVALID_ARG_TYPE,
- ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED,
- ERR_MULTIPLE_CALLBACK = _require$codes.ERR_MULTIPLE_CALLBACK,
- ERR_STREAM_CANNOT_PIPE = _require$codes.ERR_STREAM_CANNOT_PIPE,
- ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED,
- ERR_STREAM_NULL_VALUES = _require$codes.ERR_STREAM_NULL_VALUES,
- ERR_STREAM_WRITE_AFTER_END = _require$codes.ERR_STREAM_WRITE_AFTER_END,
- ERR_UNKNOWN_ENCODING = _require$codes.ERR_UNKNOWN_ENCODING;
-
-var errorOrDestroy = destroyImpl.errorOrDestroy;
-
-require('inherits')(Writable, Stream);
-
-function nop() {}
-
-function WritableState(options, stream, isDuplex) {
- Duplex = Duplex || require('./_stream_duplex');
- options = options || {}; // Duplex streams are both readable and writable, but share
- // the same options object.
- // However, some cases require setting options to different
- // values for the readable and the writable sides of the duplex stream,
- // e.g. options.readableObjectMode vs. options.writableObjectMode, etc.
-
- if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof Duplex; // object stream flag to indicate whether or not this stream
- // contains buffers or objects.
-
- this.objectMode = !!options.objectMode;
- if (isDuplex) this.objectMode = this.objectMode || !!options.writableObjectMode; // the point at which write() starts returning false
- // Note: 0 is a valid value, means that we always return false if
- // the entire buffer is not flushed immediately on write()
-
- this.highWaterMark = getHighWaterMark(this, options, 'writableHighWaterMark', isDuplex); // if _final has been called
-
- this.finalCalled = false; // drain event flag.
-
- this.needDrain = false; // at the start of calling end()
-
- this.ending = false; // when end() has been called, and returned
-
- this.ended = false; // when 'finish' is emitted
-
- this.finished = false; // has it been destroyed
-
- this.destroyed = false; // should we decode strings into buffers before passing to _write?
- // this is here so that some node-core streams can optimize string
- // handling at a lower level.
-
- var noDecode = options.decodeStrings === false;
- this.decodeStrings = !noDecode; // Crypto is kind of old and crusty. Historically, its default string
- // encoding is 'binary' so we have to make this configurable.
- // Everything else in the universe uses 'utf8', though.
-
- this.defaultEncoding = options.defaultEncoding || 'utf8'; // not an actual buffer we keep track of, but a measurement
- // of how much we're waiting to get pushed to some underlying
- // socket or file.
-
- this.length = 0; // a flag to see when we're in the middle of a write.
-
- this.writing = false; // when true all writes will be buffered until .uncork() call
-
- this.corked = 0; // a flag to be able to tell if the onwrite cb is called immediately,
- // or on a later tick. We set this to true at first, because any
- // actions that shouldn't happen until "later" should generally also
- // not happen before the first write call.
-
- this.sync = true; // a flag to know if we're processing previously buffered items, which
- // may call the _write() callback in the same tick, so that we don't
- // end up in an overlapped onwrite situation.
-
- this.bufferProcessing = false; // the callback that's passed to _write(chunk,cb)
-
- this.onwrite = function (er) {
- onwrite(stream, er);
- }; // the callback that the user supplies to write(chunk,encoding,cb)
-
-
- this.writecb = null; // the amount that is being written when _write is called.
-
- this.writelen = 0;
- this.bufferedRequest = null;
- this.lastBufferedRequest = null; // number of pending user-supplied write callbacks
- // this must be 0 before 'finish' can be emitted
-
- this.pendingcb = 0; // emit prefinish if the only thing we're waiting for is _write cbs
- // This is relevant for synchronous Transform streams
-
- this.prefinished = false; // True if the error was already emitted and should not be thrown again
-
- this.errorEmitted = false; // Should close be emitted on destroy. Defaults to true.
-
- this.emitClose = options.emitClose !== false; // Should .destroy() be called after 'finish' (and potentially 'end')
-
- this.autoDestroy = !!options.autoDestroy; // count buffered requests
-
- this.bufferedRequestCount = 0; // allocate the first CorkedRequest, there is always
- // one allocated and free to use, and we maintain at most two
-
- this.corkedRequestsFree = new CorkedRequest(this);
-}
-
-WritableState.prototype.getBuffer = function getBuffer() {
- var current = this.bufferedRequest;
- var out = [];
-
- while (current) {
- out.push(current);
- current = current.next;
- }
-
- return out;
-};
-
-(function () {
- try {
- Object.defineProperty(WritableState.prototype, 'buffer', {
- get: internalUtil.deprecate(function writableStateBufferGetter() {
- return this.getBuffer();
- }, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.', 'DEP0003')
- });
- } catch (_) {}
-})(); // Test _writableState for inheritance to account for Duplex streams,
-// whose prototype chain only points to Readable.
-
-
-var realHasInstance;
-
-if (typeof Symbol === 'function' && Symbol.hasInstance && typeof Function.prototype[Symbol.hasInstance] === 'function') {
- realHasInstance = Function.prototype[Symbol.hasInstance];
- Object.defineProperty(Writable, Symbol.hasInstance, {
- value: function value(object) {
- if (realHasInstance.call(this, object)) return true;
- if (this !== Writable) return false;
- return object && object._writableState instanceof WritableState;
- }
- });
-} else {
- realHasInstance = function realHasInstance(object) {
- return object instanceof this;
- };
-}
-
-function Writable(options) {
- Duplex = Duplex || require('./_stream_duplex'); // Writable ctor is applied to Duplexes, too.
- // `realHasInstance` is necessary because using plain `instanceof`
- // would return false, as no `_writableState` property is attached.
- // Trying to use the custom `instanceof` for Writable here will also break the
- // Node.js LazyTransform implementation, which has a non-trivial getter for
- // `_writableState` that would lead to infinite recursion.
- // Checking for a Stream.Duplex instance is faster here instead of inside
- // the WritableState constructor, at least with V8 6.5
-
- var isDuplex = this instanceof Duplex;
- if (!isDuplex && !realHasInstance.call(Writable, this)) return new Writable(options);
- this._writableState = new WritableState(options, this, isDuplex); // legacy.
-
- this.writable = true;
-
- if (options) {
- if (typeof options.write === 'function') this._write = options.write;
- if (typeof options.writev === 'function') this._writev = options.writev;
- if (typeof options.destroy === 'function') this._destroy = options.destroy;
- if (typeof options.final === 'function') this._final = options.final;
- }
-
- Stream.call(this);
-} // Otherwise people can pipe Writable streams, which is just wrong.
-
-
-Writable.prototype.pipe = function () {
- errorOrDestroy(this, new ERR_STREAM_CANNOT_PIPE());
-};
-
-function writeAfterEnd(stream, cb) {
- var er = new ERR_STREAM_WRITE_AFTER_END(); // TODO: defer error events consistently everywhere, not just the cb
-
- errorOrDestroy(stream, er);
- process.nextTick(cb, er);
-} // Checks that a user-supplied chunk is valid, especially for the particular
-// mode the stream is in. Currently this means that `null` is never accepted
-// and undefined/non-string values are only allowed in object mode.
-
-
-function validChunk(stream, state, chunk, cb) {
- var er;
-
- if (chunk === null) {
- er = new ERR_STREAM_NULL_VALUES();
- } else if (typeof chunk !== 'string' && !state.objectMode) {
- er = new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer'], chunk);
- }
-
- if (er) {
- errorOrDestroy(stream, er);
- process.nextTick(cb, er);
- return false;
- }
-
- return true;
-}
-
-Writable.prototype.write = function (chunk, encoding, cb) {
- var state = this._writableState;
- var ret = false;
-
- var isBuf = !state.objectMode && _isUint8Array(chunk);
-
- if (isBuf && !Buffer.isBuffer(chunk)) {
- chunk = _uint8ArrayToBuffer(chunk);
- }
-
- if (typeof encoding === 'function') {
- cb = encoding;
- encoding = null;
- }
-
- if (isBuf) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding;
- if (typeof cb !== 'function') cb = nop;
- if (state.ending) writeAfterEnd(this, cb);else if (isBuf || validChunk(this, state, chunk, cb)) {
- state.pendingcb++;
- ret = writeOrBuffer(this, state, isBuf, chunk, encoding, cb);
- }
- return ret;
-};
-
-Writable.prototype.cork = function () {
- this._writableState.corked++;
-};
-
-Writable.prototype.uncork = function () {
- var state = this._writableState;
-
- if (state.corked) {
- state.corked--;
- if (!state.writing && !state.corked && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state);
- }
-};
-
-Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) {
- // node::ParseEncoding() requires lower case.
- if (typeof encoding === 'string') encoding = encoding.toLowerCase();
- if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new ERR_UNKNOWN_ENCODING(encoding);
- this._writableState.defaultEncoding = encoding;
- return this;
-};
-
-Object.defineProperty(Writable.prototype, 'writableBuffer', {
- // making it explicit this property is not enumerable
- // because otherwise some prototype manipulation in
- // userland will fail
- enumerable: false,
- get: function get() {
- return this._writableState && this._writableState.getBuffer();
- }
-});
-
-function decodeChunk(state, chunk, encoding) {
- if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') {
- chunk = Buffer.from(chunk, encoding);
- }
-
- return chunk;
-}
-
-Object.defineProperty(Writable.prototype, 'writableHighWaterMark', {
- // making it explicit this property is not enumerable
- // because otherwise some prototype manipulation in
- // userland will fail
- enumerable: false,
- get: function get() {
- return this._writableState.highWaterMark;
- }
-}); // if we're already writing something, then just put this
-// in the queue, and wait our turn. Otherwise, call _write
-// If we return false, then we need a drain event, so set that flag.
-
-function writeOrBuffer(stream, state, isBuf, chunk, encoding, cb) {
- if (!isBuf) {
- var newChunk = decodeChunk(state, chunk, encoding);
-
- if (chunk !== newChunk) {
- isBuf = true;
- encoding = 'buffer';
- chunk = newChunk;
- }
- }
-
- var len = state.objectMode ? 1 : chunk.length;
- state.length += len;
- var ret = state.length < state.highWaterMark; // we must ensure that previous needDrain will not be reset to false.
-
- if (!ret) state.needDrain = true;
-
- if (state.writing || state.corked) {
- var last = state.lastBufferedRequest;
- state.lastBufferedRequest = {
- chunk: chunk,
- encoding: encoding,
- isBuf: isBuf,
- callback: cb,
- next: null
- };
-
- if (last) {
- last.next = state.lastBufferedRequest;
- } else {
- state.bufferedRequest = state.lastBufferedRequest;
- }
-
- state.bufferedRequestCount += 1;
- } else {
- doWrite(stream, state, false, len, chunk, encoding, cb);
- }
-
- return ret;
-}
-
-function doWrite(stream, state, writev, len, chunk, encoding, cb) {
- state.writelen = len;
- state.writecb = cb;
- state.writing = true;
- state.sync = true;
- if (state.destroyed) state.onwrite(new ERR_STREAM_DESTROYED('write'));else if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite);
- state.sync = false;
-}
-
-function onwriteError(stream, state, sync, er, cb) {
- --state.pendingcb;
-
- if (sync) {
- // defer the callback if we are being called synchronously
- // to avoid piling up things on the stack
- process.nextTick(cb, er); // this can emit finish, and it will always happen
- // after error
-
- process.nextTick(finishMaybe, stream, state);
- stream._writableState.errorEmitted = true;
- errorOrDestroy(stream, er);
- } else {
-    // the caller expects this to happen first
-    // if the write was async
- cb(er);
- stream._writableState.errorEmitted = true;
- errorOrDestroy(stream, er); // this can emit finish, but finish must
- // always follow error
-
- finishMaybe(stream, state);
- }
-}
-
-function onwriteStateUpdate(state) {
- state.writing = false;
- state.writecb = null;
- state.length -= state.writelen;
- state.writelen = 0;
-}
-
-function onwrite(stream, er) {
- var state = stream._writableState;
- var sync = state.sync;
- var cb = state.writecb;
- if (typeof cb !== 'function') throw new ERR_MULTIPLE_CALLBACK();
- onwriteStateUpdate(state);
- if (er) onwriteError(stream, state, sync, er, cb);else {
- // Check if we're actually ready to finish, but don't emit yet
- var finished = needFinish(state) || stream.destroyed;
-
- if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) {
- clearBuffer(stream, state);
- }
-
- if (sync) {
- process.nextTick(afterWrite, stream, state, finished, cb);
- } else {
- afterWrite(stream, state, finished, cb);
- }
- }
-}
-
-function afterWrite(stream, state, finished, cb) {
- if (!finished) onwriteDrain(stream, state);
- state.pendingcb--;
- cb();
- finishMaybe(stream, state);
-} // Must force callback to be called on nextTick, so that we don't
-// emit 'drain' before the write() consumer gets the 'false' return
-// value, and has a chance to attach a 'drain' listener.
-
-
-function onwriteDrain(stream, state) {
- if (state.length === 0 && state.needDrain) {
- state.needDrain = false;
- stream.emit('drain');
- }
-} // if there's something in the buffer waiting, then process it
-
-
-function clearBuffer(stream, state) {
- state.bufferProcessing = true;
- var entry = state.bufferedRequest;
-
- if (stream._writev && entry && entry.next) {
- // Fast case, write everything using _writev()
- var l = state.bufferedRequestCount;
- var buffer = new Array(l);
- var holder = state.corkedRequestsFree;
- holder.entry = entry;
- var count = 0;
- var allBuffers = true;
-
- while (entry) {
- buffer[count] = entry;
- if (!entry.isBuf) allBuffers = false;
- entry = entry.next;
- count += 1;
- }
-
- buffer.allBuffers = allBuffers;
- doWrite(stream, state, true, state.length, buffer, '', holder.finish); // doWrite is almost always async, defer these to save a bit of time
- // as the hot path ends with doWrite
-
- state.pendingcb++;
- state.lastBufferedRequest = null;
-
- if (holder.next) {
- state.corkedRequestsFree = holder.next;
- holder.next = null;
- } else {
- state.corkedRequestsFree = new CorkedRequest(state);
- }
-
- state.bufferedRequestCount = 0;
- } else {
- // Slow case, write chunks one-by-one
- while (entry) {
- var chunk = entry.chunk;
- var encoding = entry.encoding;
- var cb = entry.callback;
- var len = state.objectMode ? 1 : chunk.length;
- doWrite(stream, state, false, len, chunk, encoding, cb);
- entry = entry.next;
- state.bufferedRequestCount--; // if we didn't call the onwrite immediately, then
- // it means that we need to wait until it does.
- // also, that means that the chunk and cb are currently
- // being processed, so move the buffer counter past them.
-
- if (state.writing) {
- break;
- }
- }
-
- if (entry === null) state.lastBufferedRequest = null;
- }
-
- state.bufferedRequest = entry;
- state.bufferProcessing = false;
-}
-
-Writable.prototype._write = function (chunk, encoding, cb) {
- cb(new ERR_METHOD_NOT_IMPLEMENTED('_write()'));
-};
-
-Writable.prototype._writev = null;
-
-Writable.prototype.end = function (chunk, encoding, cb) {
- var state = this._writableState;
-
- if (typeof chunk === 'function') {
- cb = chunk;
- chunk = null;
- encoding = null;
- } else if (typeof encoding === 'function') {
- cb = encoding;
- encoding = null;
- }
-
- if (chunk !== null && chunk !== undefined) this.write(chunk, encoding); // .end() fully uncorks
-
- if (state.corked) {
- state.corked = 1;
- this.uncork();
- } // ignore unnecessary end() calls.
-
-
- if (!state.ending) endWritable(this, state, cb);
- return this;
-};
-
-Object.defineProperty(Writable.prototype, 'writableLength', {
- // making it explicit this property is not enumerable
- // because otherwise some prototype manipulation in
- // userland will fail
- enumerable: false,
- get: function get() {
- return this._writableState.length;
- }
-});
-
-function needFinish(state) {
- return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing;
-}
-
-function callFinal(stream, state) {
- stream._final(function (err) {
- state.pendingcb--;
-
- if (err) {
- errorOrDestroy(stream, err);
- }
-
- state.prefinished = true;
- stream.emit('prefinish');
- finishMaybe(stream, state);
- });
-}
-
-function prefinish(stream, state) {
- if (!state.prefinished && !state.finalCalled) {
- if (typeof stream._final === 'function' && !state.destroyed) {
- state.pendingcb++;
- state.finalCalled = true;
- process.nextTick(callFinal, stream, state);
- } else {
- state.prefinished = true;
- stream.emit('prefinish');
- }
- }
-}
-
-function finishMaybe(stream, state) {
- var need = needFinish(state);
-
- if (need) {
- prefinish(stream, state);
-
- if (state.pendingcb === 0) {
- state.finished = true;
- stream.emit('finish');
-
- if (state.autoDestroy) {
- // In case of duplex streams we need a way to detect
- // if the readable side is ready for autoDestroy as well
- var rState = stream._readableState;
-
- if (!rState || rState.autoDestroy && rState.endEmitted) {
- stream.destroy();
- }
- }
- }
- }
-
- return need;
-}
-
-function endWritable(stream, state, cb) {
- state.ending = true;
- finishMaybe(stream, state);
-
- if (cb) {
- if (state.finished) process.nextTick(cb);else stream.once('finish', cb);
- }
-
- state.ended = true;
- stream.writable = false;
-}
-
-function onCorkedFinish(corkReq, state, err) {
- var entry = corkReq.entry;
- corkReq.entry = null;
-
- while (entry) {
- var cb = entry.callback;
- state.pendingcb--;
- cb(err);
- entry = entry.next;
- } // reuse the free corkReq.
-
-
- state.corkedRequestsFree.next = corkReq;
-}
-
-Object.defineProperty(Writable.prototype, 'destroyed', {
- // making it explicit this property is not enumerable
- // because otherwise some prototype manipulation in
- // userland will fail
- enumerable: false,
- get: function get() {
- if (this._writableState === undefined) {
- return false;
- }
-
- return this._writableState.destroyed;
- },
- set: function set(value) {
- // we ignore the value if the stream
- // has not been initialized yet
- if (!this._writableState) {
- return;
- } // backward compatibility, the user is explicitly
- // managing destroyed
-
-
- this._writableState.destroyed = value;
- }
-});
-Writable.prototype.destroy = destroyImpl.destroy;
-Writable.prototype._undestroy = destroyImpl.undestroy;
-
-Writable.prototype._destroy = function (err, cb) {
- cb(err);
-};
-}).call(this)}).call(this,require('_process'),typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
-},{"../errors":30,"./_stream_duplex":31,"./internal/streams/destroy":38,"./internal/streams/state":42,"./internal/streams/stream":43,"_process":27,"buffer":10,"inherits":23,"util-deprecate":46}],36:[function(require,module,exports){
-(function (process){(function (){
-'use strict';
-
-var _Object$setPrototypeO;
-
-function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }
-
-var finished = require('./end-of-stream');
-
-var kLastResolve = Symbol('lastResolve');
-var kLastReject = Symbol('lastReject');
-var kError = Symbol('error');
-var kEnded = Symbol('ended');
-var kLastPromise = Symbol('lastPromise');
-var kHandlePromise = Symbol('handlePromise');
-var kStream = Symbol('stream');
-
-function createIterResult(value, done) {
- return {
- value: value,
- done: done
- };
-}
-
-function readAndResolve(iter) {
- var resolve = iter[kLastResolve];
-
- if (resolve !== null) {
- var data = iter[kStream].read(); // we defer if data is null
- // we can be expecting either 'end' or
- // 'error'
-
- if (data !== null) {
- iter[kLastPromise] = null;
- iter[kLastResolve] = null;
- iter[kLastReject] = null;
- resolve(createIterResult(data, false));
- }
- }
-}
-
-function onReadable(iter) {
- // we wait for the next tick, because it might
- // emit an error with process.nextTick
- process.nextTick(readAndResolve, iter);
-}
-
-function wrapForNext(lastPromise, iter) {
- return function (resolve, reject) {
- lastPromise.then(function () {
- if (iter[kEnded]) {
- resolve(createIterResult(undefined, true));
- return;
- }
-
- iter[kHandlePromise](resolve, reject);
- }, reject);
- };
-}
-
-var AsyncIteratorPrototype = Object.getPrototypeOf(function () {});
-var ReadableStreamAsyncIteratorPrototype = Object.setPrototypeOf((_Object$setPrototypeO = {
- get stream() {
- return this[kStream];
- },
-
- next: function next() {
- var _this = this;
-
- // if we have detected an error in the meanwhile
- // reject straight away
- var error = this[kError];
-
- if (error !== null) {
- return Promise.reject(error);
- }
-
- if (this[kEnded]) {
- return Promise.resolve(createIterResult(undefined, true));
- }
-
- if (this[kStream].destroyed) {
- // We need to defer via nextTick because if .destroy(err) is
- // called, the error will be emitted via nextTick, and
- // we cannot guarantee that there is no error lingering around
- // waiting to be emitted.
- return new Promise(function (resolve, reject) {
- process.nextTick(function () {
- if (_this[kError]) {
- reject(_this[kError]);
- } else {
- resolve(createIterResult(undefined, true));
- }
- });
- });
- } // if we have multiple next() calls
- // we will wait for the previous Promise to finish
- // this logic is optimized to support for await loops,
- // where next() is only called once at a time
-
-
- var lastPromise = this[kLastPromise];
- var promise;
-
- if (lastPromise) {
- promise = new Promise(wrapForNext(lastPromise, this));
- } else {
- // fast path needed to support multiple this.push()
- // without triggering the next() queue
- var data = this[kStream].read();
-
- if (data !== null) {
- return Promise.resolve(createIterResult(data, false));
- }
-
- promise = new Promise(this[kHandlePromise]);
- }
-
- this[kLastPromise] = promise;
- return promise;
- }
-}, _defineProperty(_Object$setPrototypeO, Symbol.asyncIterator, function () {
- return this;
-}), _defineProperty(_Object$setPrototypeO, "return", function _return() {
- var _this2 = this;
-
- // destroy(err, cb) is a private API
- // we can guarantee we have that here, because we control the
- // Readable class this is attached to
- return new Promise(function (resolve, reject) {
- _this2[kStream].destroy(null, function (err) {
- if (err) {
- reject(err);
- return;
- }
-
- resolve(createIterResult(undefined, true));
- });
- });
-}), _Object$setPrototypeO), AsyncIteratorPrototype);
-
-var createReadableStreamAsyncIterator = function createReadableStreamAsyncIterator(stream) {
- var _Object$create;
-
- var iterator = Object.create(ReadableStreamAsyncIteratorPrototype, (_Object$create = {}, _defineProperty(_Object$create, kStream, {
- value: stream,
- writable: true
- }), _defineProperty(_Object$create, kLastResolve, {
- value: null,
- writable: true
- }), _defineProperty(_Object$create, kLastReject, {
- value: null,
- writable: true
- }), _defineProperty(_Object$create, kError, {
- value: null,
- writable: true
- }), _defineProperty(_Object$create, kEnded, {
- value: stream._readableState.endEmitted,
- writable: true
- }), _defineProperty(_Object$create, kHandlePromise, {
- value: function value(resolve, reject) {
- var data = iterator[kStream].read();
-
- if (data) {
- iterator[kLastPromise] = null;
- iterator[kLastResolve] = null;
- iterator[kLastReject] = null;
- resolve(createIterResult(data, false));
- } else {
- iterator[kLastResolve] = resolve;
- iterator[kLastReject] = reject;
- }
- },
- writable: true
- }), _Object$create));
- iterator[kLastPromise] = null;
- finished(stream, function (err) {
- if (err && err.code !== 'ERR_STREAM_PREMATURE_CLOSE') {
- var reject = iterator[kLastReject]; // reject if we are waiting for data in the Promise
- // returned by next() and store the error
-
- if (reject !== null) {
- iterator[kLastPromise] = null;
- iterator[kLastResolve] = null;
- iterator[kLastReject] = null;
- reject(err);
- }
-
- iterator[kError] = err;
- return;
- }
-
- var resolve = iterator[kLastResolve];
-
- if (resolve !== null) {
- iterator[kLastPromise] = null;
- iterator[kLastResolve] = null;
- iterator[kLastReject] = null;
- resolve(createIterResult(undefined, true));
- }
-
- iterator[kEnded] = true;
- });
- stream.on('readable', onReadable.bind(null, iterator));
- return iterator;
-};
-
-module.exports = createReadableStreamAsyncIterator;
-}).call(this)}).call(this,require('_process'))
-},{"./end-of-stream":39,"_process":27}],37:[function(require,module,exports){
-'use strict';
-
-function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); if (enumerableOnly) symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; }); keys.push.apply(keys, symbols); } return keys; }
-
-function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i] != null ? arguments[i] : {}; if (i % 2) { ownKeys(Object(source), true).forEach(function (key) { _defineProperty(target, key, source[key]); }); } else if (Object.getOwnPropertyDescriptors) { Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)); } else { ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } } return target; }
-
-function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }
-
-function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
-
-function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
-
-function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
-
-var _require = require('buffer'),
- Buffer = _require.Buffer;
-
-var _require2 = require('util'),
- inspect = _require2.inspect;
-
-var custom = inspect && inspect.custom || 'inspect';
-
-function copyBuffer(src, target, offset) {
- Buffer.prototype.copy.call(src, target, offset);
-}
-
-module.exports =
-/*#__PURE__*/
-function () {
- function BufferList() {
- _classCallCheck(this, BufferList);
-
- this.head = null;
- this.tail = null;
- this.length = 0;
- }
-
- _createClass(BufferList, [{
- key: "push",
- value: function push(v) {
- var entry = {
- data: v,
- next: null
- };
- if (this.length > 0) this.tail.next = entry;else this.head = entry;
- this.tail = entry;
- ++this.length;
- }
- }, {
- key: "unshift",
- value: function unshift(v) {
- var entry = {
- data: v,
- next: this.head
- };
- if (this.length === 0) this.tail = entry;
- this.head = entry;
- ++this.length;
- }
- }, {
- key: "shift",
- value: function shift() {
- if (this.length === 0) return;
- var ret = this.head.data;
- if (this.length === 1) this.head = this.tail = null;else this.head = this.head.next;
- --this.length;
- return ret;
- }
- }, {
- key: "clear",
- value: function clear() {
- this.head = this.tail = null;
- this.length = 0;
- }
- }, {
- key: "join",
- value: function join(s) {
- if (this.length === 0) return '';
- var p = this.head;
- var ret = '' + p.data;
-
- while (p = p.next) {
- ret += s + p.data;
- }
-
- return ret;
- }
- }, {
- key: "concat",
- value: function concat(n) {
- if (this.length === 0) return Buffer.alloc(0);
- var ret = Buffer.allocUnsafe(n >>> 0);
- var p = this.head;
- var i = 0;
-
- while (p) {
- copyBuffer(p.data, ret, i);
- i += p.data.length;
- p = p.next;
- }
-
- return ret;
- } // Consumes a specified amount of bytes or characters from the buffered data.
-
- }, {
- key: "consume",
- value: function consume(n, hasStrings) {
- var ret;
-
- if (n < this.head.data.length) {
- // `slice` is the same for buffers and strings.
- ret = this.head.data.slice(0, n);
- this.head.data = this.head.data.slice(n);
- } else if (n === this.head.data.length) {
- // First chunk is a perfect match.
- ret = this.shift();
- } else {
- // Result spans more than one buffer.
- ret = hasStrings ? this._getString(n) : this._getBuffer(n);
- }
-
- return ret;
- }
- }, {
- key: "first",
- value: function first() {
- return this.head.data;
- } // Consumes a specified amount of characters from the buffered data.
-
- }, {
- key: "_getString",
- value: function _getString(n) {
- var p = this.head;
- var c = 1;
- var ret = p.data;
- n -= ret.length;
-
- while (p = p.next) {
- var str = p.data;
- var nb = n > str.length ? str.length : n;
- if (nb === str.length) ret += str;else ret += str.slice(0, n);
- n -= nb;
-
- if (n === 0) {
- if (nb === str.length) {
- ++c;
- if (p.next) this.head = p.next;else this.head = this.tail = null;
- } else {
- this.head = p;
- p.data = str.slice(nb);
- }
-
- break;
- }
-
- ++c;
- }
-
- this.length -= c;
- return ret;
- } // Consumes a specified amount of bytes from the buffered data.
-
- }, {
- key: "_getBuffer",
- value: function _getBuffer(n) {
- var ret = Buffer.allocUnsafe(n);
- var p = this.head;
- var c = 1;
- p.data.copy(ret);
- n -= p.data.length;
-
- while (p = p.next) {
- var buf = p.data;
- var nb = n > buf.length ? buf.length : n;
- buf.copy(ret, ret.length - n, 0, nb);
- n -= nb;
-
- if (n === 0) {
- if (nb === buf.length) {
- ++c;
- if (p.next) this.head = p.next;else this.head = this.tail = null;
- } else {
- this.head = p;
- p.data = buf.slice(nb);
- }
-
- break;
- }
-
- ++c;
- }
-
- this.length -= c;
- return ret;
- } // Make sure the linked list only shows the minimal necessary information.
-
- }, {
- key: custom,
- value: function value(_, options) {
- return inspect(this, _objectSpread({}, options, {
- // Only inspect one level.
- depth: 0,
- // It should not recurse.
- customInspect: false
- }));
- }
- }]);
-
- return BufferList;
-}();
-},{"buffer":10,"util":9}],38:[function(require,module,exports){
-(function (process){(function (){
-'use strict'; // undocumented cb() API, needed for core, not for public API
-
-function destroy(err, cb) {
- var _this = this;
-
- var readableDestroyed = this._readableState && this._readableState.destroyed;
- var writableDestroyed = this._writableState && this._writableState.destroyed;
-
- if (readableDestroyed || writableDestroyed) {
- if (cb) {
- cb(err);
- } else if (err) {
- if (!this._writableState) {
- process.nextTick(emitErrorNT, this, err);
- } else if (!this._writableState.errorEmitted) {
- this._writableState.errorEmitted = true;
- process.nextTick(emitErrorNT, this, err);
- }
- }
-
- return this;
- } // we set destroyed to true before firing error callbacks in order
- // to make it re-entrance safe in case destroy() is called within callbacks
-
-
- if (this._readableState) {
- this._readableState.destroyed = true;
- } // if this is a duplex stream mark the writable part as destroyed as well
-
-
- if (this._writableState) {
- this._writableState.destroyed = true;
- }
-
- this._destroy(err || null, function (err) {
- if (!cb && err) {
- if (!_this._writableState) {
- process.nextTick(emitErrorAndCloseNT, _this, err);
- } else if (!_this._writableState.errorEmitted) {
- _this._writableState.errorEmitted = true;
- process.nextTick(emitErrorAndCloseNT, _this, err);
- } else {
- process.nextTick(emitCloseNT, _this);
- }
- } else if (cb) {
- process.nextTick(emitCloseNT, _this);
- cb(err);
- } else {
- process.nextTick(emitCloseNT, _this);
- }
- });
-
- return this;
-}
-
-function emitErrorAndCloseNT(self, err) {
- emitErrorNT(self, err);
- emitCloseNT(self);
-}
-
-function emitCloseNT(self) {
- if (self._writableState && !self._writableState.emitClose) return;
- if (self._readableState && !self._readableState.emitClose) return;
- self.emit('close');
-}
-
-function undestroy() {
- if (this._readableState) {
- this._readableState.destroyed = false;
- this._readableState.reading = false;
- this._readableState.ended = false;
- this._readableState.endEmitted = false;
- }
-
- if (this._writableState) {
- this._writableState.destroyed = false;
- this._writableState.ended = false;
- this._writableState.ending = false;
- this._writableState.finalCalled = false;
- this._writableState.prefinished = false;
- this._writableState.finished = false;
- this._writableState.errorEmitted = false;
- }
-}
-
-function emitErrorNT(self, err) {
- self.emit('error', err);
-}
-
-function errorOrDestroy(stream, err) {
- // We have tests that rely on errors being emitted
- // in the same tick, so changing this is semver major.
- // For now when you opt-in to autoDestroy we allow
- // the error to be emitted nextTick. In a future
- // semver major update we should change the default to this.
- var rState = stream._readableState;
- var wState = stream._writableState;
- if (rState && rState.autoDestroy || wState && wState.autoDestroy) stream.destroy(err);else stream.emit('error', err);
-}
-
-module.exports = {
- destroy: destroy,
- undestroy: undestroy,
- errorOrDestroy: errorOrDestroy
-};
-}).call(this)}).call(this,require('_process'))
-},{"_process":27}],39:[function(require,module,exports){
-// Ported from https://github.com/mafintosh/end-of-stream with
-// permission from the author, Mathias Buus (@mafintosh).
-'use strict';
-
-var ERR_STREAM_PREMATURE_CLOSE = require('../../../errors').codes.ERR_STREAM_PREMATURE_CLOSE;
-
-function once(callback) {
- var called = false;
- return function () {
- if (called) return;
- called = true;
-
- for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) {
- args[_key] = arguments[_key];
- }
-
- callback.apply(this, args);
- };
-}
-
-function noop() {}
-
-function isRequest(stream) {
- return stream.setHeader && typeof stream.abort === 'function';
-}
-
-function eos(stream, opts, callback) {
- if (typeof opts === 'function') return eos(stream, null, opts);
- if (!opts) opts = {};
- callback = once(callback || noop);
- var readable = opts.readable || opts.readable !== false && stream.readable;
- var writable = opts.writable || opts.writable !== false && stream.writable;
-
- var onlegacyfinish = function onlegacyfinish() {
- if (!stream.writable) onfinish();
- };
-
- var writableEnded = stream._writableState && stream._writableState.finished;
-
- var onfinish = function onfinish() {
- writable = false;
- writableEnded = true;
- if (!readable) callback.call(stream);
- };
-
- var readableEnded = stream._readableState && stream._readableState.endEmitted;
-
- var onend = function onend() {
- readable = false;
- readableEnded = true;
- if (!writable) callback.call(stream);
- };
-
- var onerror = function onerror(err) {
- callback.call(stream, err);
- };
-
- var onclose = function onclose() {
- var err;
-
- if (readable && !readableEnded) {
- if (!stream._readableState || !stream._readableState.ended) err = new ERR_STREAM_PREMATURE_CLOSE();
- return callback.call(stream, err);
- }
-
- if (writable && !writableEnded) {
- if (!stream._writableState || !stream._writableState.ended) err = new ERR_STREAM_PREMATURE_CLOSE();
- return callback.call(stream, err);
- }
- };
-
- var onrequest = function onrequest() {
- stream.req.on('finish', onfinish);
- };
-
- if (isRequest(stream)) {
- stream.on('complete', onfinish);
- stream.on('abort', onclose);
- if (stream.req) onrequest();else stream.on('request', onrequest);
- } else if (writable && !stream._writableState) {
- // legacy streams
- stream.on('end', onlegacyfinish);
- stream.on('close', onlegacyfinish);
- }
-
- stream.on('end', onend);
- stream.on('finish', onfinish);
- if (opts.error !== false) stream.on('error', onerror);
- stream.on('close', onclose);
- return function () {
- stream.removeListener('complete', onfinish);
- stream.removeListener('abort', onclose);
- stream.removeListener('request', onrequest);
- if (stream.req) stream.req.removeListener('finish', onfinish);
- stream.removeListener('end', onlegacyfinish);
- stream.removeListener('close', onlegacyfinish);
- stream.removeListener('finish', onfinish);
- stream.removeListener('end', onend);
- stream.removeListener('error', onerror);
- stream.removeListener('close', onclose);
- };
-}
-
-module.exports = eos;
-},{"../../../errors":30}],40:[function(require,module,exports){
-module.exports = function () {
- throw new Error('Readable.from is not available in the browser')
-};
-
-},{}],41:[function(require,module,exports){
-// Ported from https://github.com/mafintosh/pump with
-// permission from the author, Mathias Buus (@mafintosh).
-'use strict';
-
-var eos;
-
-function once(callback) {
- var called = false;
- return function () {
- if (called) return;
- called = true;
- callback.apply(void 0, arguments);
- };
-}
-
-var _require$codes = require('../../../errors').codes,
- ERR_MISSING_ARGS = _require$codes.ERR_MISSING_ARGS,
- ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED;
-
-function noop(err) {
- // Rethrow the error if it exists to avoid swallowing it
- if (err) throw err;
-}
-
-function isRequest(stream) {
- return stream.setHeader && typeof stream.abort === 'function';
-}
-
-function destroyer(stream, reading, writing, callback) {
- callback = once(callback);
- var closed = false;
- stream.on('close', function () {
- closed = true;
- });
- if (eos === undefined) eos = require('./end-of-stream');
- eos(stream, {
- readable: reading,
- writable: writing
- }, function (err) {
- if (err) return callback(err);
- closed = true;
- callback();
- });
- var destroyed = false;
- return function (err) {
- if (closed) return;
- if (destroyed) return;
- destroyed = true; // request.destroy just do .end - .abort is what we want
-
- if (isRequest(stream)) return stream.abort();
- if (typeof stream.destroy === 'function') return stream.destroy();
- callback(err || new ERR_STREAM_DESTROYED('pipe'));
- };
-}
-
-function call(fn) {
- fn();
-}
-
-function pipe(from, to) {
- return from.pipe(to);
-}
-
-function popCallback(streams) {
- if (!streams.length) return noop;
- if (typeof streams[streams.length - 1] !== 'function') return noop;
- return streams.pop();
-}
-
-function pipeline() {
- for (var _len = arguments.length, streams = new Array(_len), _key = 0; _key < _len; _key++) {
- streams[_key] = arguments[_key];
- }
-
- var callback = popCallback(streams);
- if (Array.isArray(streams[0])) streams = streams[0];
-
- if (streams.length < 2) {
- throw new ERR_MISSING_ARGS('streams');
- }
-
- var error;
- var destroys = streams.map(function (stream, i) {
- var reading = i < streams.length - 1;
- var writing = i > 0;
- return destroyer(stream, reading, writing, function (err) {
- if (!error) error = err;
- if (err) destroys.forEach(call);
- if (reading) return;
- destroys.forEach(call);
- callback(error);
- });
- });
- return streams.reduce(pipe);
-}
-
-module.exports = pipeline;
-},{"../../../errors":30,"./end-of-stream":39}],42:[function(require,module,exports){
-'use strict';
-
-var ERR_INVALID_OPT_VALUE = require('../../../errors').codes.ERR_INVALID_OPT_VALUE;
-
-function highWaterMarkFrom(options, isDuplex, duplexKey) {
- return options.highWaterMark != null ? options.highWaterMark : isDuplex ? options[duplexKey] : null;
-}
-
-function getHighWaterMark(state, options, duplexKey, isDuplex) {
- var hwm = highWaterMarkFrom(options, isDuplex, duplexKey);
-
- if (hwm != null) {
- if (!(isFinite(hwm) && Math.floor(hwm) === hwm) || hwm < 0) {
- var name = isDuplex ? duplexKey : 'highWaterMark';
- throw new ERR_INVALID_OPT_VALUE(name, hwm);
- }
-
- return Math.floor(hwm);
- } // Default value
-
-
- return state.objectMode ? 16 : 16 * 1024;
-}
-
-module.exports = {
- getHighWaterMark: getHighWaterMark
-};
-},{"../../../errors":30}],43:[function(require,module,exports){
-module.exports = require('events').EventEmitter;
-
-},{"events":14}],44:[function(require,module,exports){
-// Copyright Joyent, Inc. and other Node contributors.
-//
-// Permission is hereby granted, free of charge, to any person obtaining a
-// copy of this software and associated documentation files (the
-// "Software"), to deal in the Software without restriction, including
-// without limitation the rights to use, copy, modify, merge, publish,
-// distribute, sublicense, and/or sell copies of the Software, and to permit
-// persons to whom the Software is furnished to do so, subject to the
-// following conditions:
-//
-// The above copyright notice and this permission notice shall be included
-// in all copies or substantial portions of the Software.
-//
-// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
-// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
-// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
-// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
-// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
-// USE OR OTHER DEALINGS IN THE SOFTWARE.
-
-'use strict';
-
-/**/
-
-var Buffer = require('safe-buffer').Buffer;
-/**/
-
-var isEncoding = Buffer.isEncoding || function (encoding) {
- encoding = '' + encoding;
- switch (encoding && encoding.toLowerCase()) {
- case 'hex':case 'utf8':case 'utf-8':case 'ascii':case 'binary':case 'base64':case 'ucs2':case 'ucs-2':case 'utf16le':case 'utf-16le':case 'raw':
- return true;
- default:
- return false;
- }
-};
-
-function _normalizeEncoding(enc) {
- if (!enc) return 'utf8';
- var retried;
- while (true) {
- switch (enc) {
- case 'utf8':
- case 'utf-8':
- return 'utf8';
- case 'ucs2':
- case 'ucs-2':
- case 'utf16le':
- case 'utf-16le':
- return 'utf16le';
- case 'latin1':
- case 'binary':
- return 'latin1';
- case 'base64':
- case 'ascii':
- case 'hex':
- return enc;
- default:
- if (retried) return; // undefined
- enc = ('' + enc).toLowerCase();
- retried = true;
- }
- }
-};
-
-// Do not cache `Buffer.isEncoding` when checking encoding names as some
-// modules monkey-patch it to support additional encodings
-function normalizeEncoding(enc) {
- var nenc = _normalizeEncoding(enc);
- if (typeof nenc !== 'string' && (Buffer.isEncoding === isEncoding || !isEncoding(enc))) throw new Error('Unknown encoding: ' + enc);
- return nenc || enc;
-}
-
-// StringDecoder provides an interface for efficiently splitting a series of
-// buffers into a series of JS strings without breaking apart multi-byte
-// characters.
-exports.StringDecoder = StringDecoder;
-function StringDecoder(encoding) {
- this.encoding = normalizeEncoding(encoding);
- var nb;
- switch (this.encoding) {
- case 'utf16le':
- this.text = utf16Text;
- this.end = utf16End;
- nb = 4;
- break;
- case 'utf8':
- this.fillLast = utf8FillLast;
- nb = 4;
- break;
- case 'base64':
- this.text = base64Text;
- this.end = base64End;
- nb = 3;
- break;
- default:
- this.write = simpleWrite;
- this.end = simpleEnd;
- return;
- }
- this.lastNeed = 0;
- this.lastTotal = 0;
- this.lastChar = Buffer.allocUnsafe(nb);
-}
-
-StringDecoder.prototype.write = function (buf) {
- if (buf.length === 0) return '';
- var r;
- var i;
- if (this.lastNeed) {
- r = this.fillLast(buf);
- if (r === undefined) return '';
- i = this.lastNeed;
- this.lastNeed = 0;
- } else {
- i = 0;
- }
- if (i < buf.length) return r ? r + this.text(buf, i) : this.text(buf, i);
- return r || '';
-};
-
-StringDecoder.prototype.end = utf8End;
-
-// Returns only complete characters in a Buffer
-StringDecoder.prototype.text = utf8Text;
-
-// Attempts to complete a partial non-UTF-8 character using bytes from a Buffer
-StringDecoder.prototype.fillLast = function (buf) {
- if (this.lastNeed <= buf.length) {
- buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, this.lastNeed);
- return this.lastChar.toString(this.encoding, 0, this.lastTotal);
- }
- buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, buf.length);
- this.lastNeed -= buf.length;
-};
-
-// Checks the type of a UTF-8 byte, whether it's ASCII, a leading byte, or a
-// continuation byte. If an invalid byte is detected, -2 is returned.
-function utf8CheckByte(byte) {
- if (byte <= 0x7F) return 0;else if (byte >> 5 === 0x06) return 2;else if (byte >> 4 === 0x0E) return 3;else if (byte >> 3 === 0x1E) return 4;
- return byte >> 6 === 0x02 ? -1 : -2;
-}
-
-// Checks at most 3 bytes at the end of a Buffer in order to detect an
-// incomplete multi-byte UTF-8 character. The total number of bytes (2, 3, or 4)
-// needed to complete the UTF-8 character (if applicable) are returned.
-function utf8CheckIncomplete(self, buf, i) {
- var j = buf.length - 1;
- if (j < i) return 0;
- var nb = utf8CheckByte(buf[j]);
- if (nb >= 0) {
- if (nb > 0) self.lastNeed = nb - 1;
- return nb;
- }
- if (--j < i || nb === -2) return 0;
- nb = utf8CheckByte(buf[j]);
- if (nb >= 0) {
- if (nb > 0) self.lastNeed = nb - 2;
- return nb;
- }
- if (--j < i || nb === -2) return 0;
- nb = utf8CheckByte(buf[j]);
- if (nb >= 0) {
- if (nb > 0) {
- if (nb === 2) nb = 0;else self.lastNeed = nb - 3;
- }
- return nb;
- }
- return 0;
-}
-
-// Validates as many continuation bytes for a multi-byte UTF-8 character as
-// needed or are available. If we see a non-continuation byte where we expect
-// one, we "replace" the validated continuation bytes we've seen so far with
-// a single UTF-8 replacement character ('\ufffd'), to match v8's UTF-8 decoding
-// behavior. The continuation byte check is included three times in the case
-// where all of the continuation bytes for a character exist in the same buffer.
-// It is also done this way as a slight performance increase instead of using a
-// loop.
-function utf8CheckExtraBytes(self, buf, p) {
- if ((buf[0] & 0xC0) !== 0x80) {
- self.lastNeed = 0;
- return '\ufffd';
- }
- if (self.lastNeed > 1 && buf.length > 1) {
- if ((buf[1] & 0xC0) !== 0x80) {
- self.lastNeed = 1;
- return '\ufffd';
- }
- if (self.lastNeed > 2 && buf.length > 2) {
- if ((buf[2] & 0xC0) !== 0x80) {
- self.lastNeed = 2;
- return '\ufffd';
- }
- }
- }
-}
-
-// Attempts to complete a multi-byte UTF-8 character using bytes from a Buffer.
-function utf8FillLast(buf) {
- var p = this.lastTotal - this.lastNeed;
- var r = utf8CheckExtraBytes(this, buf, p);
- if (r !== undefined) return r;
- if (this.lastNeed <= buf.length) {
- buf.copy(this.lastChar, p, 0, this.lastNeed);
- return this.lastChar.toString(this.encoding, 0, this.lastTotal);
- }
- buf.copy(this.lastChar, p, 0, buf.length);
- this.lastNeed -= buf.length;
-}
-
-// Returns all complete UTF-8 characters in a Buffer. If the Buffer ended on a
-// partial character, the character's bytes are buffered until the required
-// number of bytes are available.
-function utf8Text(buf, i) {
- var total = utf8CheckIncomplete(this, buf, i);
- if (!this.lastNeed) return buf.toString('utf8', i);
- this.lastTotal = total;
- var end = buf.length - (total - this.lastNeed);
- buf.copy(this.lastChar, 0, end);
- return buf.toString('utf8', i, end);
-}
-
-// For UTF-8, a replacement character is added when ending on a partial
-// character.
-function utf8End(buf) {
- var r = buf && buf.length ? this.write(buf) : '';
- if (this.lastNeed) return r + '\ufffd';
- return r;
-}
-
-// UTF-16LE typically needs two bytes per character, but even if we have an even
-// number of bytes available, we need to check if we end on a leading/high
-// surrogate. In that case, we need to wait for the next two bytes in order to
-// decode the last character properly.
-function utf16Text(buf, i) {
- if ((buf.length - i) % 2 === 0) {
- var r = buf.toString('utf16le', i);
- if (r) {
- var c = r.charCodeAt(r.length - 1);
- if (c >= 0xD800 && c <= 0xDBFF) {
- this.lastNeed = 2;
- this.lastTotal = 4;
- this.lastChar[0] = buf[buf.length - 2];
- this.lastChar[1] = buf[buf.length - 1];
- return r.slice(0, -1);
- }
- }
- return r;
- }
- this.lastNeed = 1;
- this.lastTotal = 2;
- this.lastChar[0] = buf[buf.length - 1];
- return buf.toString('utf16le', i, buf.length - 1);
-}
-
-// For UTF-16LE we do not explicitly append special replacement characters if we
-// end on a partial character, we simply let v8 handle that.
-function utf16End(buf) {
- var r = buf && buf.length ? this.write(buf) : '';
- if (this.lastNeed) {
- var end = this.lastTotal - this.lastNeed;
- return r + this.lastChar.toString('utf16le', 0, end);
- }
- return r;
-}
-
-function base64Text(buf, i) {
- var n = (buf.length - i) % 3;
- if (n === 0) return buf.toString('base64', i);
- this.lastNeed = 3 - n;
- this.lastTotal = 3;
- if (n === 1) {
- this.lastChar[0] = buf[buf.length - 1];
- } else {
- this.lastChar[0] = buf[buf.length - 2];
- this.lastChar[1] = buf[buf.length - 1];
- }
- return buf.toString('base64', i, buf.length - n);
-}
-
-function base64End(buf) {
- var r = buf && buf.length ? this.write(buf) : '';
- if (this.lastNeed) return r + this.lastChar.toString('base64', 0, 3 - this.lastNeed);
- return r;
-}
-
-// Pass bytes on through for single-byte encodings (e.g. ascii, latin1, hex)
-function simpleWrite(buf) {
- return buf.toString(this.encoding);
-}
-
-function simpleEnd(buf) {
- return buf && buf.length ? this.write(buf) : '';
-}
-},{"safe-buffer":28}],45:[function(require,module,exports){
-(function (setImmediate,clearImmediate){(function (){
-var nextTick = require('process/browser.js').nextTick;
-var apply = Function.prototype.apply;
-var slice = Array.prototype.slice;
-var immediateIds = {};
-var nextImmediateId = 0;
-
-// DOM APIs, for completeness
-
-exports.setTimeout = function() {
- return new Timeout(apply.call(setTimeout, window, arguments), clearTimeout);
-};
-exports.setInterval = function() {
- return new Timeout(apply.call(setInterval, window, arguments), clearInterval);
-};
-exports.clearTimeout =
-exports.clearInterval = function(timeout) { timeout.close(); };
-
-function Timeout(id, clearFn) {
- this._id = id;
- this._clearFn = clearFn;
-}
-Timeout.prototype.unref = Timeout.prototype.ref = function() {};
-Timeout.prototype.close = function() {
- this._clearFn.call(window, this._id);
-};
-
-// Does not start the time, just sets up the members needed.
-exports.enroll = function(item, msecs) {
- clearTimeout(item._idleTimeoutId);
- item._idleTimeout = msecs;
-};
-
-exports.unenroll = function(item) {
- clearTimeout(item._idleTimeoutId);
- item._idleTimeout = -1;
-};
-
-exports._unrefActive = exports.active = function(item) {
- clearTimeout(item._idleTimeoutId);
-
- var msecs = item._idleTimeout;
- if (msecs >= 0) {
- item._idleTimeoutId = setTimeout(function onTimeout() {
- if (item._onTimeout)
- item._onTimeout();
- }, msecs);
- }
-};
-
-// That's not how node.js implements it but the exposed api is the same.
-exports.setImmediate = typeof setImmediate === "function" ? setImmediate : function(fn) {
- var id = nextImmediateId++;
- var args = arguments.length < 2 ? false : slice.call(arguments, 1);
-
- immediateIds[id] = true;
-
- nextTick(function onNextTick() {
- if (immediateIds[id]) {
- // fn.call() is faster so we optimize for the common use-case
- // @see http://jsperf.com/call-apply-segu
- if (args) {
- fn.apply(null, args);
- } else {
- fn.call(null);
- }
- // Prevent ids from leaking
- exports.clearImmediate(id);
- }
- });
-
- return id;
-};
-
-exports.clearImmediate = typeof clearImmediate === "function" ? clearImmediate : function(id) {
- delete immediateIds[id];
-};
-}).call(this)}).call(this,require("timers").setImmediate,require("timers").clearImmediate)
-},{"process/browser.js":27,"timers":45}],46:[function(require,module,exports){
-(function (global){(function (){
-
-/**
- * Module exports.
- */
-
-module.exports = deprecate;
-
-/**
- * Mark that a method should not be used.
- * Returns a modified function which warns once by default.
- *
- * If `localStorage.noDeprecation = true` is set, then it is a no-op.
- *
- * If `localStorage.throwDeprecation = true` is set, then deprecated functions
- * will throw an Error when invoked.
- *
- * If `localStorage.traceDeprecation = true` is set, then deprecated functions
- * will invoke `console.trace()` instead of `console.error()`.
- *
- * @param {Function} fn - the function to deprecate
- * @param {String} msg - the string to print to the console when `fn` is invoked
- * @returns {Function} a new "deprecated" version of `fn`
- * @api public
- */
-
-function deprecate (fn, msg) {
- if (config('noDeprecation')) {
- return fn;
- }
-
- var warned = false;
- function deprecated() {
- if (!warned) {
- if (config('throwDeprecation')) {
- throw new Error(msg);
- } else if (config('traceDeprecation')) {
- console.trace(msg);
- } else {
- console.warn(msg);
- }
- warned = true;
- }
- return fn.apply(this, arguments);
- }
-
- return deprecated;
-}
-
-/**
- * Checks `localStorage` for boolean values for the given `name`.
- *
- * @param {String} name
- * @returns {Boolean}
- * @api private
- */
-
-function config (name) {
- // accessing global.localStorage can trigger a DOMException in sandboxed iframes
- try {
- if (!global.localStorage) return false;
- } catch (_) {
- return false;
- }
- var val = global.localStorage[name];
- if (null == val) return false;
- return String(val).toLowerCase() === 'true';
-}
-
-}).call(this)}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
-},{}],47:[function(require,module,exports){
-module.exports = function isBuffer(arg) {
- return arg && typeof arg === 'object'
- && typeof arg.copy === 'function'
- && typeof arg.fill === 'function'
- && typeof arg.readUInt8 === 'function';
-}
-},{}],48:[function(require,module,exports){
-// Currently in sync with Node.js lib/internal/util/types.js
-// https://github.com/nodejs/node/commit/112cc7c27551254aa2b17098fb774867f05ed0d9
-
-'use strict';
-
-var isArgumentsObject = require('is-arguments');
-var isGeneratorFunction = require('is-generator-function');
-var whichTypedArray = require('which-typed-array');
-var isTypedArray = require('is-typed-array');
-
-function uncurryThis(f) {
- return f.call.bind(f);
-}
-
-var BigIntSupported = typeof BigInt !== 'undefined';
-var SymbolSupported = typeof Symbol !== 'undefined';
-
-var ObjectToString = uncurryThis(Object.prototype.toString);
-
-var numberValue = uncurryThis(Number.prototype.valueOf);
-var stringValue = uncurryThis(String.prototype.valueOf);
-var booleanValue = uncurryThis(Boolean.prototype.valueOf);
-
-if (BigIntSupported) {
- var bigIntValue = uncurryThis(BigInt.prototype.valueOf);
-}
-
-if (SymbolSupported) {
- var symbolValue = uncurryThis(Symbol.prototype.valueOf);
-}
-
-function checkBoxedPrimitive(value, prototypeValueOf) {
- if (typeof value !== 'object') {
- return false;
- }
- try {
- prototypeValueOf(value);
- return true;
- } catch(e) {
- return false;
- }
-}
-
-exports.isArgumentsObject = isArgumentsObject;
-exports.isGeneratorFunction = isGeneratorFunction;
-exports.isTypedArray = isTypedArray;
-
-// Taken from here and modified for better browser support
-// https://github.com/sindresorhus/p-is-promise/blob/cda35a513bda03f977ad5cde3a079d237e82d7ef/index.js
-function isPromise(input) {
- return (
- (
- typeof Promise !== 'undefined' &&
- input instanceof Promise
- ) ||
- (
- input !== null &&
- typeof input === 'object' &&
- typeof input.then === 'function' &&
- typeof input.catch === 'function'
- )
- );
-}
-exports.isPromise = isPromise;
-
-function isArrayBufferView(value) {
- if (typeof ArrayBuffer !== 'undefined' && ArrayBuffer.isView) {
- return ArrayBuffer.isView(value);
- }
-
- return (
- isTypedArray(value) ||
- isDataView(value)
- );
-}
-exports.isArrayBufferView = isArrayBufferView;
-
-
-function isUint8Array(value) {
- return whichTypedArray(value) === 'Uint8Array';
-}
-exports.isUint8Array = isUint8Array;
-
-function isUint8ClampedArray(value) {
- return whichTypedArray(value) === 'Uint8ClampedArray';
-}
-exports.isUint8ClampedArray = isUint8ClampedArray;
-
-function isUint16Array(value) {
- return whichTypedArray(value) === 'Uint16Array';
-}
-exports.isUint16Array = isUint16Array;
-
-function isUint32Array(value) {
- return whichTypedArray(value) === 'Uint32Array';
-}
-exports.isUint32Array = isUint32Array;
-
-function isInt8Array(value) {
- return whichTypedArray(value) === 'Int8Array';
-}
-exports.isInt8Array = isInt8Array;
-
-function isInt16Array(value) {
- return whichTypedArray(value) === 'Int16Array';
-}
-exports.isInt16Array = isInt16Array;
-
-function isInt32Array(value) {
- return whichTypedArray(value) === 'Int32Array';
-}
-exports.isInt32Array = isInt32Array;
-
-function isFloat32Array(value) {
- return whichTypedArray(value) === 'Float32Array';
-}
-exports.isFloat32Array = isFloat32Array;
-
-function isFloat64Array(value) {
- return whichTypedArray(value) === 'Float64Array';
-}
-exports.isFloat64Array = isFloat64Array;
-
-function isBigInt64Array(value) {
- return whichTypedArray(value) === 'BigInt64Array';
-}
-exports.isBigInt64Array = isBigInt64Array;
-
-function isBigUint64Array(value) {
- return whichTypedArray(value) === 'BigUint64Array';
-}
-exports.isBigUint64Array = isBigUint64Array;
-
-function isMapToString(value) {
- return ObjectToString(value) === '[object Map]';
-}
-isMapToString.working = (
- typeof Map !== 'undefined' &&
- isMapToString(new Map())
-);
-
-function isMap(value) {
- if (typeof Map === 'undefined') {
- return false;
- }
-
- return isMapToString.working
- ? isMapToString(value)
- : value instanceof Map;
-}
-exports.isMap = isMap;
-
-function isSetToString(value) {
- return ObjectToString(value) === '[object Set]';
-}
-isSetToString.working = (
- typeof Set !== 'undefined' &&
- isSetToString(new Set())
-);
-function isSet(value) {
- if (typeof Set === 'undefined') {
- return false;
- }
-
- return isSetToString.working
- ? isSetToString(value)
- : value instanceof Set;
-}
-exports.isSet = isSet;
-
-function isWeakMapToString(value) {
- return ObjectToString(value) === '[object WeakMap]';
-}
-isWeakMapToString.working = (
- typeof WeakMap !== 'undefined' &&
- isWeakMapToString(new WeakMap())
-);
-function isWeakMap(value) {
- if (typeof WeakMap === 'undefined') {
- return false;
- }
-
- return isWeakMapToString.working
- ? isWeakMapToString(value)
- : value instanceof WeakMap;
-}
-exports.isWeakMap = isWeakMap;
-
-function isWeakSetToString(value) {
- return ObjectToString(value) === '[object WeakSet]';
-}
-isWeakSetToString.working = (
- typeof WeakSet !== 'undefined' &&
- isWeakSetToString(new WeakSet())
-);
-function isWeakSet(value) {
- return isWeakSetToString(value);
-}
-exports.isWeakSet = isWeakSet;
-
-function isArrayBufferToString(value) {
- return ObjectToString(value) === '[object ArrayBuffer]';
-}
-isArrayBufferToString.working = (
- typeof ArrayBuffer !== 'undefined' &&
- isArrayBufferToString(new ArrayBuffer())
-);
-function isArrayBuffer(value) {
- if (typeof ArrayBuffer === 'undefined') {
- return false;
- }
-
- return isArrayBufferToString.working
- ? isArrayBufferToString(value)
- : value instanceof ArrayBuffer;
-}
-exports.isArrayBuffer = isArrayBuffer;
-
-function isDataViewToString(value) {
- return ObjectToString(value) === '[object DataView]';
-}
-isDataViewToString.working = (
- typeof ArrayBuffer !== 'undefined' &&
- typeof DataView !== 'undefined' &&
- isDataViewToString(new DataView(new ArrayBuffer(1), 0, 1))
-);
-function isDataView(value) {
- if (typeof DataView === 'undefined') {
- return false;
- }
-
- return isDataViewToString.working
- ? isDataViewToString(value)
- : value instanceof DataView;
-}
-exports.isDataView = isDataView;
-
-// Store a copy of SharedArrayBuffer in case it's deleted elsewhere
-var SharedArrayBufferCopy = typeof SharedArrayBuffer !== 'undefined' ? SharedArrayBuffer : undefined;
-function isSharedArrayBufferToString(value) {
- return ObjectToString(value) === '[object SharedArrayBuffer]';
-}
-function isSharedArrayBuffer(value) {
- if (typeof SharedArrayBufferCopy === 'undefined') {
- return false;
- }
-
- if (typeof isSharedArrayBufferToString.working === 'undefined') {
- isSharedArrayBufferToString.working = isSharedArrayBufferToString(new SharedArrayBufferCopy());
- }
-
- return isSharedArrayBufferToString.working
- ? isSharedArrayBufferToString(value)
- : value instanceof SharedArrayBufferCopy;
-}
-exports.isSharedArrayBuffer = isSharedArrayBuffer;
-
-function isAsyncFunction(value) {
- return ObjectToString(value) === '[object AsyncFunction]';
-}
-exports.isAsyncFunction = isAsyncFunction;
-
-function isMapIterator(value) {
- return ObjectToString(value) === '[object Map Iterator]';
-}
-exports.isMapIterator = isMapIterator;
-
-function isSetIterator(value) {
- return ObjectToString(value) === '[object Set Iterator]';
-}
-exports.isSetIterator = isSetIterator;
-
-function isGeneratorObject(value) {
- return ObjectToString(value) === '[object Generator]';
-}
-exports.isGeneratorObject = isGeneratorObject;
-
-function isWebAssemblyCompiledModule(value) {
- return ObjectToString(value) === '[object WebAssembly.Module]';
-}
-exports.isWebAssemblyCompiledModule = isWebAssemblyCompiledModule;
-
-function isNumberObject(value) {
- return checkBoxedPrimitive(value, numberValue);
-}
-exports.isNumberObject = isNumberObject;
-
-function isStringObject(value) {
- return checkBoxedPrimitive(value, stringValue);
-}
-exports.isStringObject = isStringObject;
-
-function isBooleanObject(value) {
- return checkBoxedPrimitive(value, booleanValue);
-}
-exports.isBooleanObject = isBooleanObject;
-
-function isBigIntObject(value) {
- return BigIntSupported && checkBoxedPrimitive(value, bigIntValue);
-}
-exports.isBigIntObject = isBigIntObject;
-
-function isSymbolObject(value) {
- return SymbolSupported && checkBoxedPrimitive(value, symbolValue);
-}
-exports.isSymbolObject = isSymbolObject;
-
-function isBoxedPrimitive(value) {
- return (
- isNumberObject(value) ||
- isStringObject(value) ||
- isBooleanObject(value) ||
- isBigIntObject(value) ||
- isSymbolObject(value)
- );
-}
-exports.isBoxedPrimitive = isBoxedPrimitive;
-
-function isAnyArrayBuffer(value) {
- return typeof Uint8Array !== 'undefined' && (
- isArrayBuffer(value) ||
- isSharedArrayBuffer(value)
- );
-}
-exports.isAnyArrayBuffer = isAnyArrayBuffer;
-
-['isProxy', 'isExternal', 'isModuleNamespaceObject'].forEach(function(method) {
- Object.defineProperty(exports, method, {
- enumerable: false,
- value: function() {
- throw new Error(method + ' is not supported in userland');
- }
- });
-});
-
-},{"is-arguments":24,"is-generator-function":25,"is-typed-array":26,"which-typed-array":50}],49:[function(require,module,exports){
-(function (process){(function (){
-// Copyright Joyent, Inc. and other Node contributors.
-//
-// Permission is hereby granted, free of charge, to any person obtaining a
-// copy of this software and associated documentation files (the
-// "Software"), to deal in the Software without restriction, including
-// without limitation the rights to use, copy, modify, merge, publish,
-// distribute, sublicense, and/or sell copies of the Software, and to permit
-// persons to whom the Software is furnished to do so, subject to the
-// following conditions:
-//
-// The above copyright notice and this permission notice shall be included
-// in all copies or substantial portions of the Software.
-//
-// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
-// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
-// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
-// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
-// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
-// USE OR OTHER DEALINGS IN THE SOFTWARE.
-
-var getOwnPropertyDescriptors = Object.getOwnPropertyDescriptors ||
- function getOwnPropertyDescriptors(obj) {
- var keys = Object.keys(obj);
- var descriptors = {};
- for (var i = 0; i < keys.length; i++) {
- descriptors[keys[i]] = Object.getOwnPropertyDescriptor(obj, keys[i]);
- }
- return descriptors;
- };
-
-var formatRegExp = /%[sdj%]/g;
-exports.format = function(f) {
- if (!isString(f)) {
- var objects = [];
- for (var i = 0; i < arguments.length; i++) {
- objects.push(inspect(arguments[i]));
- }
- return objects.join(' ');
- }
-
- var i = 1;
- var args = arguments;
- var len = args.length;
- var str = String(f).replace(formatRegExp, function(x) {
- if (x === '%%') return '%';
- if (i >= len) return x;
- switch (x) {
- case '%s': return String(args[i++]);
- case '%d': return Number(args[i++]);
- case '%j':
- try {
- return JSON.stringify(args[i++]);
- } catch (_) {
- return '[Circular]';
- }
- default:
- return x;
- }
- });
- for (var x = args[i]; i < len; x = args[++i]) {
- if (isNull(x) || !isObject(x)) {
- str += ' ' + x;
- } else {
- str += ' ' + inspect(x);
- }
- }
- return str;
-};
-
-
-// Mark that a method should not be used.
-// Returns a modified function which warns once by default.
-// If --no-deprecation is set, then it is a no-op.
-exports.deprecate = function(fn, msg) {
- if (typeof process !== 'undefined' && process.noDeprecation === true) {
- return fn;
- }
-
- // Allow for deprecating things in the process of starting up.
- if (typeof process === 'undefined') {
- return function() {
- return exports.deprecate(fn, msg).apply(this, arguments);
- };
- }
-
- var warned = false;
- function deprecated() {
- if (!warned) {
- if (process.throwDeprecation) {
- throw new Error(msg);
- } else if (process.traceDeprecation) {
- console.trace(msg);
- } else {
- console.error(msg);
- }
- warned = true;
- }
- return fn.apply(this, arguments);
- }
-
- return deprecated;
-};
-
-
-var debugs = {};
-var debugEnvRegex = /^$/;
-
-if (process.env.NODE_DEBUG) {
- var debugEnv = process.env.NODE_DEBUG;
- debugEnv = debugEnv.replace(/[|\\{}()[\]^$+?.]/g, '\\$&')
- .replace(/\*/g, '.*')
- .replace(/,/g, '$|^')
- .toUpperCase();
- debugEnvRegex = new RegExp('^' + debugEnv + '$', 'i');
-}
-exports.debuglog = function(set) {
- set = set.toUpperCase();
- if (!debugs[set]) {
- if (debugEnvRegex.test(set)) {
- var pid = process.pid;
- debugs[set] = function() {
- var msg = exports.format.apply(exports, arguments);
- console.error('%s %d: %s', set, pid, msg);
- };
- } else {
- debugs[set] = function() {};
- }
- }
- return debugs[set];
-};
-
-
-/**
- * Echos the value of a value. Trys to print the value out
- * in the best way possible given the different types.
- *
- * @param {Object} obj The object to print out.
- * @param {Object} opts Optional options object that alters the output.
- */
-/* legacy: obj, showHidden, depth, colors*/
-function inspect(obj, opts) {
- // default options
- var ctx = {
- seen: [],
- stylize: stylizeNoColor
- };
- // legacy...
- if (arguments.length >= 3) ctx.depth = arguments[2];
- if (arguments.length >= 4) ctx.colors = arguments[3];
- if (isBoolean(opts)) {
- // legacy...
- ctx.showHidden = opts;
- } else if (opts) {
- // got an "options" object
- exports._extend(ctx, opts);
- }
- // set default options
- if (isUndefined(ctx.showHidden)) ctx.showHidden = false;
- if (isUndefined(ctx.depth)) ctx.depth = 2;
- if (isUndefined(ctx.colors)) ctx.colors = false;
- if (isUndefined(ctx.customInspect)) ctx.customInspect = true;
- if (ctx.colors) ctx.stylize = stylizeWithColor;
- return formatValue(ctx, obj, ctx.depth);
-}
-exports.inspect = inspect;
-
-
-// http://en.wikipedia.org/wiki/ANSI_escape_code#graphics
-inspect.colors = {
- 'bold' : [1, 22],
- 'italic' : [3, 23],
- 'underline' : [4, 24],
- 'inverse' : [7, 27],
- 'white' : [37, 39],
- 'grey' : [90, 39],
- 'black' : [30, 39],
- 'blue' : [34, 39],
- 'cyan' : [36, 39],
- 'green' : [32, 39],
- 'magenta' : [35, 39],
- 'red' : [31, 39],
- 'yellow' : [33, 39]
-};
-
-// Don't use 'blue' not visible on cmd.exe
-inspect.styles = {
- 'special': 'cyan',
- 'number': 'yellow',
- 'boolean': 'yellow',
- 'undefined': 'grey',
- 'null': 'bold',
- 'string': 'green',
- 'date': 'magenta',
- // "name": intentionally not styling
- 'regexp': 'red'
-};
-
-
-function stylizeWithColor(str, styleType) {
- var style = inspect.styles[styleType];
-
- if (style) {
- return '\u001b[' + inspect.colors[style][0] + 'm' + str +
- '\u001b[' + inspect.colors[style][1] + 'm';
- } else {
- return str;
- }
-}
-
-
-function stylizeNoColor(str, styleType) {
- return str;
-}
-
-
-function arrayToHash(array) {
- var hash = {};
-
- array.forEach(function(val, idx) {
- hash[val] = true;
- });
-
- return hash;
-}
-
-
-function formatValue(ctx, value, recurseTimes) {
- // Provide a hook for user-specified inspect functions.
- // Check that value is an object with an inspect function on it
- if (ctx.customInspect &&
- value &&
- isFunction(value.inspect) &&
- // Filter out the util module, it's inspect function is special
- value.inspect !== exports.inspect &&
- // Also filter out any prototype objects using the circular check.
- !(value.constructor && value.constructor.prototype === value)) {
- var ret = value.inspect(recurseTimes, ctx);
- if (!isString(ret)) {
- ret = formatValue(ctx, ret, recurseTimes);
- }
- return ret;
- }
-
- // Primitive types cannot have properties
- var primitive = formatPrimitive(ctx, value);
- if (primitive) {
- return primitive;
- }
-
- // Look up the keys of the object.
- var keys = Object.keys(value);
- var visibleKeys = arrayToHash(keys);
-
- if (ctx.showHidden) {
- keys = Object.getOwnPropertyNames(value);
- }
-
- // IE doesn't make error fields non-enumerable
- // http://msdn.microsoft.com/en-us/library/ie/dww52sbt(v=vs.94).aspx
- if (isError(value)
- && (keys.indexOf('message') >= 0 || keys.indexOf('description') >= 0)) {
- return formatError(value);
- }
-
- // Some type of object without properties can be shortcutted.
- if (keys.length === 0) {
- if (isFunction(value)) {
- var name = value.name ? ': ' + value.name : '';
- return ctx.stylize('[Function' + name + ']', 'special');
- }
- if (isRegExp(value)) {
- return ctx.stylize(RegExp.prototype.toString.call(value), 'regexp');
- }
- if (isDate(value)) {
- return ctx.stylize(Date.prototype.toString.call(value), 'date');
- }
- if (isError(value)) {
- return formatError(value);
- }
- }
-
- var base = '', array = false, braces = ['{', '}'];
-
- // Make Array say that they are Array
- if (isArray(value)) {
- array = true;
- braces = ['[', ']'];
- }
-
- // Make functions say that they are functions
- if (isFunction(value)) {
- var n = value.name ? ': ' + value.name : '';
- base = ' [Function' + n + ']';
- }
-
- // Make RegExps say that they are RegExps
- if (isRegExp(value)) {
- base = ' ' + RegExp.prototype.toString.call(value);
- }
-
- // Make dates with properties first say the date
- if (isDate(value)) {
- base = ' ' + Date.prototype.toUTCString.call(value);
- }
-
- // Make error with message first say the error
- if (isError(value)) {
- base = ' ' + formatError(value);
- }
-
- if (keys.length === 0 && (!array || value.length == 0)) {
- return braces[0] + base + braces[1];
- }
-
- if (recurseTimes < 0) {
- if (isRegExp(value)) {
- return ctx.stylize(RegExp.prototype.toString.call(value), 'regexp');
- } else {
- return ctx.stylize('[Object]', 'special');
- }
- }
-
- ctx.seen.push(value);
-
- var output;
- if (array) {
- output = formatArray(ctx, value, recurseTimes, visibleKeys, keys);
- } else {
- output = keys.map(function(key) {
- return formatProperty(ctx, value, recurseTimes, visibleKeys, key, array);
- });
- }
-
- ctx.seen.pop();
-
- return reduceToSingleString(output, base, braces);
-}
-
-
-function formatPrimitive(ctx, value) {
- if (isUndefined(value))
- return ctx.stylize('undefined', 'undefined');
- if (isString(value)) {
- var simple = '\'' + JSON.stringify(value).replace(/^"|"$/g, '')
- .replace(/'/g, "\\'")
- .replace(/\\"/g, '"') + '\'';
- return ctx.stylize(simple, 'string');
- }
- if (isNumber(value))
- return ctx.stylize('' + value, 'number');
- if (isBoolean(value))
- return ctx.stylize('' + value, 'boolean');
- // For some reason typeof null is "object", so special case here.
- if (isNull(value))
- return ctx.stylize('null', 'null');
-}
-
-
-function formatError(value) {
- return '[' + Error.prototype.toString.call(value) + ']';
-}
-
-
-function formatArray(ctx, value, recurseTimes, visibleKeys, keys) {
- var output = [];
- for (var i = 0, l = value.length; i < l; ++i) {
- if (hasOwnProperty(value, String(i))) {
- output.push(formatProperty(ctx, value, recurseTimes, visibleKeys,
- String(i), true));
- } else {
- output.push('');
- }
- }
- keys.forEach(function(key) {
- if (!key.match(/^\d+$/)) {
- output.push(formatProperty(ctx, value, recurseTimes, visibleKeys,
- key, true));
- }
- });
- return output;
-}
-
-
-function formatProperty(ctx, value, recurseTimes, visibleKeys, key, array) {
- var name, str, desc;
- desc = Object.getOwnPropertyDescriptor(value, key) || { value: value[key] };
- if (desc.get) {
- if (desc.set) {
- str = ctx.stylize('[Getter/Setter]', 'special');
- } else {
- str = ctx.stylize('[Getter]', 'special');
- }
- } else {
- if (desc.set) {
- str = ctx.stylize('[Setter]', 'special');
- }
- }
- if (!hasOwnProperty(visibleKeys, key)) {
- name = '[' + key + ']';
- }
- if (!str) {
- if (ctx.seen.indexOf(desc.value) < 0) {
- if (isNull(recurseTimes)) {
- str = formatValue(ctx, desc.value, null);
- } else {
- str = formatValue(ctx, desc.value, recurseTimes - 1);
- }
- if (str.indexOf('\n') > -1) {
- if (array) {
- str = str.split('\n').map(function(line) {
- return ' ' + line;
- }).join('\n').substr(2);
- } else {
- str = '\n' + str.split('\n').map(function(line) {
- return ' ' + line;
- }).join('\n');
- }
- }
- } else {
- str = ctx.stylize('[Circular]', 'special');
- }
- }
- if (isUndefined(name)) {
- if (array && key.match(/^\d+$/)) {
- return str;
- }
- name = JSON.stringify('' + key);
- if (name.match(/^"([a-zA-Z_][a-zA-Z_0-9]*)"$/)) {
- name = name.substr(1, name.length - 2);
- name = ctx.stylize(name, 'name');
- } else {
- name = name.replace(/'/g, "\\'")
- .replace(/\\"/g, '"')
- .replace(/(^"|"$)/g, "'");
- name = ctx.stylize(name, 'string');
- }
- }
-
- return name + ': ' + str;
-}
-
-
-function reduceToSingleString(output, base, braces) {
- var numLinesEst = 0;
- var length = output.reduce(function(prev, cur) {
- numLinesEst++;
- if (cur.indexOf('\n') >= 0) numLinesEst++;
- return prev + cur.replace(/\u001b\[\d\d?m/g, '').length + 1;
- }, 0);
-
- if (length > 60) {
- return braces[0] +
- (base === '' ? '' : base + '\n ') +
- ' ' +
- output.join(',\n ') +
- ' ' +
- braces[1];
- }
-
- return braces[0] + base + ' ' + output.join(', ') + ' ' + braces[1];
-}
-
-
-// NOTE: These type checking functions intentionally don't use `instanceof`
-// because it is fragile and can be easily faked with `Object.create()`.
-exports.types = require('./support/types');
-
-function isArray(ar) {
- return Array.isArray(ar);
-}
-exports.isArray = isArray;
-
-function isBoolean(arg) {
- return typeof arg === 'boolean';
-}
-exports.isBoolean = isBoolean;
-
-function isNull(arg) {
- return arg === null;
-}
-exports.isNull = isNull;
-
-function isNullOrUndefined(arg) {
- return arg == null;
-}
-exports.isNullOrUndefined = isNullOrUndefined;
-
-function isNumber(arg) {
- return typeof arg === 'number';
-}
-exports.isNumber = isNumber;
-
-function isString(arg) {
- return typeof arg === 'string';
-}
-exports.isString = isString;
-
-function isSymbol(arg) {
- return typeof arg === 'symbol';
-}
-exports.isSymbol = isSymbol;
-
-function isUndefined(arg) {
- return arg === void 0;
-}
-exports.isUndefined = isUndefined;
-
-function isRegExp(re) {
- return isObject(re) && objectToString(re) === '[object RegExp]';
-}
-exports.isRegExp = isRegExp;
-exports.types.isRegExp = isRegExp;
-
-function isObject(arg) {
- return typeof arg === 'object' && arg !== null;
-}
-exports.isObject = isObject;
-
-function isDate(d) {
- return isObject(d) && objectToString(d) === '[object Date]';
-}
-exports.isDate = isDate;
-exports.types.isDate = isDate;
-
-function isError(e) {
- return isObject(e) &&
- (objectToString(e) === '[object Error]' || e instanceof Error);
-}
-exports.isError = isError;
-exports.types.isNativeError = isError;
-
-function isFunction(arg) {
- return typeof arg === 'function';
-}
-exports.isFunction = isFunction;
-
-function isPrimitive(arg) {
- return arg === null ||
- typeof arg === 'boolean' ||
- typeof arg === 'number' ||
- typeof arg === 'string' ||
- typeof arg === 'symbol' || // ES6 symbol
- typeof arg === 'undefined';
-}
-exports.isPrimitive = isPrimitive;
-
-exports.isBuffer = require('./support/isBuffer');
-
-function objectToString(o) {
- return Object.prototype.toString.call(o);
-}
-
-
-function pad(n) {
- return n < 10 ? '0' + n.toString(10) : n.toString(10);
-}
-
-
-var months = ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep',
- 'Oct', 'Nov', 'Dec'];
-
-// 26 Feb 16:19:34
-function timestamp() {
- var d = new Date();
- var time = [pad(d.getHours()),
- pad(d.getMinutes()),
- pad(d.getSeconds())].join(':');
- return [d.getDate(), months[d.getMonth()], time].join(' ');
-}
-
-
-// log is just a thin wrapper to console.log that prepends a timestamp
-exports.log = function() {
- console.log('%s - %s', timestamp(), exports.format.apply(exports, arguments));
-};
-
-
-/**
- * Inherit the prototype methods from one constructor into another.
- *
- * The Function.prototype.inherits from lang.js rewritten as a standalone
- * function (not on Function.prototype). NOTE: If this file is to be loaded
- * during bootstrapping this function needs to be rewritten using some native
- * functions as prototype setup using normal JavaScript does not work as
- * expected during bootstrapping (see mirror.js in r114903).
- *
- * @param {function} ctor Constructor function which needs to inherit the
- * prototype.
- * @param {function} superCtor Constructor function to inherit prototype from.
- */
-exports.inherits = require('inherits');
-
-exports._extend = function(origin, add) {
- // Don't do anything if add isn't an object
- if (!add || !isObject(add)) return origin;
-
- var keys = Object.keys(add);
- var i = keys.length;
- while (i--) {
- origin[keys[i]] = add[keys[i]];
- }
- return origin;
-};
-
-function hasOwnProperty(obj, prop) {
- return Object.prototype.hasOwnProperty.call(obj, prop);
-}
-
-var kCustomPromisifiedSymbol = typeof Symbol !== 'undefined' ? Symbol('util.promisify.custom') : undefined;
-
-exports.promisify = function promisify(original) {
- if (typeof original !== 'function')
- throw new TypeError('The "original" argument must be of type Function');
-
- if (kCustomPromisifiedSymbol && original[kCustomPromisifiedSymbol]) {
- var fn = original[kCustomPromisifiedSymbol];
- if (typeof fn !== 'function') {
- throw new TypeError('The "util.promisify.custom" argument must be of type Function');
- }
- Object.defineProperty(fn, kCustomPromisifiedSymbol, {
- value: fn, enumerable: false, writable: false, configurable: true
- });
- return fn;
- }
-
- function fn() {
- var promiseResolve, promiseReject;
- var promise = new Promise(function (resolve, reject) {
- promiseResolve = resolve;
- promiseReject = reject;
- });
-
- var args = [];
- for (var i = 0; i < arguments.length; i++) {
- args.push(arguments[i]);
- }
- args.push(function (err, value) {
- if (err) {
- promiseReject(err);
- } else {
- promiseResolve(value);
- }
- });
-
- try {
- original.apply(this, args);
- } catch (err) {
- promiseReject(err);
- }
-
- return promise;
- }
-
- Object.setPrototypeOf(fn, Object.getPrototypeOf(original));
-
- if (kCustomPromisifiedSymbol) Object.defineProperty(fn, kCustomPromisifiedSymbol, {
- value: fn, enumerable: false, writable: false, configurable: true
- });
- return Object.defineProperties(
- fn,
- getOwnPropertyDescriptors(original)
- );
-}
-
-exports.promisify.custom = kCustomPromisifiedSymbol
-
-function callbackifyOnRejected(reason, cb) {
- // `!reason` guard inspired by bluebird (Ref: https://goo.gl/t5IS6M).
- // Because `null` is a special error value in callbacks which means "no error
- // occurred", we error-wrap so the callback consumer can distinguish between
- // "the promise rejected with null" or "the promise fulfilled with undefined".
- if (!reason) {
- var newReason = new Error('Promise was rejected with a falsy value');
- newReason.reason = reason;
- reason = newReason;
- }
- return cb(reason);
-}
-
-function callbackify(original) {
- if (typeof original !== 'function') {
- throw new TypeError('The "original" argument must be of type Function');
- }
-
- // We DO NOT return the promise as it gives the user a false sense that
- // the promise is actually somehow related to the callback's execution
- // and that the callback throwing will reject the promise.
- function callbackified() {
- var args = [];
- for (var i = 0; i < arguments.length; i++) {
- args.push(arguments[i]);
- }
-
- var maybeCb = args.pop();
- if (typeof maybeCb !== 'function') {
- throw new TypeError('The last argument must be of type Function');
- }
- var self = this;
- var cb = function() {
- return maybeCb.apply(self, arguments);
- };
- // In true node style we process the callback on `nextTick` with all the
- // implications (stack, `uncaughtException`, `async_hooks`)
- original.apply(this, args)
- .then(function(ret) { process.nextTick(cb.bind(null, null, ret)) },
- function(rej) { process.nextTick(callbackifyOnRejected.bind(null, rej, cb)) });
- }
-
- Object.setPrototypeOf(callbackified, Object.getPrototypeOf(original));
- Object.defineProperties(callbackified,
- getOwnPropertyDescriptors(original));
- return callbackified;
-}
-exports.callbackify = callbackify;
-
-}).call(this)}).call(this,require('_process'))
-},{"./support/isBuffer":47,"./support/types":48,"_process":27,"inherits":23}],50:[function(require,module,exports){
-(function (global){(function (){
-'use strict';
-
-var forEach = require('foreach');
-var availableTypedArrays = require('available-typed-arrays');
-var callBound = require('call-bind/callBound');
-
-var $toString = callBound('Object.prototype.toString');
-var hasSymbols = require('has-symbols')();
-var hasToStringTag = hasSymbols && typeof Symbol.toStringTag === 'symbol';
-
-var typedArrays = availableTypedArrays();
-
-var $slice = callBound('String.prototype.slice');
-var toStrTags = {};
-var gOPD = require('es-abstract/helpers/getOwnPropertyDescriptor');
-var getPrototypeOf = Object.getPrototypeOf; // require('getprototypeof');
-if (hasToStringTag && gOPD && getPrototypeOf) {
- forEach(typedArrays, function (typedArray) {
- if (typeof global[typedArray] === 'function') {
- var arr = new global[typedArray]();
- if (!(Symbol.toStringTag in arr)) {
- throw new EvalError('this engine has support for Symbol.toStringTag, but ' + typedArray + ' does not have the property! Please report this.');
- }
- var proto = getPrototypeOf(arr);
- var descriptor = gOPD(proto, Symbol.toStringTag);
- if (!descriptor) {
- var superProto = getPrototypeOf(proto);
- descriptor = gOPD(superProto, Symbol.toStringTag);
- }
- toStrTags[typedArray] = descriptor.get;
- }
- });
-}
-
-var tryTypedArrays = function tryAllTypedArrays(value) {
- var foundName = false;
- forEach(toStrTags, function (getter, typedArray) {
- if (!foundName) {
- try {
- var name = getter.call(value);
- if (name === typedArray) {
- foundName = name;
- }
- } catch (e) {}
- }
- });
- return foundName;
-};
-
-var isTypedArray = require('is-typed-array');
-
-module.exports = function whichTypedArray(value) {
- if (!isTypedArray(value)) { return false; }
- if (!hasToStringTag) { return $slice($toString(value), 8, -1); }
- return tryTypedArrays(value);
-};
-
-}).call(this)}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
-},{"available-typed-arrays":7,"call-bind/callBound":11,"es-abstract/helpers/getOwnPropertyDescriptor":13,"foreach":15,"has-symbols":19,"is-typed-array":26}],51:[function(require,module,exports){
-(function (setImmediate){(function (){
-// Generated by CoffeeScript 2.5.1
-// # Stream Transformer
-
-// Pass all elements of an array or a stream to transform, filter and add. Features include:
-
-// * Extends the Node.js "stream.Transform" API.
-// * Both synchrounous and asynchronous support based and user callback
-// arguments signature.
-// * Ability to skip records.
-// * Sequential and concurrent execution using the "parallel" options.
-
-// Please look at the [README], the [samples] and the [tests] for additional
-// information.
-var Transformer, clone, stream, util;
-
-stream = require('stream');
-
-util = require('util');
-
-({clone} = require('mixme'));
-
-// ## Usage
-
-// Callback approach, for ease of use:
-
-// `transform(records, [options], handler, callback)`
-
-// Stream API, for maximum of power:
-
-// `transform([records], [options], handler, [callback])`
-module.exports = function() {
- var argument, callback, error, handler, i, j, len, options, records, result, transform, type;
- options = {};
- for (i = j = 0, len = arguments.length; j < len; i = ++j) {
- argument = arguments[i];
- type = typeof argument;
- if (argument === null) {
- type = 'null';
- } else if (type === 'object' && Array.isArray(argument)) {
- type = 'array';
- }
- if (type === 'array') {
- records = argument;
- } else if (type === 'object') {
- options = clone(argument);
- } else if (type === 'function') {
- if (handler && i === arguments.length - 1) {
- callback = argument;
- } else {
- handler = argument;
- }
- } else if (type !== 'null') {
- throw new Error(`Invalid Arguments: got ${JSON.stringify(argument)} at position ${i}`);
- }
- }
- transform = new Transformer(options, handler);
- error = false;
- if (records) {
- setImmediate(function() {
- var k, len1, record;
- for (k = 0, len1 = records.length; k < len1; k++) {
- record = records[k];
- if (error) {
- break;
- }
- transform.write(record);
- }
- return transform.end();
- });
- }
- if (callback || options.consume) {
- result = [];
- transform.on('readable', function() {
- var record, results;
- results = [];
- while ((record = transform.read())) {
- if (callback) {
- results.push(result.push(record));
- } else {
- results.push(void 0);
- }
- }
- return results;
- });
- transform.on('error', function(err) {
- error = true;
- if (callback) {
- return callback(err);
- }
- });
- transform.on('end', function() {
- if (callback && !error) {
- return callback(null, result);
- }
- });
- }
- return transform;
-};
-
-// ## Transformer
-
-// Options are documented [here](http://csv.js.org/transform/options/).
-Transformer = function(options1 = {}, handler1) {
- var base, base1;
- this.options = options1;
- this.handler = handler1;
- if ((base = this.options).consume == null) {
- base.consume = false;
- }
- this.options.objectMode = true;
- if ((base1 = this.options).parallel == null) {
- base1.parallel = 100;
- }
- stream.Transform.call(this, this.options);
- this.state = {
- running: 0,
- started: 0,
- finished: 0
- };
- return this;
-};
-
-util.inherits(Transformer, stream.Transform);
-
-module.exports.Transformer = Transformer;
-
-Transformer.prototype._transform = function(chunk, encoding, cb) {
- var callback, err, l;
- this.state.started++;
- this.state.running++;
- if (this.state.running < this.options.parallel) {
- cb();
- cb = null;
- }
- try {
- l = this.handler.length;
- if (this.options.params != null) {
- l--;
- }
- if (l === 1) { // sync
- this.__done(null, [this.handler.call(this, chunk, this.options.params)], cb);
- } else if (l === 2) { // async
- callback = (err, ...chunks) => {
- return this.__done(err, chunks, cb);
- };
- this.handler.call(this, chunk, callback, this.options.params);
- } else {
- throw Error("Invalid handler arguments");
- }
- return false;
- } catch (error1) {
- err = error1;
- return this.__done(err);
- }
-};
-
-Transformer.prototype._flush = function(cb) {
- this._ending = function() {
- if (this.state.running === 0) {
- this._ending = undefined
- return cb();
- }
- };
- return this._ending();
-};
-
-Transformer.prototype.__done = function(err, chunks, cb) {
- var chunk, j, len;
- this.state.running--;
- if (err) {
- return this.emit('error', err);
- }
- this.state.finished++;
- for (j = 0, len = chunks.length; j < len; j++) {
- chunk = chunks[j];
- if (typeof chunk === 'number') {
- chunk = `${chunk}`;
- }
- if ((chunk != null) && chunk !== '') {
- // We dont push empty string
- // See https://nodejs.org/api/stream.html#stream_readable_push
- this.push(chunk);
- }
- }
- if (cb) {
- cb();
- }
- if (this._ending) {
- return this._ending();
- }
-};
-
-// [readme]: https://github.com/wdavidw/node-stream-transform
-// [samples]: https://github.com/wdavidw/node-stream-transform/tree/master/samples
-// [tests]: https://github.com/wdavidw/node-stream-transform/tree/master/test
-
-}).call(this)}).call(this,require("timers").setImmediate)
-},{"mixme":1,"stream":29,"timers":45,"util":49}]},{},[6])(6)
-});
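
The removed bundle above is the browserify/babel output of stream-transform; its usage contract, documented in the inlined comments, is unchanged by the rebuild. A minimal sketch of the two calling conventions, assuming the named ESM export introduced by this migration (the records and handler below are invented for illustration):

    import {transform} from 'stream-transform'

    // Callback form: transform(records, [options], handler, callback)
    transform([['a', 'b'], ['c', 'd']], (record) => record.reverse(), (err, output) => {
      if (!err) console.log(output)   // [['b', 'a'], ['d', 'c']]
    })

    // Stream form: write records in, read transformed records out
    const transformer = transform((record) => record.reverse())
    transformer.on('readable', () => {
      let record
      while ((record = transformer.read()) !== null) console.log(record)
    })
    transformer.write(['e', 'f'])
    transformer.end()
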
diff --git a/packages/csv/lib/es5/sync.d.ts b/packages/csv/lib/es5/sync.d.ts
index 90f747fd2..73e90ce6a 100644
--- a/packages/csv/lib/es5/sync.d.ts
+++ b/packages/csv/lib/es5/sync.d.ts
@@ -1,11 +1,11 @@
// Alias to the ES6 modules exposing the stream and callback APIs
-//import * as generate from "csv-generate/lib/sync";
+import * as generateImport from "csv-generate/lib/sync";
import * as parseImport from 'csv-parse/lib/sync';
import * as transformImport from 'stream-transform/lib/sync';
import * as stringifyImport from 'csv-stringify/lib/sync';
-//export var generate: typeof generateImport,
+export var generate: typeof generateImport;
export var parse: typeof parseImport;
export var transform: typeof transformImport;
export var stringify: typeof stringifyImport;
diff --git a/packages/csv/lib/index.d.ts b/packages/csv/lib/index.d.ts
index c4f35a838..7c4d227c0 100644
--- a/packages/csv/lib/index.d.ts
+++ b/packages/csv/lib/index.d.ts
@@ -1,13 +1,9 @@
-// Alias to the ES6 modules exposing the stream and callback APIs
-import * as generateImport from 'csv-generate';
-import * as parseImport from 'csv-parse';
-import * as transformImport from 'stream-transform';
-import * as stringifyImport from 'csv-stringify';
+// Alias to the modules exposing the stream and callback APIs
-export var generate: typeof generateImport;
-export var parse: typeof parseImport;
-export var transform: typeof transformImport;
-export var stringify: typeof stringifyImport;
+import generate from 'csv-generate/lib/index.js';
+import parse from 'csv-parse/lib/index.js';
+import transform from 'stream-transform/lib/index.js';
+import stringify from 'csv-stringify/lib/index.js';
-export as namespace csv;
+export {generate, parse, transform, stringify}
diff --git a/packages/csv/lib/index.js b/packages/csv/lib/index.js
index bcdaad278..0f601abb4 100644
--- a/packages/csv/lib/index.js
+++ b/packages/csv/lib/index.js
@@ -1,9 +1,9 @@
-// Alias to the ES6 modules exposing the stream and callback APIs
+// Alias to the modules exposing the stream and callback APIs
-module.exports = {
- generate: require('csv-generate/lib'),
- parse: require('csv-parse/lib'),
- transform: require('stream-transform/lib'),
- stringify: require('csv-stringify/lib')
-}
+import generate from 'csv-generate/lib/index.js'
+import parse from 'csv-parse/lib/index.js'
+import stringify from 'csv-stringify/lib/index.js'
+import transform from 'stream-transform/lib/index.js'
+
+export {generate, parse, stringify, transform}
diff --git a/packages/csv/lib/sync.d.ts b/packages/csv/lib/sync.d.ts
index 90f747fd2..9483d5b9e 100644
--- a/packages/csv/lib/sync.d.ts
+++ b/packages/csv/lib/sync.d.ts
@@ -1,13 +1,9 @@
-// Alias to the ES6 modules exposing the stream and callback APIs
-//import * as generate from "csv-generate/lib/sync";
-import * as parseImport from 'csv-parse/lib/sync';
-import * as transformImport from 'stream-transform/lib/sync';
-import * as stringifyImport from 'csv-stringify/lib/sync';
+// Alias to the modules exposing the sync APIs
-//export var generate: typeof generateImport,
-export var parse: typeof parseImport;
-export var transform: typeof transformImport;
-export var stringify: typeof stringifyImport;
+import generate from 'csv-generate/lib/sync.js';
+import parse from 'csv-parse/lib/sync.js';
+import transform from 'stream-transform/lib/sync.js';
+import stringify from 'csv-stringify/lib/sync.js';
-export as namespace csv;
+export {generate, parse, transform, stringify}
diff --git a/packages/csv/lib/sync.js b/packages/csv/lib/sync.js
index ca3d4e942..d5c3f3c9c 100644
--- a/packages/csv/lib/sync.js
+++ b/packages/csv/lib/sync.js
@@ -1,9 +1,9 @@
-// Alias to the ES6 modules exposing the sync API
+// Alias to the modules exposing the sync API
-module.exports = {
- generate: require('csv-generate/lib/sync'),
- parse: require('csv-parse/lib/sync'),
- transform: require('stream-transform/lib/sync'),
- stringify: require('csv-stringify/lib/sync')
-}
+import generate from 'csv-generate/lib/sync.js'
+import parse from 'csv-parse/lib/sync.js'
+import stringify from 'csv-stringify/lib/sync.js'
+import transform from 'stream-transform/lib/sync.js'
+
+export {generate, parse, stringify, transform}
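
With lib/sync.js now re-exporting named ES modules, consumers import the sync functions directly rather than destructuring a CommonJS object. A minimal sketch, assuming a deep import path into the published package (the exact specifier depends on the package's exports configuration):

    import {parse, stringify} from 'csv/lib/sync.js'

    // Parse a two-row CSV string synchronously...
    const records = parse('a,b\n1,2')        // [['a', 'b'], ['1', '2']]
    // ...and serialise it back; the sync stringifier appends a trailing newline.
    const csvText = stringify(records)       // 'a,b\n1,2\n'
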
diff --git a/packages/csv/package.json b/packages/csv/package.json
index f48acd975..2564f0d69 100644
--- a/packages/csv/package.json
+++ b/packages/csv/package.json
@@ -63,16 +63,15 @@
"typescript": "^4.4.2"
},
"mocha": {
- "throw-deprecation": true,
- "require": [
- "should",
- "coffeescript/register",
- "ts-node/register"
- ],
"inline-diffs": true,
- "timeout": 40000,
+ "loader": "./test/loaders/all.mjs",
+ "recursive": true,
"reporter": "spec",
- "recursive": true
+ "require": [
+ "should"
+ ],
+ "throw-deprecation": true,
+ "timeout": 40000
},
"files": [
"lib"
@@ -82,5 +81,6 @@
"preversion": "cp lib/*.ts lib/es5 && git add lib/es5/*.ts",
"test": "mocha test/**/*.{coffee,ts}"
},
+ "type": "module",
"types": "./lib/index.d.ts"
}
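
Adding "type": "module" means Node.js now treats every .js file in the package as an ES module, so a CommonJS require('csv') fails with ERR_REQUIRE_ESM. A short sketch of the consumer-side implication (package name as published is assumed):

    // In an ES module (or a .mjs file):
    import * as csv from 'csv'

    // From CommonJS, only dynamic import still works once "type": "module" is set:
    // import('csv').then((csv) => { /* use csv.parse, csv.stringify, ... */ })
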
diff --git a/packages/csv/samples/callback.js b/packages/csv/samples/callback.js
index 7bb296b76..424225ac0 100644
--- a/packages/csv/samples/callback.js
+++ b/packages/csv/samples/callback.js
@@ -1,5 +1,5 @@
-var csv = require('..');
+import csv from '../lib/index.js'
csv.generate({seed: 1, columns: 2, length: 20}, function(err, data){
csv.parse(data, function(err, data){
diff --git a/packages/csv/samples/pipe.js b/packages/csv/samples/pipe.js
index 243eb957f..c0081b3f2 100644
--- a/packages/csv/samples/pipe.js
+++ b/packages/csv/samples/pipe.js
@@ -1,6 +1,6 @@
// Import the package main module
-const csv = require('..')
+import csv from '../lib/index.js'
// Use the module
csv
// Generate 20 records
diff --git a/packages/csv/samples/pipe_funny.js b/packages/csv/samples/pipe_funny.js
index e1036c15c..4f7846c1a 100644
--- a/packages/csv/samples/pipe_funny.js
+++ b/packages/csv/samples/pipe_funny.js
@@ -1,6 +1,6 @@
// Import the package main module
-const csv = require('..')
+import csv from '../lib/index.js'
// Use the module
csv.generate ({seed: 1, length: 20}).pipe(
csv.parse ()).pipe(
diff --git a/packages/csv/samples/stream.js b/packages/csv/samples/stream.js
index caf2327fd..96645668f 100644
--- a/packages/csv/samples/stream.js
+++ b/packages/csv/samples/stream.js
@@ -1,17 +1,18 @@
-var csv = require('..');
-var i = 0
+import * as csv from '../lib/index.js'
-var generator = csv.generate({seed: 1, columns: 2, length: 20});
-var parser = csv.parse();
-var transformer = csv.transform(function(data){
+let i = 0
+
+const generator = csv.generate({seed: 1, columns: 2, length: 20});
+const parser = csv.parse();
+const transformer = csv.transform(function(data){
i++
return data.map(function(value){return value.toUpperCase()});
});
-var stringifier = csv.stringify();
+const stringifier = csv.stringify();
generator.on('readable', function(){
- while(data = generator.read()){
+ let data; while(data = generator.read()){
parser.write(data);
}
});
@@ -20,7 +21,7 @@ generator.on('end', function(){
});
parser.on('readable', function(){
- while(data = parser.read()){
+ let data; while(data = parser.read()){
transformer.write(data);
}
});
@@ -29,7 +30,7 @@ parser.on('end', function(){
});
transformer.on('readable', function(){
- while(data = transformer.read()){
+ let data; while(data = transformer.read()){
stringifier.write(data);
}
});
@@ -38,12 +39,10 @@ transformer.on('end', function(){
});
stringifier.on('readable', function(){
- while(data = stringifier.read()){
+ let data; while(data = stringifier.read()){
process.stdout.write(data);
}
});
generator.on('end', function(){
process.stdout.write('=> ' + i + ' records\n');
});
-
-
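
The `let data` declarations added above are not cosmetic: ES modules always execute in strict mode, where assigning to an undeclared variable throws a ReferenceError instead of silently creating a global as the old CommonJS sample did. The pattern in isolation, using the generator and parser from the sample above:

    generator.on('readable', function () {
      let data                    // required: ES modules run in strict mode
      while ((data = generator.read())) {
        parser.write(data)
      }
    })
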
diff --git a/packages/csv/test/api.coffee b/packages/csv/test/api.coffee
new file mode 100644
index 000000000..f467fc7f8
--- /dev/null
+++ b/packages/csv/test/api.coffee
@@ -0,0 +1,32 @@
+
+import {generate, parse, stringify, transform} from '../lib/index.js'
+
+describe 'api', ->
+
+ it 'generate', (next) ->
+ generate length: 1, columns: 1, seed: 1, encoding: 'utf8', (err, data) ->
+ data.should.eql 'OMH' unless err
+ next err
+
+ it 'parse', (next) ->
+ parse 'abc,def', (err, data) ->
+ data.should.eql [ [ 'abc', 'def' ] ] unless err
+ next err
+
+ it 'stringify', (next) ->
+ stringify [ [ 'abc', 'def' ] ], (err, data) ->
+ data.should.eql 'abc,def\n' unless err
+ next err
+
+ it 'transform', (next) ->
+ transform [
+ ['abc','def']
+ ], (record) ->
+ record.push(record.shift())
+ record
+ , (err, output) ->
+ output.should.eql [
+ [ 'def', 'abc' ]
+ ] unless err
+ next err
+
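
For readers less familiar with CoffeeScript, the `generate` case of the new test translates roughly to the following JavaScript (the expected 'OMH' value comes from the seeded test above):

    import {generate} from '../lib/index.js'

    generate({length: 1, columns: 1, seed: 1, encoding: 'utf8'}, (err, data) => {
      if (err) throw err
      console.assert(data === 'OMH')   // deterministic output for seed 1
    })
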
diff --git a/packages/csv/test/api.types.ts b/packages/csv/test/api.types.ts
index e2aeeb4d8..7bf9243ff 100644
--- a/packages/csv/test/api.types.ts
+++ b/packages/csv/test/api.types.ts
@@ -1,15 +1,24 @@
import 'should'
-import {stringify, transform} from '../lib/index'
+import {generate, parse, stringify, transform} from '../lib/index.js'
describe('API Types', () => {
describe('Initialisation', () => {
+ it('generate', () => {
+ // with options + handler
+ generate({length: 1}, (err: Error | undefined, records: Array<Array<string>>) => err || records)
+ })
+
+ it('parse', () => {
+ // With input + handler
+ parse('abc,def', (err: Error | undefined, records: Array<Array<string>>) => err || records)
+ })
+
it('stringify', () => {
// With handler
- const stringifier = stringify( (err: Error | undefined, output: string) => err || output )
- stringifier.should.be.an.Object() // Disable unused variable warning
+ stringify( (err: Error | undefined, output: string) => err || output )
})
it('transform', () => {
diff --git a/packages/csv/test/loaders/all.mjs b/packages/csv/test/loaders/all.mjs
new file mode 100644
index 000000000..4a3828589
--- /dev/null
+++ b/packages/csv/test/loaders/all.mjs
@@ -0,0 +1,37 @@
+
+import * as coffee from './coffee.mjs'
+import * as ts from 'ts-node/esm'
+
+const coffeeRegex = /\.coffee$|\.litcoffee$|\.coffee\.md$/;
+const tsRegex = /\.ts$/;
+
+export function resolve(specifier, context, defaultResolve) {
+ if (coffeeRegex.test(specifier)) {
+ return coffee.resolve.apply(this, arguments)
+ }
+ if (tsRegex.test(specifier)) {
+ return ts.resolve.apply(this, arguments)
+ }
+ return ts.resolve.apply(this, arguments);
+}
+
+export function getFormat(url, context, defaultGetFormat) {
+ if (coffeeRegex.test(url)) {
+ return coffee.getFormat.apply(this, arguments)
+ }
+ if (tsRegex.test(url)) {
+ return ts.getFormat.apply(this, arguments)
+ }
+ return ts.getFormat.apply(this, arguments);
+}
+
+export function transformSource(source, context, defaultTransformSource) {
+ const { url } = context;
+ if (coffeeRegex.test(url)) {
+ return coffee.transformSource.apply(this, arguments)
+ }
+ if (tsRegex.test(url)) {
+ return ts.transformSource.apply(this, arguments)
+ }
+ return ts.transformSource.apply(this, arguments);
+}
diff --git a/packages/csv/test/loaders/coffee.mjs b/packages/csv/test/loaders/coffee.mjs
new file mode 100644
index 000000000..f4945adb7
--- /dev/null
+++ b/packages/csv/test/loaders/coffee.mjs
@@ -0,0 +1,50 @@
+// coffeescript-loader.mjs
+import { URL, pathToFileURL } from 'url';
+import CoffeeScript from 'coffeescript';
+import { cwd } from 'process';
+
+const baseURL = pathToFileURL(`${cwd()}/`).href;
+
+// CoffeeScript files end in .coffee, .litcoffee or .coffee.md.
+const extensionsRegex = /\.coffee$|\.litcoffee$|\.coffee\.md$/;
+
+export function resolve(specifier, context, defaultResolve) {
+ const { parentURL = baseURL } = context;
+ // Node.js normally errors on unknown file extensions, so return a URL for
+ // specifiers ending in the CoffeeScript file extensions.
+ if (extensionsRegex.test(specifier)) {
+ return {
+ url: new URL(specifier, parentURL).href,
+ stop: true
+ };
+ }
+ // Let Node.js handle all other specifiers.
+ return defaultResolve(specifier, context, defaultResolve);
+}
+
+export function getFormat(url, context, defaultGetFormat) {
+ // Now that we patched resolve to let CoffeeScript URLs through, we need to
+ // tell Node.js what format such URLs should be interpreted as. For the
+ // purposes of this loader, all CoffeeScript URLs are ES modules.
+ if (extensionsRegex.test(url)) {
+ return {
+ format: 'module',
+ stop: true
+ };
+ }
+ // Let Node.js handle all other URLs.
+ return defaultGetFormat(url, context, defaultGetFormat);
+}
+
+export function transformSource(source, context, defaultTransformSource) {
+ const { url, format } = context;
+
+ if (extensionsRegex.test(url)) {
+ return {
+ source: CoffeeScript.compile(String(source), { bare: true })
+ };
+ }
+
+ // Let Node.js handle all other sources.
+ return defaultTransformSource(source, context, defaultTransformSource);
+}
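
The transformSource hook above delegates the actual compilation to the coffeescript package; the same call can be made standalone, which is handy for checking what the loader feeds to Node.js (sample source invented for illustration):

    import CoffeeScript from 'coffeescript'

    const source = 'add = (a, b) -> a + b'
    // bare: true mirrors the loader: no top-level function wrapper in the output.
    const js = CoffeeScript.compile(source, {bare: true})
    console.log(js)   // roughly: var add; add = function(a, b) { return a + b; };
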
diff --git a/packages/csv/test/samples.coffee b/packages/csv/test/samples.coffee
new file mode 100644
index 000000000..e85d0ac1e
--- /dev/null
+++ b/packages/csv/test/samples.coffee
@@ -0,0 +1,17 @@
+
+import fs from 'fs'
+import path from 'path'
+import {exec} from 'child_process'
+
+import { fileURLToPath } from 'url';
+__dirname = path.dirname fileURLToPath `import.meta.url`
+dir = path.resolve __dirname, '../samples'
+samples = fs.readdirSync dir
+
+describe 'Samples', ->
+
+ for sample in samples
+ continue unless /\.js$/.test sample
+ it "Sample #{sample}", (callback) ->
+ exec "node #{path.resolve dir, sample}", (err) ->
+ callback err
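
The samples test simply shells out to node once per sample; a JavaScript rendering of one iteration, using a sample file that exists in the directory above:

    import {exec} from 'child_process'
    import path from 'path'
    import {fileURLToPath} from 'url'

    const dir = path.resolve(path.dirname(fileURLToPath(import.meta.url)), '../samples')

    // Any non-zero exit code from the sample surfaces as err and fails the test.
    exec(`node ${path.resolve(dir, 'pipe.js')}`, (err) => {
      if (err) throw err
    })
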
diff --git a/packages/csv/test/sync.coffee b/packages/csv/test/sync.coffee
index 563840470..7122fe231 100644
--- a/packages/csv/test/sync.coffee
+++ b/packages/csv/test/sync.coffee
@@ -1,46 +1,25 @@
-csv = require '../lib/sync'
+import {generate, parse, transform, stringify} from '../lib/sync.js'
describe 'api sync', ->
- it 'expose generate', ->
- csv
- .generate length: 10, objectMode: true
- .length.should.eql 10
+ it 'generate', ->
+ generate length: 1, columns: 1, seed: 1, objectMode: true
+ .should.eql [ [ 'OMH' ] ]
- it 'expose parse', ->
- csv
- .parse """
- a,b,c
- 1,2,3
- """
- .should.eql [
- [ 'a', 'b', 'c' ]
- [ '1', '2', '3' ]
- ]
+ it 'parse', ->
+ parse 'abc,def'
+ .should.eql [ [ 'abc', 'def' ] ]
- it 'expose transform', ->
- csv
- .transform [
- [ 'a', 'b', 'c' ]
- [ '1', '2', '3' ]
+ it 'transform', ->
+ transform [
+ [ 'abc', 'def' ]
], (record) ->
record.push record.shift()
record
- .should.eql [
- [ 'b', 'c', 'a' ]
- [ '2', '3', '1' ]
- ]
+ .should.eql [ [ 'def', 'abc' ] ]
- it 'expose stringify', ->
- csv
- .stringify [
- [ 'a', 'b', 'c' ]
- [ '1', '2', '3' ]
- ]
- .should.eql """
- a,b,c
- 1,2,3
-
- """
+ it 'stringify', ->
+ stringify [ [ 'abc', 'def' ] ]
+ .should.eql 'abc,def\n'
diff --git a/packages/csv/tsconfig.json b/packages/csv/tsconfig.json
index 9f40782ea..4db508a7c 100644
--- a/packages/csv/tsconfig.json
+++ b/packages/csv/tsconfig.json
@@ -1,7 +1,8 @@
{
"compileOnSave": false,
"compilerOptions": {
- "target": "es6",
+ "esModuleInterop": true,
+ "module": "ES2020",
"moduleResolution": "node",
"strict": true,
}
diff --git a/packages/stream-transform/lib/browser/index.js b/packages/stream-transform/lib/browser/index.js
index 4fd025e54..25c4814bf 100644
--- a/packages/stream-transform/lib/browser/index.js
+++ b/packages/stream-transform/lib/browser/index.js
@@ -1,318 +1,55 @@
(function(f){if(typeof exports==="object"&&typeof module!=="undefined"){module.exports=f()}else if(typeof define==="function"&&define.amd){define([],f)}else{var g;if(typeof window!=="undefined"){g=window}else if(typeof global!=="undefined"){g=global}else if(typeof self!=="undefined"){g=self}else{g=this}g.parse = f()}})(function(){var define,module,exports;return (function(){function r(e,n,t){function o(i,f){if(!n[i]){if(!e[i]){var c="function"==typeof require&&require;if(!f&&c)return c(i,!0);if(u)return u(i,!0);var a=new Error("Cannot find module '"+i+"'");throw a.code="MODULE_NOT_FOUND",a}var p=n[i]={exports:{}};e[i][0].call(p.exports,function(r){var n=e[i][1][r];return o(n||r)},p,p.exports,r,e,n,t)}return n[i].exports}for(var u="function"==typeof require&&require,i=0;i ref; i = 1 <= ref ? ++j : --j) {
- source = arguments[i];
-
- if (exports.is_object_literal(source)) {
- if (!exports.is_object_literal(target)) {
- target = {};
- }
-
- for (name in source) {
- if (name === '__proto__') {
- continue;
- }
-
- target[name] = exports.mutate(target[name], source[name]);
- }
- } else if (Array.isArray(source)) {
- target = function () {
- var k, len, results;
- results = [];
-
- for (k = 0, len = source.length; k < len; k++) {
- v = source[k];
- results.push(exports.mutate(void 0, v));
- }
-
- return results;
- }();
- } else if (source !== void 0) {
- target = source;
- }
- }
-
- return target;
-};
-
-exports.snake_case = function snake_case(source) {
- var convert = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : true;
- var name, src, target, u;
- target = {};
-
- if (exports.is_object_literal(source)) {
- u = typeof convert === 'number' && convert > 0 ? convert - 1 : convert;
-
- for (name in source) {
- src = source[name];
-
- if (convert) {
- name = _snake_case(name);
- }
-
- target[name] = exports.snake_case(src, u);
- }
- } else {
- target = source;
- }
-
- return target;
-};
-
-exports.compare = function compare(el1, el2) {
- var i, j, k, key, keys1, keys2, len, ref;
-
- if (exports.is_object_literal(el1)) {
- if (!exports.is_object_literal(el2)) {
- return false;
- }
-
- keys1 = Object.keys(el1).sort();
- keys2 = Object.keys(el2).sort();
-
- if (keys1.length !== keys2.length) {
- return false;
- }
-
- for (i = j = 0, len = keys1.length; j < len; i = ++j) {
- key = keys1[i];
-
- if (key !== keys2[i]) {
- return false;
- }
-
- if (!exports.compare(el1[key], el2[key])) {
- return false;
- }
- }
- } else if (Array.isArray(el1)) {
- if (!Array.isArray(el2)) {
- return false;
- }
-
- if (el1.length !== el2.length) {
- return false;
- }
-
- for (i = k = 0, ref = el1.length; 0 <= ref ? k < ref : k > ref; i = 0 <= ref ? ++k : --k) {
- if (!exports.compare(el1[i], el2[i])) {
- return false;
- }
- }
- } else {
- if (el1 !== el2) {
- return false;
- }
- }
-
- return true;
-};
-
-_snake_case = function _snake_case(str) {
- return str.replace(/([A-Z])/g, function (_, match, index) {
- return '_' + match.toLowerCase();
- });
-};
-
-exports.is_object = function is_object(obj) {
- return obj && _typeof(obj) === 'object' && !Array.isArray(obj);
-};
-
-exports.is_object_literal = function is_object_literal(obj) {
- var test;
- test = obj;
-
- if (_typeof(obj) !== 'object' || obj === null) {
- return false;
- } else {
- if (Object.getPrototypeOf(test) === null) {
- return true;
- }
-
- while (!false) {
- if (Object.getPrototypeOf(test = Object.getPrototypeOf(test)) === null) {
- break;
- }
- }
-
- return Object.getPrototypeOf(obj) === test;
- }
-};
-
-},{}],2:[function(require,module,exports){
(function (setImmediate){(function (){
"use strict";
-function _typeof(obj) { "@babel/helpers - typeof"; if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); }
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.transform = exports["default"] = void 0;
-// Generated by CoffeeScript 2.5.1
-// # Stream Transformer
-// Pass all elements of an array or a stream to transform, filter and add. Features include:
-// * Extends the Node.js "stream.Transform" API.
-// * Both synchrounous and asynchronous support based and user callback
-// arguments signature.
-// * Ability to skip records.
-// * Sequential and concurrent execution using the "parallel" options.
-// Please look at the [README], the [samples] and the [tests] for additional
-// information.
-var Transformer, clone, stream, util;
-stream = require('stream');
-util = require('util');
-
-var _require = require('mixme');
-
-clone = _require.clone;
-
-// ## Usage
-// Callback approach, for ease of use:
-// `transform(records, [options], handler, callback)`
-// Stream API, for maximum of power:
-// `transform([records], [options], handler, [callback])`
-module.exports = function () {
- var argument, callback, error, handler, i, j, len, options, records, result, transform, type;
- options = {};
+var _stream = _interopRequireDefault(require("stream"));
- for (i = j = 0, len = arguments.length; j < len; i = ++j) {
- argument = arguments[i];
- type = _typeof(argument);
+var _util = _interopRequireDefault(require("util"));
- if (argument === null) {
- type = 'null';
- } else if (type === 'object' && Array.isArray(argument)) {
- type = 'array';
- }
+function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { "default": obj }; }
- if (type === 'array') {
- records = argument;
- } else if (type === 'object') {
- options = clone(argument);
- } else if (type === 'function') {
- if (handler && i === arguments.length - 1) {
- callback = argument;
- } else {
- handler = argument;
- }
- } else if (type !== 'null') {
- throw new Error("Invalid Arguments: got ".concat(JSON.stringify(argument), " at position ").concat(i));
- }
- }
+function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); if (enumerableOnly) { symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; }); } keys.push.apply(keys, symbols); } return keys; }
- transform = new Transformer(options, handler);
- error = false;
-
- if (records) {
- setImmediate(function () {
- var k, len1, record;
+function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i] != null ? arguments[i] : {}; if (i % 2) { ownKeys(Object(source), true).forEach(function (key) { _defineProperty(target, key, source[key]); }); } else if (Object.getOwnPropertyDescriptors) { Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)); } else { ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } } return target; }
- for (k = 0, len1 = records.length; k < len1; k++) {
- record = records[k];
+function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }
- if (error) {
- break;
- }
+function _typeof(obj) { "@babel/helpers - typeof"; if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); }
- transform.write(record);
- }
+function _createForOfIteratorHelper(o, allowArrayLike) { var it = typeof Symbol !== "undefined" && o[Symbol.iterator] || o["@@iterator"]; if (!it) { if (Array.isArray(o) || (it = _unsupportedIterableToArray(o)) || allowArrayLike && o && typeof o.length === "number") { if (it) o = it; var i = 0; var F = function F() {}; return { s: F, n: function n() { if (i >= o.length) return { done: true }; return { done: false, value: o[i++] }; }, e: function e(_e) { throw _e; }, f: F }; } throw new TypeError("Invalid attempt to iterate non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method."); } var normalCompletion = true, didErr = false, err; return { s: function s() { it = it.call(o); }, n: function n() { var step = it.next(); normalCompletion = step.done; return step; }, e: function e(_e2) { didErr = true; err = _e2; }, f: function f() { try { if (!normalCompletion && it["return"] != null) it["return"](); } finally { if (didErr) throw err; } } }; }
- return transform.end();
- });
- }
+function _unsupportedIterableToArray(o, minLen) { if (!o) return; if (typeof o === "string") return _arrayLikeToArray(o, minLen); var n = Object.prototype.toString.call(o).slice(8, -1); if (n === "Object" && o.constructor) n = o.constructor.name; if (n === "Map" || n === "Set") return Array.from(o); if (n === "Arguments" || /^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)) return _arrayLikeToArray(o, minLen); }
- if (callback || options.consume) {
- result = [];
- transform.on('readable', function () {
- var record, results;
- results = [];
+function _arrayLikeToArray(arr, len) { if (len == null || len > arr.length) len = arr.length; for (var i = 0, arr2 = new Array(len); i < len; i++) { arr2[i] = arr[i]; } return arr2; }
- while (record = transform.read()) {
- if (callback) {
- results.push(result.push(record));
- } else {
- results.push(void 0);
- }
- }
+var Transformer = function Transformer() {
+ var options = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};
+ var handler = arguments.length > 1 ? arguments[1] : undefined;
+ this.options = options;
- return results;
- });
- transform.on('error', function (err) {
- error = true;
-
- if (callback) {
- return callback(err);
- }
- });
- transform.on('end', function () {
- if (callback && !error) {
- return callback(null, result);
- }
- });
+ if (options.consume === undefined || options.consume === null) {
+ this.options.consume = false;
}
- return transform;
-}; // ## Transformer
-// Options are documented [here](http://csv.js.org/transform/options/).
-
+ this.options.objectMode = true;
-Transformer = function Transformer() {
- var options1 = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};
- var handler1 = arguments.length > 1 ? arguments[1] : undefined;
- var base, base1;
- this.options = options1;
- this.handler = handler1;
+ if (options.parallel === undefined || options.parallel === null) {
+ this.options.parallel = 100;
+ }
- if ((base = this.options).consume == null) {
- base.consume = false;
+ if (options.params === undefined || options.params === null) {
+ options.params = null;
}
- this.options.objectMode = true;
+ this.handler = handler;
- if ((base1 = this.options).parallel == null) {
- base1.parallel = 100;
- }
+ _stream["default"].Transform.call(this, this.options);
- stream.Transform.call(this, this.options);
this.state = {
running: 0,
started: 0,
@@ -321,25 +58,23 @@ Transformer = function Transformer() {
return this;
};
-util.inherits(Transformer, stream.Transform);
-module.exports.Transformer = Transformer;
+_util["default"].inherits(Transformer, _stream["default"].Transform);
Transformer.prototype._transform = function (chunk, encoding, cb) {
var _this = this;
- var callback, err, l;
this.state.started++;
this.state.running++;
if (this.state.running < this.options.parallel) {
cb();
- cb = null;
+ cb = null; // Cancel further callback execution
}
try {
- l = this.handler.length;
+ var l = this.handler.length;
- if (this.options.params != null) {
+ if (this.options.params !== null) {
l--;
}
@@ -348,7 +83,7 @@ Transformer.prototype._transform = function (chunk, encoding, cb) {
this.__done(null, [this.handler.call(this, chunk, this.options.params)], cb);
} else if (l === 2) {
// async
- callback = function callback(err) {
+ var callback = function callback(err) {
for (var _len = arguments.length, chunks = new Array(_len > 1 ? _len - 1 : 0), _key = 1; _key < _len; _key++) {
chunks[_key - 1] = arguments[_key];
}
@@ -358,29 +93,51 @@ Transformer.prototype._transform = function (chunk, encoding, cb) {
this.handler.call(this, chunk, callback, this.options.params);
} else {
- throw Error("Invalid handler arguments");
+ throw Error('Invalid handler arguments');
}
return false;
- } catch (error1) {
- err = error1;
- return this.__done(err);
+ } catch (err) {
+ this.__done(err);
}
};
Transformer.prototype._flush = function (cb) {
this._ending = function () {
if (this.state.running === 0) {
- this._ending = undefined;
- return cb();
+ cb();
}
};
- return this._ending();
-};
+ this._ending();
+}; // Transformer.prototype.__done = function(err, chunks, cb) {
+// var chunk, j, len;
+// this.state.running--;
+// if (err) {
+// return this.emit('error', err);
+// }
+// this.state.finished++;
+// for (j = 0, len = chunks.length; j < len; j++) {
+// chunk = chunks[j];
+// if (typeof chunk === 'number') {
+// chunk = `${chunk}`;
+// }
+// if ((chunk != null) && chunk !== '') {
+// // We dont push empty string
+// // See https://nodejs.org/api/stream.html#stream_readable_push
+// this.push(chunk);
+// }
+// }
+// if (cb) {
+// cb();
+// }
+// if (this._ending) {
+// return this._ending();
+// }
+// };
+
Transformer.prototype.__done = function (err, chunks, cb) {
- var chunk, j, len;
this.state.running--;
if (err) {
@@ -389,18 +146,27 @@ Transformer.prototype.__done = function (err, chunks, cb) {
this.state.finished++;
- for (j = 0, len = chunks.length; j < len; j++) {
- chunk = chunks[j];
+ var _iterator = _createForOfIteratorHelper(chunks),
+ _step;
- if (typeof chunk === 'number') {
- chunk = "".concat(chunk);
- }
+ try {
+ for (_iterator.s(); !(_step = _iterator.n()).done;) {
+ var chunk = _step.value;
- if (chunk != null && chunk !== '') {
- // We dont push empty string
+ if (typeof chunk === 'number') {
+ chunk = "".concat(chunk);
+ } // We dont push empty string
// See https://nodejs.org/api/stream.html#stream_readable_push
- this.push(chunk);
+
+
+ if (chunk !== undefined && chunk !== null && chunk !== '') {
+ this.push(chunk);
+ }
}
+ } catch (err) {
+ _iterator.e(err);
+ } finally {
+ _iterator.f();
}
if (cb) {
@@ -408,14 +174,94 @@ Transformer.prototype.__done = function (err, chunks, cb) {
}
if (this._ending) {
- return this._ending();
+ this._ending();
}
-}; // [readme]: https://github.com/wdavidw/node-stream-transform
-// [samples]: https://github.com/wdavidw/node-stream-transform/tree/master/samples
-// [tests]: https://github.com/wdavidw/node-stream-transform/tree/master/test
+};
+
+var transform = function transform() {
+ var options = {};
+ var callback, handler, records;
+
+ for (var i = 0; i < arguments.length; i++) {
+ var argument = arguments[i];
+
+ var type = _typeof(argument);
+
+ if (argument === null) {
+ type = 'null';
+ } else if (type === 'object' && Array.isArray(argument)) {
+ type = 'array';
+ }
+
+ if (type === 'array') {
+ records = argument;
+ } else if (type === 'object') {
+ options = _objectSpread({}, argument);
+ } else if (type === 'function') {
+ if (handler && i === arguments.length - 1) {
+ callback = argument;
+ } else {
+ handler = argument;
+ }
+ } else if (type !== 'null') {
+ throw new Error("Invalid Arguments: got ".concat(JSON.stringify(argument), " at position ").concat(i));
+ }
+ }
+
+ var transformer = new Transformer(options, handler);
+ var error = false;
+
+ if (records) {
+ setImmediate(function () {
+ var _iterator2 = _createForOfIteratorHelper(records),
+ _step2;
+
+ try {
+ for (_iterator2.s(); !(_step2 = _iterator2.n()).done;) {
+ var record = _step2.value;
+ if (error) break;
+ transformer.write(record);
+ }
+ } catch (err) {
+ _iterator2.e(err);
+ } finally {
+ _iterator2.f();
+ }
+
+ transformer.end();
+ });
+ }
+
+ if (callback || options.consume) {
+ var result = [];
+ transformer.on('readable', function () {
+ var record;
+
+ while (record = transformer.read()) {
+ if (callback) {
+ result.push(record);
+ }
+ }
+ });
+ transformer.on('error', function (err) {
+ error = true;
+ if (callback) callback(err);
+ });
+ transformer.on('end', function () {
+ if (callback && !error) callback(null, result);
+ });
+ }
+
+ return transformer;
+};
+
+exports.transform = transform;
+transform.Transformer = Transformer;
+var _default = transform;
+exports["default"] = _default;
}).call(this)}).call(this,require("timers").setImmediate)
-},{"mixme":1,"stream":25,"timers":41,"util":45}],3:[function(require,module,exports){
+},{"stream":24,"timers":40,"util":44}],2:[function(require,module,exports){
(function (global){(function (){
'use strict';
@@ -444,7 +290,7 @@ module.exports = function availableTypedArrays() {
};
}).call(this)}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
-},{}],4:[function(require,module,exports){
+},{}],3:[function(require,module,exports){
'use strict'
exports.byteLength = byteLength
@@ -596,9 +442,9 @@ function fromByteArray (uint8) {
return parts.join('')
}
-},{}],5:[function(require,module,exports){
+},{}],4:[function(require,module,exports){
-},{}],6:[function(require,module,exports){
+},{}],5:[function(require,module,exports){
(function (Buffer){(function (){
/*!
* The buffer module from node.js, for the browser.
@@ -2379,7 +2225,7 @@ function numberIsNaN (obj) {
}
}).call(this)}).call(this,require("buffer").Buffer)
-},{"base64-js":4,"buffer":6,"ieee754":18}],7:[function(require,module,exports){
+},{"base64-js":3,"buffer":5,"ieee754":17}],6:[function(require,module,exports){
'use strict';
var GetIntrinsic = require('get-intrinsic');
@@ -2396,7 +2242,7 @@ module.exports = function callBoundIntrinsic(name, allowMissing) {
return intrinsic;
};
-},{"./":8,"get-intrinsic":14}],8:[function(require,module,exports){
+},{"./":7,"get-intrinsic":13}],7:[function(require,module,exports){
'use strict';
var bind = require('function-bind');
@@ -2445,7 +2291,7 @@ if ($defineProperty) {
module.exports.apply = applyBind;
}
-},{"function-bind":13,"get-intrinsic":14}],9:[function(require,module,exports){
+},{"function-bind":12,"get-intrinsic":13}],8:[function(require,module,exports){
'use strict';
var GetIntrinsic = require('get-intrinsic');
@@ -2462,7 +2308,7 @@ if ($gOPD) {
module.exports = $gOPD;
-},{"get-intrinsic":14}],10:[function(require,module,exports){
+},{"get-intrinsic":13}],9:[function(require,module,exports){
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
@@ -2961,7 +2807,7 @@ function eventTargetAgnosticAddListener(emitter, name, listener, flags) {
}
}
-},{}],11:[function(require,module,exports){
+},{}],10:[function(require,module,exports){
var hasOwn = Object.prototype.hasOwnProperty;
var toString = Object.prototype.toString;
@@ -2985,7 +2831,7 @@ module.exports = function forEach (obj, fn, ctx) {
};
-},{}],12:[function(require,module,exports){
+},{}],11:[function(require,module,exports){
'use strict';
/* eslint no-invalid-this: 1 */
@@ -3039,14 +2885,14 @@ module.exports = function bind(that) {
return bound;
};
-},{}],13:[function(require,module,exports){
+},{}],12:[function(require,module,exports){
'use strict';
var implementation = require('./implementation');
module.exports = Function.prototype.bind || implementation;
-},{"./implementation":12}],14:[function(require,module,exports){
+},{"./implementation":11}],13:[function(require,module,exports){
'use strict';
var undefined;
@@ -3378,7 +3224,7 @@ module.exports = function GetIntrinsic(name, allowMissing) {
return value;
};
-},{"function-bind":13,"has":17,"has-symbols":15}],15:[function(require,module,exports){
+},{"function-bind":12,"has":16,"has-symbols":14}],14:[function(require,module,exports){
'use strict';
var origSymbol = typeof Symbol !== 'undefined' && Symbol;
@@ -3393,7 +3239,7 @@ module.exports = function hasNativeSymbols() {
return hasSymbolSham();
};
-},{"./shams":16}],16:[function(require,module,exports){
+},{"./shams":15}],15:[function(require,module,exports){
'use strict';
/* eslint complexity: [2, 18], max-statements: [2, 33] */
@@ -3437,14 +3283,14 @@ module.exports = function hasSymbols() {
return true;
};
-},{}],17:[function(require,module,exports){
+},{}],16:[function(require,module,exports){
'use strict';
var bind = require('function-bind');
module.exports = bind.call(Function.call, Object.prototype.hasOwnProperty);
-},{"function-bind":13}],18:[function(require,module,exports){
+},{"function-bind":12}],17:[function(require,module,exports){
/*! ieee754. BSD-3-Clause License. Feross Aboukhadijeh */
exports.read = function (buffer, offset, isLE, mLen, nBytes) {
var e, m
@@ -3531,7 +3377,7 @@ exports.write = function (buffer, value, offset, isLE, mLen, nBytes) {
buffer[offset + i - d] |= s * 128
}
-},{}],19:[function(require,module,exports){
+},{}],18:[function(require,module,exports){
if (typeof Object.create === 'function') {
// implementation from standard node.js 'util' module
module.exports = function inherits(ctor, superCtor) {
@@ -3560,7 +3406,7 @@ if (typeof Object.create === 'function') {
}
}
-},{}],20:[function(require,module,exports){
+},{}],19:[function(require,module,exports){
'use strict';
var hasToStringTag = typeof Symbol === 'function' && typeof Symbol.toStringTag === 'symbol';
@@ -3595,7 +3441,7 @@ isStandardArguments.isLegacyArguments = isLegacyArguments; // for tests
module.exports = supportsStandardArguments ? isStandardArguments : isLegacyArguments;
-},{"call-bind/callBound":7}],21:[function(require,module,exports){
+},{"call-bind/callBound":6}],20:[function(require,module,exports){
'use strict';
var toStr = Object.prototype.toString;
@@ -3635,7 +3481,7 @@ module.exports = function isGeneratorFunction(fn) {
return getProto(fn) === GeneratorFunction;
};
-},{}],22:[function(require,module,exports){
+},{}],21:[function(require,module,exports){
(function (global){(function (){
'use strict';
@@ -3700,7 +3546,7 @@ module.exports = function isTypedArray(value) {
};
}).call(this)}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
-},{"available-typed-arrays":3,"call-bind/callBound":7,"es-abstract/helpers/getOwnPropertyDescriptor":9,"foreach":11,"has-symbols":15}],23:[function(require,module,exports){
+},{"available-typed-arrays":2,"call-bind/callBound":6,"es-abstract/helpers/getOwnPropertyDescriptor":8,"foreach":10,"has-symbols":14}],22:[function(require,module,exports){
// shim for using process in browser
var process = module.exports = {};
@@ -3886,7 +3732,7 @@ process.chdir = function (dir) {
};
process.umask = function() { return 0; };
-},{}],24:[function(require,module,exports){
+},{}],23:[function(require,module,exports){
/*! safe-buffer. MIT License. Feross Aboukhadijeh */
/* eslint-disable node/no-deprecated-api */
var buffer = require('buffer')
@@ -3953,7 +3799,7 @@ SafeBuffer.allocUnsafeSlow = function (size) {
return buffer.SlowBuffer(size)
}
-},{"buffer":6}],25:[function(require,module,exports){
+},{"buffer":5}],24:[function(require,module,exports){
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
@@ -4084,7 +3930,7 @@ Stream.prototype.pipe = function(dest, options) {
return dest;
};
-},{"events":10,"inherits":19,"readable-stream/lib/_stream_duplex.js":27,"readable-stream/lib/_stream_passthrough.js":28,"readable-stream/lib/_stream_readable.js":29,"readable-stream/lib/_stream_transform.js":30,"readable-stream/lib/_stream_writable.js":31,"readable-stream/lib/internal/streams/end-of-stream.js":35,"readable-stream/lib/internal/streams/pipeline.js":37}],26:[function(require,module,exports){
+},{"events":9,"inherits":18,"readable-stream/lib/_stream_duplex.js":26,"readable-stream/lib/_stream_passthrough.js":27,"readable-stream/lib/_stream_readable.js":28,"readable-stream/lib/_stream_transform.js":29,"readable-stream/lib/_stream_writable.js":30,"readable-stream/lib/internal/streams/end-of-stream.js":34,"readable-stream/lib/internal/streams/pipeline.js":36}],25:[function(require,module,exports){
'use strict';
function _inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; subClass.__proto__ = superClass; }
@@ -4213,7 +4059,7 @@ createErrorType('ERR_UNKNOWN_ENCODING', function (arg) {
createErrorType('ERR_STREAM_UNSHIFT_AFTER_END_EVENT', 'stream.unshift() after end event');
module.exports.codes = codes;
-},{}],27:[function(require,module,exports){
+},{}],26:[function(require,module,exports){
(function (process){(function (){
// Copyright Joyent, Inc. and other Node contributors.
//
@@ -4355,7 +4201,7 @@ Object.defineProperty(Duplex.prototype, 'destroyed', {
}
});
}).call(this)}).call(this,require('_process'))
-},{"./_stream_readable":29,"./_stream_writable":31,"_process":23,"inherits":19}],28:[function(require,module,exports){
+},{"./_stream_readable":28,"./_stream_writable":30,"_process":22,"inherits":18}],27:[function(require,module,exports){
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
@@ -4395,7 +4241,7 @@ function PassThrough(options) {
PassThrough.prototype._transform = function (chunk, encoding, cb) {
cb(null, chunk);
};
-},{"./_stream_transform":30,"inherits":19}],29:[function(require,module,exports){
+},{"./_stream_transform":29,"inherits":18}],28:[function(require,module,exports){
(function (process,global){(function (){
// Copyright Joyent, Inc. and other Node contributors.
//
@@ -5522,7 +5368,7 @@ function indexOf(xs, x) {
return -1;
}
}).call(this)}).call(this,require('_process'),typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
-},{"../errors":26,"./_stream_duplex":27,"./internal/streams/async_iterator":32,"./internal/streams/buffer_list":33,"./internal/streams/destroy":34,"./internal/streams/from":36,"./internal/streams/state":38,"./internal/streams/stream":39,"_process":23,"buffer":6,"events":10,"inherits":19,"string_decoder/":40,"util":5}],30:[function(require,module,exports){
+},{"../errors":25,"./_stream_duplex":26,"./internal/streams/async_iterator":31,"./internal/streams/buffer_list":32,"./internal/streams/destroy":33,"./internal/streams/from":35,"./internal/streams/state":37,"./internal/streams/stream":38,"_process":22,"buffer":5,"events":9,"inherits":18,"string_decoder/":39,"util":4}],29:[function(require,module,exports){
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
@@ -5724,7 +5570,7 @@ function done(stream, er, data) {
if (stream._transformState.transforming) throw new ERR_TRANSFORM_ALREADY_TRANSFORMING();
return stream.push(null);
}
-},{"../errors":26,"./_stream_duplex":27,"inherits":19}],31:[function(require,module,exports){
+},{"../errors":25,"./_stream_duplex":26,"inherits":18}],30:[function(require,module,exports){
(function (process,global){(function (){
// Copyright Joyent, Inc. and other Node contributors.
//
@@ -6424,7 +6270,7 @@ Writable.prototype._destroy = function (err, cb) {
cb(err);
};
}).call(this)}).call(this,require('_process'),typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
-},{"../errors":26,"./_stream_duplex":27,"./internal/streams/destroy":34,"./internal/streams/state":38,"./internal/streams/stream":39,"_process":23,"buffer":6,"inherits":19,"util-deprecate":42}],32:[function(require,module,exports){
+},{"../errors":25,"./_stream_duplex":26,"./internal/streams/destroy":33,"./internal/streams/state":37,"./internal/streams/stream":38,"_process":22,"buffer":5,"inherits":18,"util-deprecate":41}],31:[function(require,module,exports){
(function (process){(function (){
'use strict';
@@ -6634,7 +6480,7 @@ var createReadableStreamAsyncIterator = function createReadableStreamAsyncIterat
module.exports = createReadableStreamAsyncIterator;
}).call(this)}).call(this,require('_process'))
-},{"./end-of-stream":35,"_process":23}],33:[function(require,module,exports){
+},{"./end-of-stream":34,"_process":22}],32:[function(require,module,exports){
'use strict';
function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); if (enumerableOnly) symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; }); keys.push.apply(keys, symbols); } return keys; }
@@ -6845,7 +6691,7 @@ function () {
return BufferList;
}();
-},{"buffer":6,"util":5}],34:[function(require,module,exports){
+},{"buffer":5,"util":4}],33:[function(require,module,exports){
(function (process){(function (){
'use strict'; // undocumented cb() API, needed for core, not for public API
@@ -6953,7 +6799,7 @@ module.exports = {
errorOrDestroy: errorOrDestroy
};
}).call(this)}).call(this,require('_process'))
-},{"_process":23}],35:[function(require,module,exports){
+},{"_process":22}],34:[function(require,module,exports){
// Ported from https://github.com/mafintosh/end-of-stream with
// permission from the author, Mathias Buus (@mafintosh).
'use strict';
@@ -7058,12 +6904,12 @@ function eos(stream, opts, callback) {
}
module.exports = eos;
-},{"../../../errors":26}],36:[function(require,module,exports){
+},{"../../../errors":25}],35:[function(require,module,exports){
module.exports = function () {
throw new Error('Readable.from is not available in the browser')
};
-},{}],37:[function(require,module,exports){
+},{}],36:[function(require,module,exports){
// Ported from https://github.com/mafintosh/pump with
// permission from the author, Mathias Buus (@mafintosh).
'use strict';
@@ -7161,7 +7007,7 @@ function pipeline() {
}
module.exports = pipeline;
-},{"../../../errors":26,"./end-of-stream":35}],38:[function(require,module,exports){
+},{"../../../errors":25,"./end-of-stream":34}],37:[function(require,module,exports){
'use strict';
var ERR_INVALID_OPT_VALUE = require('../../../errors').codes.ERR_INVALID_OPT_VALUE;
@@ -7189,10 +7035,10 @@ function getHighWaterMark(state, options, duplexKey, isDuplex) {
module.exports = {
getHighWaterMark: getHighWaterMark
};
-},{"../../../errors":26}],39:[function(require,module,exports){
+},{"../../../errors":25}],38:[function(require,module,exports){
module.exports = require('events').EventEmitter;
-},{"events":10}],40:[function(require,module,exports){
+},{"events":9}],39:[function(require,module,exports){
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
@@ -7489,7 +7335,7 @@ function simpleWrite(buf) {
function simpleEnd(buf) {
return buf && buf.length ? this.write(buf) : '';
}
-},{"safe-buffer":24}],41:[function(require,module,exports){
+},{"safe-buffer":23}],40:[function(require,module,exports){
(function (setImmediate,clearImmediate){(function (){
var nextTick = require('process/browser.js').nextTick;
var apply = Function.prototype.apply;
@@ -7568,7 +7414,7 @@ exports.clearImmediate = typeof clearImmediate === "function" ? clearImmediate :
delete immediateIds[id];
};
}).call(this)}).call(this,require("timers").setImmediate,require("timers").clearImmediate)
-},{"process/browser.js":23,"timers":41}],42:[function(require,module,exports){
+},{"process/browser.js":22,"timers":40}],41:[function(require,module,exports){
(function (global){(function (){
/**
@@ -7639,14 +7485,14 @@ function config (name) {
}
}).call(this)}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
-},{}],43:[function(require,module,exports){
+},{}],42:[function(require,module,exports){
module.exports = function isBuffer(arg) {
return arg && typeof arg === 'object'
&& typeof arg.copy === 'function'
&& typeof arg.fill === 'function'
&& typeof arg.readUInt8 === 'function';
}
-},{}],44:[function(require,module,exports){
+},{}],43:[function(require,module,exports){
// Currently in sync with Node.js lib/internal/util/types.js
// https://github.com/nodejs/node/commit/112cc7c27551254aa2b17098fb774867f05ed0d9
@@ -7982,7 +7828,7 @@ exports.isAnyArrayBuffer = isAnyArrayBuffer;
});
});
-},{"is-arguments":20,"is-generator-function":21,"is-typed-array":22,"which-typed-array":46}],45:[function(require,module,exports){
+},{"is-arguments":19,"is-generator-function":20,"is-typed-array":21,"which-typed-array":45}],44:[function(require,module,exports){
(function (process){(function (){
// Copyright Joyent, Inc. and other Node contributors.
//
@@ -8701,7 +8547,7 @@ function callbackify(original) {
exports.callbackify = callbackify;
}).call(this)}).call(this,require('_process'))
-},{"./support/isBuffer":43,"./support/types":44,"_process":23,"inherits":19}],46:[function(require,module,exports){
+},{"./support/isBuffer":42,"./support/types":43,"_process":22,"inherits":18}],45:[function(require,module,exports){
(function (global){(function (){
'use strict';
@@ -8761,5 +8607,5 @@ module.exports = function whichTypedArray(value) {
};
}).call(this)}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
-},{"available-typed-arrays":3,"call-bind/callBound":7,"es-abstract/helpers/getOwnPropertyDescriptor":9,"foreach":11,"has-symbols":15,"is-typed-array":22}]},{},[2])(2)
+},{"available-typed-arrays":2,"call-bind/callBound":6,"es-abstract/helpers/getOwnPropertyDescriptor":8,"foreach":10,"has-symbols":14,"is-typed-array":21}]},{},[1])(1)
});
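The csv-generate hunks above only renumber browserify module ids: removing one module from the bundle shifts every subsequent id down by one, in the per-module dependency maps as well as in the trailing entry id ([2])(2) becoming [1])(1)). The following is a simplified, hypothetical sketch of the pack layout those maps belong to, not the actual bundle contents:

// Hypothetical sketch of the browserify pack format being renumbered above.
// Each id maps to [factory, dependencyMap]; the final array lists the entry id.
// Dropping one module shifts every later id (and the entry id) down by one,
// which is all that the hunks above change.
(function pack(modules, cache, entry) {
  function load(id) {
    if (!cache[id]) {
      var mod = { exports: {} };
      cache[id] = mod;
      modules[id][0].call(mod.exports, function (name) {
        // resolve a require() call through this module's dependency map
        return load(modules[id][1][name] || name);
      }, mod, mod.exports);
    }
    return cache[id].exports;
  }
  entry.forEach(load);
})({
  1: [function (require, module, exports) {
    module.exports = require('dep') + '!';   // 'dep' resolves via the map below
  }, { 'dep': 2 }],
  2: [function (require, module, exports) {
    module.exports = 'hello';
  }, {}]
}, {}, [1]);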
diff --git a/packages/stream-transform/lib/browser/sync.js b/packages/stream-transform/lib/browser/sync.js
index 4cb1e3210..d1adaef12 100644
--- a/packages/stream-transform/lib/browser/sync.js
+++ b/packages/stream-transform/lib/browser/sync.js
@@ -1,318 +1,55 @@
(function(f){if(typeof exports==="object"&&typeof module!=="undefined"){module.exports=f()}else if(typeof define==="function"&&define.amd){define([],f)}else{var g;if(typeof window!=="undefined"){g=window}else if(typeof global!=="undefined"){g=global}else if(typeof self!=="undefined"){g=self}else{g=this}g.parse = f()}})(function(){var define,module,exports;return (function(){function r(e,n,t){function o(i,f){if(!n[i]){if(!e[i]){var c="function"==typeof require&&require;if(!f&&c)return c(i,!0);if(u)return u(i,!0);var a=new Error("Cannot find module '"+i+"'");throw a.code="MODULE_NOT_FOUND",a}var p=n[i]={exports:{}};e[i][0].call(p.exports,function(r){var n=e[i][1][r];return o(n||r)},p,p.exports,r,e,n,t)}return n[i].exports}for(var u="function"==typeof require&&require,i=0;i ref; i = 1 <= ref ? ++j : --j) {
- source = arguments[i];
-
- if (exports.is_object_literal(source)) {
- if (!exports.is_object_literal(target)) {
- target = {};
- }
-
- for (name in source) {
- if (name === '__proto__') {
- continue;
- }
-
- target[name] = exports.mutate(target[name], source[name]);
- }
- } else if (Array.isArray(source)) {
- target = function () {
- var k, len, results;
- results = [];
-
- for (k = 0, len = source.length; k < len; k++) {
- v = source[k];
- results.push(exports.mutate(void 0, v));
- }
-
- return results;
- }();
- } else if (source !== void 0) {
- target = source;
- }
- }
-
- return target;
-};
-
-exports.snake_case = function snake_case(source) {
- var convert = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : true;
- var name, src, target, u;
- target = {};
-
- if (exports.is_object_literal(source)) {
- u = typeof convert === 'number' && convert > 0 ? convert - 1 : convert;
-
- for (name in source) {
- src = source[name];
-
- if (convert) {
- name = _snake_case(name);
- }
-
- target[name] = exports.snake_case(src, u);
- }
- } else {
- target = source;
- }
-
- return target;
-};
-
-exports.compare = function compare(el1, el2) {
- var i, j, k, key, keys1, keys2, len, ref;
-
- if (exports.is_object_literal(el1)) {
- if (!exports.is_object_literal(el2)) {
- return false;
- }
-
- keys1 = Object.keys(el1).sort();
- keys2 = Object.keys(el2).sort();
-
- if (keys1.length !== keys2.length) {
- return false;
- }
-
- for (i = j = 0, len = keys1.length; j < len; i = ++j) {
- key = keys1[i];
-
- if (key !== keys2[i]) {
- return false;
- }
-
- if (!exports.compare(el1[key], el2[key])) {
- return false;
- }
- }
- } else if (Array.isArray(el1)) {
- if (!Array.isArray(el2)) {
- return false;
- }
-
- if (el1.length !== el2.length) {
- return false;
- }
-
- for (i = k = 0, ref = el1.length; 0 <= ref ? k < ref : k > ref; i = 0 <= ref ? ++k : --k) {
- if (!exports.compare(el1[i], el2[i])) {
- return false;
- }
- }
- } else {
- if (el1 !== el2) {
- return false;
- }
- }
-
- return true;
-};
-
-_snake_case = function _snake_case(str) {
- return str.replace(/([A-Z])/g, function (_, match, index) {
- return '_' + match.toLowerCase();
- });
-};
-
-exports.is_object = function is_object(obj) {
- return obj && _typeof(obj) === 'object' && !Array.isArray(obj);
-};
-
-exports.is_object_literal = function is_object_literal(obj) {
- var test;
- test = obj;
-
- if (_typeof(obj) !== 'object' || obj === null) {
- return false;
- } else {
- if (Object.getPrototypeOf(test) === null) {
- return true;
- }
-
- while (!false) {
- if (Object.getPrototypeOf(test = Object.getPrototypeOf(test)) === null) {
- break;
- }
- }
-
- return Object.getPrototypeOf(obj) === test;
- }
-};
-
-},{}],2:[function(require,module,exports){
(function (setImmediate){(function (){
"use strict";
-function _typeof(obj) { "@babel/helpers - typeof"; if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); }
-
-// Generated by CoffeeScript 2.5.1
-// # Stream Transformer
-// Pass all elements of an array or a stream to transform, filter and add. Features include:
-// * Extends the Node.js "stream.Transform" API.
-// * Both synchrounous and asynchronous support based and user callback
-// arguments signature.
-// * Ability to skip records.
-// * Sequential and concurrent execution using the "parallel" options.
-// Please look at the [README], the [samples] and the [tests] for additional
-// information.
-var Transformer, clone, stream, util;
-stream = require('stream');
-util = require('util');
-
-var _require = require('mixme');
-
-clone = _require.clone;
-
-// ## Usage
-// Callback approach, for ease of use:
-// `transform(records, [options], handler, callback)`
-// Stream API, for maximum of power:
-// `transform([records], [options], handler, [callback])`
-module.exports = function () {
- var argument, callback, error, handler, i, j, len, options, records, result, transform, type;
- options = {};
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.transform = exports["default"] = void 0;
- for (i = j = 0, len = arguments.length; j < len; i = ++j) {
- argument = arguments[i];
- type = _typeof(argument);
+var _stream = _interopRequireDefault(require("stream"));
- if (argument === null) {
- type = 'null';
- } else if (type === 'object' && Array.isArray(argument)) {
- type = 'array';
- }
-
- if (type === 'array') {
- records = argument;
- } else if (type === 'object') {
- options = clone(argument);
- } else if (type === 'function') {
- if (handler && i === arguments.length - 1) {
- callback = argument;
- } else {
- handler = argument;
- }
- } else if (type !== 'null') {
- throw new Error("Invalid Arguments: got ".concat(JSON.stringify(argument), " at position ").concat(i));
- }
- }
+var _util = _interopRequireDefault(require("util"));
- transform = new Transformer(options, handler);
- error = false;
+function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { "default": obj }; }
- if (records) {
- setImmediate(function () {
- var k, len1, record;
+function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); if (enumerableOnly) { symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; }); } keys.push.apply(keys, symbols); } return keys; }
- for (k = 0, len1 = records.length; k < len1; k++) {
- record = records[k];
+function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i] != null ? arguments[i] : {}; if (i % 2) { ownKeys(Object(source), true).forEach(function (key) { _defineProperty(target, key, source[key]); }); } else if (Object.getOwnPropertyDescriptors) { Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)); } else { ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } } return target; }
- if (error) {
- break;
- }
+function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }
- transform.write(record);
- }
+function _typeof(obj) { "@babel/helpers - typeof"; if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); }
- return transform.end();
- });
- }
+function _createForOfIteratorHelper(o, allowArrayLike) { var it = typeof Symbol !== "undefined" && o[Symbol.iterator] || o["@@iterator"]; if (!it) { if (Array.isArray(o) || (it = _unsupportedIterableToArray(o)) || allowArrayLike && o && typeof o.length === "number") { if (it) o = it; var i = 0; var F = function F() {}; return { s: F, n: function n() { if (i >= o.length) return { done: true }; return { done: false, value: o[i++] }; }, e: function e(_e) { throw _e; }, f: F }; } throw new TypeError("Invalid attempt to iterate non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method."); } var normalCompletion = true, didErr = false, err; return { s: function s() { it = it.call(o); }, n: function n() { var step = it.next(); normalCompletion = step.done; return step; }, e: function e(_e2) { didErr = true; err = _e2; }, f: function f() { try { if (!normalCompletion && it["return"] != null) it["return"](); } finally { if (didErr) throw err; } } }; }
- if (callback || options.consume) {
- result = [];
- transform.on('readable', function () {
- var record, results;
- results = [];
+function _unsupportedIterableToArray(o, minLen) { if (!o) return; if (typeof o === "string") return _arrayLikeToArray(o, minLen); var n = Object.prototype.toString.call(o).slice(8, -1); if (n === "Object" && o.constructor) n = o.constructor.name; if (n === "Map" || n === "Set") return Array.from(o); if (n === "Arguments" || /^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)) return _arrayLikeToArray(o, minLen); }
- while (record = transform.read()) {
- if (callback) {
- results.push(result.push(record));
- } else {
- results.push(void 0);
- }
- }
+function _arrayLikeToArray(arr, len) { if (len == null || len > arr.length) len = arr.length; for (var i = 0, arr2 = new Array(len); i < len; i++) { arr2[i] = arr[i]; } return arr2; }
- return results;
- });
- transform.on('error', function (err) {
- error = true;
+var Transformer = function Transformer() {
+ var options = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};
+ var handler = arguments.length > 1 ? arguments[1] : undefined;
+ this.options = options;
- if (callback) {
- return callback(err);
- }
- });
- transform.on('end', function () {
- if (callback && !error) {
- return callback(null, result);
- }
- });
+ if (options.consume === undefined || options.consume === null) {
+ this.options.consume = false;
}
- return transform;
-}; // ## Transformer
-// Options are documented [here](http://csv.js.org/transform/options/).
-
+ this.options.objectMode = true;
-Transformer = function Transformer() {
- var options1 = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};
- var handler1 = arguments.length > 1 ? arguments[1] : undefined;
- var base, base1;
- this.options = options1;
- this.handler = handler1;
+ if (options.parallel === undefined || options.parallel === null) {
+ this.options.parallel = 100;
+ }
- if ((base = this.options).consume == null) {
- base.consume = false;
+ if (options.params === undefined || options.params === null) {
+ options.params = null;
}
- this.options.objectMode = true;
+ this.handler = handler;
- if ((base1 = this.options).parallel == null) {
- base1.parallel = 100;
- }
+ _stream["default"].Transform.call(this, this.options);
- stream.Transform.call(this, this.options);
this.state = {
running: 0,
started: 0,
@@ -321,25 +58,23 @@ Transformer = function Transformer() {
return this;
};
-util.inherits(Transformer, stream.Transform);
-module.exports.Transformer = Transformer;
+_util["default"].inherits(Transformer, _stream["default"].Transform);
Transformer.prototype._transform = function (chunk, encoding, cb) {
var _this = this;
- var callback, err, l;
this.state.started++;
this.state.running++;
if (this.state.running < this.options.parallel) {
cb();
- cb = null;
+ cb = null; // Cancel further callback execution
}
try {
- l = this.handler.length;
+ var l = this.handler.length;
- if (this.options.params != null) {
+ if (this.options.params !== null) {
l--;
}
@@ -348,7 +83,7 @@ Transformer.prototype._transform = function (chunk, encoding, cb) {
this.__done(null, [this.handler.call(this, chunk, this.options.params)], cb);
} else if (l === 2) {
// async
- callback = function callback(err) {
+ var callback = function callback(err) {
for (var _len = arguments.length, chunks = new Array(_len > 1 ? _len - 1 : 0), _key = 1; _key < _len; _key++) {
chunks[_key - 1] = arguments[_key];
}
@@ -358,29 +93,51 @@ Transformer.prototype._transform = function (chunk, encoding, cb) {
this.handler.call(this, chunk, callback, this.options.params);
} else {
- throw Error("Invalid handler arguments");
+ throw Error('Invalid handler arguments');
}
return false;
- } catch (error1) {
- err = error1;
- return this.__done(err);
+ } catch (err) {
+ this.__done(err);
}
};
Transformer.prototype._flush = function (cb) {
this._ending = function () {
if (this.state.running === 0) {
- this._ending = undefined;
- return cb();
+ cb();
}
};
- return this._ending();
-};
+ this._ending();
+}; // Transformer.prototype.__done = function(err, chunks, cb) {
+// var chunk, j, len;
+// this.state.running--;
+// if (err) {
+// return this.emit('error', err);
+// }
+// this.state.finished++;
+// for (j = 0, len = chunks.length; j < len; j++) {
+// chunk = chunks[j];
+// if (typeof chunk === 'number') {
+// chunk = `${chunk}`;
+// }
+// if ((chunk != null) && chunk !== '') {
+// // We dont push empty string
+// // See https://nodejs.org/api/stream.html#stream_readable_push
+// this.push(chunk);
+// }
+// }
+// if (cb) {
+// cb();
+// }
+// if (this._ending) {
+// return this._ending();
+// }
+// };
+
Transformer.prototype.__done = function (err, chunks, cb) {
- var chunk, j, len;
this.state.running--;
if (err) {
@@ -389,18 +146,27 @@ Transformer.prototype.__done = function (err, chunks, cb) {
this.state.finished++;
- for (j = 0, len = chunks.length; j < len; j++) {
- chunk = chunks[j];
+ var _iterator = _createForOfIteratorHelper(chunks),
+ _step;
- if (typeof chunk === 'number') {
- chunk = "".concat(chunk);
- }
+ try {
+ for (_iterator.s(); !(_step = _iterator.n()).done;) {
+ var chunk = _step.value;
- if (chunk != null && chunk !== '') {
- // We dont push empty string
+ if (typeof chunk === 'number') {
+ chunk = "".concat(chunk);
+ } // We dont push empty string
// See https://nodejs.org/api/stream.html#stream_readable_push
- this.push(chunk);
+
+
+ if (chunk !== undefined && chunk !== null && chunk !== '') {
+ this.push(chunk);
+ }
}
+ } catch (err) {
+ _iterator.e(err);
+ } finally {
+ _iterator.f();
}
if (cb) {
@@ -408,41 +174,125 @@ Transformer.prototype.__done = function (err, chunks, cb) {
}
if (this._ending) {
- return this._ending();
+ this._ending();
+ }
+};
+
+var transform = function transform() {
+ var options = {};
+ var callback, handler, records;
+
+ for (var i = 0; i < arguments.length; i++) {
+ var argument = arguments[i];
+
+ var type = _typeof(argument);
+
+ if (argument === null) {
+ type = 'null';
+ } else if (type === 'object' && Array.isArray(argument)) {
+ type = 'array';
+ }
+
+ if (type === 'array') {
+ records = argument;
+ } else if (type === 'object') {
+ options = _objectSpread({}, argument);
+ } else if (type === 'function') {
+ if (handler && i === arguments.length - 1) {
+ callback = argument;
+ } else {
+ handler = argument;
+ }
+ } else if (type !== 'null') {
+ throw new Error("Invalid Arguments: got ".concat(JSON.stringify(argument), " at position ").concat(i));
+ }
+ }
+
+ var transformer = new Transformer(options, handler);
+ var error = false;
+
+ if (records) {
+ setImmediate(function () {
+ var _iterator2 = _createForOfIteratorHelper(records),
+ _step2;
+
+ try {
+ for (_iterator2.s(); !(_step2 = _iterator2.n()).done;) {
+ var record = _step2.value;
+ if (error) break;
+ transformer.write(record);
+ }
+ } catch (err) {
+ _iterator2.e(err);
+ } finally {
+ _iterator2.f();
+ }
+
+ transformer.end();
+ });
}
-}; // [readme]: https://github.com/wdavidw/node-stream-transform
-// [samples]: https://github.com/wdavidw/node-stream-transform/tree/master/samples
-// [tests]: https://github.com/wdavidw/node-stream-transform/tree/master/test
+
+ if (callback || options.consume) {
+ var result = [];
+ transformer.on('readable', function () {
+ var record;
+
+ while (record = transformer.read()) {
+ if (callback) {
+ result.push(record);
+ }
+ }
+ });
+ transformer.on('error', function (err) {
+ error = true;
+ if (callback) callback(err);
+ });
+ transformer.on('end', function () {
+ if (callback && !error) callback(null, result);
+ });
+ }
+
+ return transformer;
+};
+
+exports.transform = transform;
+transform.Transformer = Transformer;
+var _default = transform;
+exports["default"] = _default;
}).call(this)}).call(this,require("timers").setImmediate)
-},{"mixme":1,"stream":26,"timers":42,"util":46}],3:[function(require,module,exports){
+},{"stream":25,"timers":41,"util":45}],2:[function(require,module,exports){
"use strict";
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports["default"] = _default;
+
+var _index = _interopRequireDefault(require("./index.js"));
+
+function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { "default": obj }; }
+
function _createForOfIteratorHelper(o, allowArrayLike) { var it = typeof Symbol !== "undefined" && o[Symbol.iterator] || o["@@iterator"]; if (!it) { if (Array.isArray(o) || (it = _unsupportedIterableToArray(o)) || allowArrayLike && o && typeof o.length === "number") { if (it) o = it; var i = 0; var F = function F() {}; return { s: F, n: function n() { if (i >= o.length) return { done: true }; return { done: false, value: o[i++] }; }, e: function e(_e) { throw _e; }, f: F }; } throw new TypeError("Invalid attempt to iterate non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method."); } var normalCompletion = true, didErr = false, err; return { s: function s() { it = it.call(o); }, n: function n() { var step = it.next(); normalCompletion = step.done; return step; }, e: function e(_e2) { didErr = true; err = _e2; }, f: function f() { try { if (!normalCompletion && it["return"] != null) it["return"](); } finally { if (didErr) throw err; } } }; }
function _unsupportedIterableToArray(o, minLen) { if (!o) return; if (typeof o === "string") return _arrayLikeToArray(o, minLen); var n = Object.prototype.toString.call(o).slice(8, -1); if (n === "Object" && o.constructor) n = o.constructor.name; if (n === "Map" || n === "Set") return Array.from(o); if (n === "Arguments" || /^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)) return _arrayLikeToArray(o, minLen); }
function _arrayLikeToArray(arr, len) { if (len == null || len > arr.length) len = arr.length; for (var i = 0, arr2 = new Array(len); i < len; i++) { arr2[i] = arr[i]; } return arr2; }
-function _typeof(obj) { "@babel/helpers - typeof"; if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); }
+function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); if (enumerableOnly) { symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; }); } keys.push.apply(keys, symbols); } return keys; }
-/*
-Stream Transform - sync module
+function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i] != null ? arguments[i] : {}; if (i % 2) { ownKeys(Object(source), true).forEach(function (key) { _defineProperty(target, key, source[key]); }); } else if (Object.getOwnPropertyDescriptors) { Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)); } else { ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } } return target; }
-Please look at the [project documentation](https://csv.js.org/transform/) for
-additional information.
-*/
-var transform = require('.');
+function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }
-var _require = require('mixme'),
- clone = _require.clone;
+function _typeof(obj) { "@babel/helpers - typeof"; if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); }
-module.exports = function () {
+function _default() {
// Import arguments normalization
- var handler, callback;
+ var handler, callback, records;
var options = {};
- for (i in arguments) {
+ for (var i in arguments) {
var argument = arguments[i];
var type = _typeof(argument);
@@ -456,7 +306,7 @@ module.exports = function () {
if (type === 'array') {
records = argument;
} else if (type === 'object') {
- options = clone(argument);
+ options = _objectSpread({}, argument);
} else if (type === 'function') {
if (handler && i === arguments.length - 1) {
callback = argument;
@@ -481,7 +331,7 @@ module.exports = function () {
var chunks = [];
- var transformer = new transform.Transformer(options, handler);
+ var transformer = new _index["default"].Transformer(options, handler);
transformer.push = function (chunk) {
chunks.push(chunk);
@@ -503,9 +353,9 @@ module.exports = function () {
}
return chunks;
-};
+}
-},{".":2,"mixme":1}],4:[function(require,module,exports){
+},{"./index.js":1}],3:[function(require,module,exports){
(function (global){(function (){
'use strict';
@@ -534,7 +384,7 @@ module.exports = function availableTypedArrays() {
};
}).call(this)}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
-},{}],5:[function(require,module,exports){
+},{}],4:[function(require,module,exports){
'use strict'
exports.byteLength = byteLength
@@ -686,9 +536,9 @@ function fromByteArray (uint8) {
return parts.join('')
}
-},{}],6:[function(require,module,exports){
+},{}],5:[function(require,module,exports){
-},{}],7:[function(require,module,exports){
+},{}],6:[function(require,module,exports){
(function (Buffer){(function (){
/*!
* The buffer module from node.js, for the browser.
@@ -2469,7 +2319,7 @@ function numberIsNaN (obj) {
}
}).call(this)}).call(this,require("buffer").Buffer)
-},{"base64-js":5,"buffer":7,"ieee754":19}],8:[function(require,module,exports){
+},{"base64-js":4,"buffer":6,"ieee754":18}],7:[function(require,module,exports){
'use strict';
var GetIntrinsic = require('get-intrinsic');
@@ -2486,7 +2336,7 @@ module.exports = function callBoundIntrinsic(name, allowMissing) {
return intrinsic;
};
-},{"./":9,"get-intrinsic":15}],9:[function(require,module,exports){
+},{"./":8,"get-intrinsic":14}],8:[function(require,module,exports){
'use strict';
var bind = require('function-bind');
@@ -2535,7 +2385,7 @@ if ($defineProperty) {
module.exports.apply = applyBind;
}
-},{"function-bind":14,"get-intrinsic":15}],10:[function(require,module,exports){
+},{"function-bind":13,"get-intrinsic":14}],9:[function(require,module,exports){
'use strict';
var GetIntrinsic = require('get-intrinsic');
@@ -2552,7 +2402,7 @@ if ($gOPD) {
module.exports = $gOPD;
-},{"get-intrinsic":15}],11:[function(require,module,exports){
+},{"get-intrinsic":14}],10:[function(require,module,exports){
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
@@ -3051,7 +2901,7 @@ function eventTargetAgnosticAddListener(emitter, name, listener, flags) {
}
}
-},{}],12:[function(require,module,exports){
+},{}],11:[function(require,module,exports){
var hasOwn = Object.prototype.hasOwnProperty;
var toString = Object.prototype.toString;
@@ -3075,7 +2925,7 @@ module.exports = function forEach (obj, fn, ctx) {
};
-},{}],13:[function(require,module,exports){
+},{}],12:[function(require,module,exports){
'use strict';
/* eslint no-invalid-this: 1 */
@@ -3129,14 +2979,14 @@ module.exports = function bind(that) {
return bound;
};
-},{}],14:[function(require,module,exports){
+},{}],13:[function(require,module,exports){
'use strict';
var implementation = require('./implementation');
module.exports = Function.prototype.bind || implementation;
-},{"./implementation":13}],15:[function(require,module,exports){
+},{"./implementation":12}],14:[function(require,module,exports){
'use strict';
var undefined;
@@ -3468,7 +3318,7 @@ module.exports = function GetIntrinsic(name, allowMissing) {
return value;
};
-},{"function-bind":14,"has":18,"has-symbols":16}],16:[function(require,module,exports){
+},{"function-bind":13,"has":17,"has-symbols":15}],15:[function(require,module,exports){
'use strict';
var origSymbol = typeof Symbol !== 'undefined' && Symbol;
@@ -3483,7 +3333,7 @@ module.exports = function hasNativeSymbols() {
return hasSymbolSham();
};
-},{"./shams":17}],17:[function(require,module,exports){
+},{"./shams":16}],16:[function(require,module,exports){
'use strict';
/* eslint complexity: [2, 18], max-statements: [2, 33] */
@@ -3527,14 +3377,14 @@ module.exports = function hasSymbols() {
return true;
};
-},{}],18:[function(require,module,exports){
+},{}],17:[function(require,module,exports){
'use strict';
var bind = require('function-bind');
module.exports = bind.call(Function.call, Object.prototype.hasOwnProperty);
-},{"function-bind":14}],19:[function(require,module,exports){
+},{"function-bind":13}],18:[function(require,module,exports){
/*! ieee754. BSD-3-Clause License. Feross Aboukhadijeh */
exports.read = function (buffer, offset, isLE, mLen, nBytes) {
var e, m
@@ -3621,7 +3471,7 @@ exports.write = function (buffer, value, offset, isLE, mLen, nBytes) {
buffer[offset + i - d] |= s * 128
}
-},{}],20:[function(require,module,exports){
+},{}],19:[function(require,module,exports){
if (typeof Object.create === 'function') {
// implementation from standard node.js 'util' module
module.exports = function inherits(ctor, superCtor) {
@@ -3650,7 +3500,7 @@ if (typeof Object.create === 'function') {
}
}
-},{}],21:[function(require,module,exports){
+},{}],20:[function(require,module,exports){
'use strict';
var hasToStringTag = typeof Symbol === 'function' && typeof Symbol.toStringTag === 'symbol';
@@ -3685,7 +3535,7 @@ isStandardArguments.isLegacyArguments = isLegacyArguments; // for tests
module.exports = supportsStandardArguments ? isStandardArguments : isLegacyArguments;
-},{"call-bind/callBound":8}],22:[function(require,module,exports){
+},{"call-bind/callBound":7}],21:[function(require,module,exports){
'use strict';
var toStr = Object.prototype.toString;
@@ -3725,7 +3575,7 @@ module.exports = function isGeneratorFunction(fn) {
return getProto(fn) === GeneratorFunction;
};
-},{}],23:[function(require,module,exports){
+},{}],22:[function(require,module,exports){
(function (global){(function (){
'use strict';
@@ -3790,7 +3640,7 @@ module.exports = function isTypedArray(value) {
};
}).call(this)}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
-},{"available-typed-arrays":4,"call-bind/callBound":8,"es-abstract/helpers/getOwnPropertyDescriptor":10,"foreach":12,"has-symbols":16}],24:[function(require,module,exports){
+},{"available-typed-arrays":3,"call-bind/callBound":7,"es-abstract/helpers/getOwnPropertyDescriptor":9,"foreach":11,"has-symbols":15}],23:[function(require,module,exports){
// shim for using process in browser
var process = module.exports = {};
@@ -3976,7 +3826,7 @@ process.chdir = function (dir) {
};
process.umask = function() { return 0; };
-},{}],25:[function(require,module,exports){
+},{}],24:[function(require,module,exports){
/*! safe-buffer. MIT License. Feross Aboukhadijeh */
/* eslint-disable node/no-deprecated-api */
var buffer = require('buffer')
@@ -4043,7 +3893,7 @@ SafeBuffer.allocUnsafeSlow = function (size) {
return buffer.SlowBuffer(size)
}
-},{"buffer":7}],26:[function(require,module,exports){
+},{"buffer":6}],25:[function(require,module,exports){
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
@@ -4174,7 +4024,7 @@ Stream.prototype.pipe = function(dest, options) {
return dest;
};
-},{"events":11,"inherits":20,"readable-stream/lib/_stream_duplex.js":28,"readable-stream/lib/_stream_passthrough.js":29,"readable-stream/lib/_stream_readable.js":30,"readable-stream/lib/_stream_transform.js":31,"readable-stream/lib/_stream_writable.js":32,"readable-stream/lib/internal/streams/end-of-stream.js":36,"readable-stream/lib/internal/streams/pipeline.js":38}],27:[function(require,module,exports){
+},{"events":10,"inherits":19,"readable-stream/lib/_stream_duplex.js":27,"readable-stream/lib/_stream_passthrough.js":28,"readable-stream/lib/_stream_readable.js":29,"readable-stream/lib/_stream_transform.js":30,"readable-stream/lib/_stream_writable.js":31,"readable-stream/lib/internal/streams/end-of-stream.js":35,"readable-stream/lib/internal/streams/pipeline.js":37}],26:[function(require,module,exports){
'use strict';
function _inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; subClass.__proto__ = superClass; }
@@ -4303,7 +4153,7 @@ createErrorType('ERR_UNKNOWN_ENCODING', function (arg) {
createErrorType('ERR_STREAM_UNSHIFT_AFTER_END_EVENT', 'stream.unshift() after end event');
module.exports.codes = codes;
-},{}],28:[function(require,module,exports){
+},{}],27:[function(require,module,exports){
(function (process){(function (){
// Copyright Joyent, Inc. and other Node contributors.
//
@@ -4445,7 +4295,7 @@ Object.defineProperty(Duplex.prototype, 'destroyed', {
}
});
}).call(this)}).call(this,require('_process'))
-},{"./_stream_readable":30,"./_stream_writable":32,"_process":24,"inherits":20}],29:[function(require,module,exports){
+},{"./_stream_readable":29,"./_stream_writable":31,"_process":23,"inherits":19}],28:[function(require,module,exports){
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
@@ -4485,7 +4335,7 @@ function PassThrough(options) {
PassThrough.prototype._transform = function (chunk, encoding, cb) {
cb(null, chunk);
};
-},{"./_stream_transform":31,"inherits":20}],30:[function(require,module,exports){
+},{"./_stream_transform":30,"inherits":19}],29:[function(require,module,exports){
(function (process,global){(function (){
// Copyright Joyent, Inc. and other Node contributors.
//
@@ -5612,7 +5462,7 @@ function indexOf(xs, x) {
return -1;
}
}).call(this)}).call(this,require('_process'),typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
-},{"../errors":27,"./_stream_duplex":28,"./internal/streams/async_iterator":33,"./internal/streams/buffer_list":34,"./internal/streams/destroy":35,"./internal/streams/from":37,"./internal/streams/state":39,"./internal/streams/stream":40,"_process":24,"buffer":7,"events":11,"inherits":20,"string_decoder/":41,"util":6}],31:[function(require,module,exports){
+},{"../errors":26,"./_stream_duplex":27,"./internal/streams/async_iterator":32,"./internal/streams/buffer_list":33,"./internal/streams/destroy":34,"./internal/streams/from":36,"./internal/streams/state":38,"./internal/streams/stream":39,"_process":23,"buffer":6,"events":10,"inherits":19,"string_decoder/":40,"util":5}],30:[function(require,module,exports){
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
@@ -5814,7 +5664,7 @@ function done(stream, er, data) {
if (stream._transformState.transforming) throw new ERR_TRANSFORM_ALREADY_TRANSFORMING();
return stream.push(null);
}
-},{"../errors":27,"./_stream_duplex":28,"inherits":20}],32:[function(require,module,exports){
+},{"../errors":26,"./_stream_duplex":27,"inherits":19}],31:[function(require,module,exports){
(function (process,global){(function (){
// Copyright Joyent, Inc. and other Node contributors.
//
@@ -6514,7 +6364,7 @@ Writable.prototype._destroy = function (err, cb) {
cb(err);
};
}).call(this)}).call(this,require('_process'),typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
-},{"../errors":27,"./_stream_duplex":28,"./internal/streams/destroy":35,"./internal/streams/state":39,"./internal/streams/stream":40,"_process":24,"buffer":7,"inherits":20,"util-deprecate":43}],33:[function(require,module,exports){
+},{"../errors":26,"./_stream_duplex":27,"./internal/streams/destroy":34,"./internal/streams/state":38,"./internal/streams/stream":39,"_process":23,"buffer":6,"inherits":19,"util-deprecate":42}],32:[function(require,module,exports){
(function (process){(function (){
'use strict';
@@ -6724,7 +6574,7 @@ var createReadableStreamAsyncIterator = function createReadableStreamAsyncIterat
module.exports = createReadableStreamAsyncIterator;
}).call(this)}).call(this,require('_process'))
-},{"./end-of-stream":36,"_process":24}],34:[function(require,module,exports){
+},{"./end-of-stream":35,"_process":23}],33:[function(require,module,exports){
'use strict';
function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); if (enumerableOnly) symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; }); keys.push.apply(keys, symbols); } return keys; }
@@ -6935,7 +6785,7 @@ function () {
return BufferList;
}();
-},{"buffer":7,"util":6}],35:[function(require,module,exports){
+},{"buffer":6,"util":5}],34:[function(require,module,exports){
(function (process){(function (){
'use strict'; // undocumented cb() API, needed for core, not for public API
@@ -7043,7 +6893,7 @@ module.exports = {
errorOrDestroy: errorOrDestroy
};
}).call(this)}).call(this,require('_process'))
-},{"_process":24}],36:[function(require,module,exports){
+},{"_process":23}],35:[function(require,module,exports){
// Ported from https://github.com/mafintosh/end-of-stream with
// permission from the author, Mathias Buus (@mafintosh).
'use strict';
@@ -7148,12 +6998,12 @@ function eos(stream, opts, callback) {
}
module.exports = eos;
-},{"../../../errors":27}],37:[function(require,module,exports){
+},{"../../../errors":26}],36:[function(require,module,exports){
module.exports = function () {
throw new Error('Readable.from is not available in the browser')
};
-},{}],38:[function(require,module,exports){
+},{}],37:[function(require,module,exports){
// Ported from https://github.com/mafintosh/pump with
// permission from the author, Mathias Buus (@mafintosh).
'use strict';
@@ -7251,7 +7101,7 @@ function pipeline() {
}
module.exports = pipeline;
-},{"../../../errors":27,"./end-of-stream":36}],39:[function(require,module,exports){
+},{"../../../errors":26,"./end-of-stream":35}],38:[function(require,module,exports){
'use strict';
var ERR_INVALID_OPT_VALUE = require('../../../errors').codes.ERR_INVALID_OPT_VALUE;
@@ -7279,10 +7129,10 @@ function getHighWaterMark(state, options, duplexKey, isDuplex) {
module.exports = {
getHighWaterMark: getHighWaterMark
};
-},{"../../../errors":27}],40:[function(require,module,exports){
+},{"../../../errors":26}],39:[function(require,module,exports){
module.exports = require('events').EventEmitter;
-},{"events":11}],41:[function(require,module,exports){
+},{"events":10}],40:[function(require,module,exports){
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
@@ -7579,7 +7429,7 @@ function simpleWrite(buf) {
function simpleEnd(buf) {
return buf && buf.length ? this.write(buf) : '';
}
-},{"safe-buffer":25}],42:[function(require,module,exports){
+},{"safe-buffer":24}],41:[function(require,module,exports){
(function (setImmediate,clearImmediate){(function (){
var nextTick = require('process/browser.js').nextTick;
var apply = Function.prototype.apply;
@@ -7658,7 +7508,7 @@ exports.clearImmediate = typeof clearImmediate === "function" ? clearImmediate :
delete immediateIds[id];
};
}).call(this)}).call(this,require("timers").setImmediate,require("timers").clearImmediate)
-},{"process/browser.js":24,"timers":42}],43:[function(require,module,exports){
+},{"process/browser.js":23,"timers":41}],42:[function(require,module,exports){
(function (global){(function (){
/**
@@ -7729,14 +7579,14 @@ function config (name) {
}
}).call(this)}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
-},{}],44:[function(require,module,exports){
+},{}],43:[function(require,module,exports){
module.exports = function isBuffer(arg) {
return arg && typeof arg === 'object'
&& typeof arg.copy === 'function'
&& typeof arg.fill === 'function'
&& typeof arg.readUInt8 === 'function';
}
-},{}],45:[function(require,module,exports){
+},{}],44:[function(require,module,exports){
// Currently in sync with Node.js lib/internal/util/types.js
// https://github.com/nodejs/node/commit/112cc7c27551254aa2b17098fb774867f05ed0d9
@@ -8072,7 +7922,7 @@ exports.isAnyArrayBuffer = isAnyArrayBuffer;
});
});
-},{"is-arguments":21,"is-generator-function":22,"is-typed-array":23,"which-typed-array":47}],46:[function(require,module,exports){
+},{"is-arguments":20,"is-generator-function":21,"is-typed-array":22,"which-typed-array":46}],45:[function(require,module,exports){
(function (process){(function (){
// Copyright Joyent, Inc. and other Node contributors.
//
@@ -8791,7 +8641,7 @@ function callbackify(original) {
exports.callbackify = callbackify;
}).call(this)}).call(this,require('_process'))
-},{"./support/isBuffer":44,"./support/types":45,"_process":24,"inherits":20}],47:[function(require,module,exports){
+},{"./support/isBuffer":43,"./support/types":44,"_process":23,"inherits":19}],46:[function(require,module,exports){
(function (global){(function (){
'use strict';
@@ -8851,5 +8701,5 @@ module.exports = function whichTypedArray(value) {
};
}).call(this)}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
-},{"available-typed-arrays":4,"call-bind/callBound":8,"es-abstract/helpers/getOwnPropertyDescriptor":10,"foreach":12,"has-symbols":16,"is-typed-array":23}]},{},[3])(3)
+},{"available-typed-arrays":3,"call-bind/callBound":7,"es-abstract/helpers/getOwnPropertyDescriptor":9,"foreach":11,"has-symbols":15,"is-typed-array":22}]},{},[2])(2)
});
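In the rebuilt browser/sync bundle above, the public surface is the Babel default export wired through ./index.js rather than the old CommonJS module.exports plus mixme. A minimal usage sketch of the synchronous API as implemented by _default() above; the require path is illustrative only and is not taken from this diff:

// Minimal sketch, assuming the sync build is consumed through its "default"
// export (the require path below is an assumption for illustration).
var transformSync = require('stream-transform/lib/es5/sync.js')["default"];

// A one-argument handler selects the synchronous branch of _transform:
// the return value of the handler becomes the output record.
var output = transformSync(
  [{ name: 'a' }, { name: 'b' }],
  function (record) { return { name: record.name.toUpperCase() }; }
);
// output: [{ name: 'A' }, { name: 'B' }] -- chunks are collected by the
// overridden transformer.push() shown above and returned synchronously.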
diff --git a/packages/stream-transform/lib/es5/index.js b/packages/stream-transform/lib/es5/index.js
index cbe3e7e35..4bc66e187 100644
--- a/packages/stream-transform/lib/es5/index.js
+++ b/packages/stream-transform/lib/es5/index.js
@@ -1,133 +1,53 @@
"use strict";
-function _typeof(obj) { "@babel/helpers - typeof"; if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); }
-
-// Generated by CoffeeScript 2.5.1
-// # Stream Transformer
-// Pass all elements of an array or a stream to transform, filter and add. Features include:
-// * Extends the Node.js "stream.Transform" API.
-// * Both synchrounous and asynchronous support based and user callback
-// arguments signature.
-// * Ability to skip records.
-// * Sequential and concurrent execution using the "parallel" options.
-// Please look at the [README], the [samples] and the [tests] for additional
-// information.
-var Transformer, clone, stream, util;
-stream = require('stream');
-util = require('util');
-
-var _require = require('mixme');
-
-clone = _require.clone;
-
-// ## Usage
-// Callback approach, for ease of use:
-// `transform(records, [options], handler, callback)`
-// Stream API, for maximum of power:
-// `transform([records], [options], handler, [callback])`
-module.exports = function () {
- var argument, callback, error, handler, i, j, len, options, records, result, transform, type;
- options = {};
-
- for (i = j = 0, len = arguments.length; j < len; i = ++j) {
- argument = arguments[i];
- type = _typeof(argument);
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.transform = exports["default"] = void 0;
- if (argument === null) {
- type = 'null';
- } else if (type === 'object' && Array.isArray(argument)) {
- type = 'array';
- }
+var _stream = _interopRequireDefault(require("stream"));
- if (type === 'array') {
- records = argument;
- } else if (type === 'object') {
- options = clone(argument);
- } else if (type === 'function') {
- if (handler && i === arguments.length - 1) {
- callback = argument;
- } else {
- handler = argument;
- }
- } else if (type !== 'null') {
- throw new Error("Invalid Arguments: got ".concat(JSON.stringify(argument), " at position ").concat(i));
- }
- }
+var _util = _interopRequireDefault(require("util"));
- transform = new Transformer(options, handler);
- error = false;
+function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { "default": obj }; }
- if (records) {
- setImmediate(function () {
- var k, len1, record;
+function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); if (enumerableOnly) { symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; }); } keys.push.apply(keys, symbols); } return keys; }
- for (k = 0, len1 = records.length; k < len1; k++) {
- record = records[k];
+function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i] != null ? arguments[i] : {}; if (i % 2) { ownKeys(Object(source), true).forEach(function (key) { _defineProperty(target, key, source[key]); }); } else if (Object.getOwnPropertyDescriptors) { Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)); } else { ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } } return target; }
- if (error) {
- break;
- }
+function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }
- transform.write(record);
- }
+function _typeof(obj) { "@babel/helpers - typeof"; if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); }
- return transform.end();
- });
- }
+function _createForOfIteratorHelper(o, allowArrayLike) { var it = typeof Symbol !== "undefined" && o[Symbol.iterator] || o["@@iterator"]; if (!it) { if (Array.isArray(o) || (it = _unsupportedIterableToArray(o)) || allowArrayLike && o && typeof o.length === "number") { if (it) o = it; var i = 0; var F = function F() {}; return { s: F, n: function n() { if (i >= o.length) return { done: true }; return { done: false, value: o[i++] }; }, e: function e(_e) { throw _e; }, f: F }; } throw new TypeError("Invalid attempt to iterate non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method."); } var normalCompletion = true, didErr = false, err; return { s: function s() { it = it.call(o); }, n: function n() { var step = it.next(); normalCompletion = step.done; return step; }, e: function e(_e2) { didErr = true; err = _e2; }, f: function f() { try { if (!normalCompletion && it["return"] != null) it["return"](); } finally { if (didErr) throw err; } } }; }
- if (callback || options.consume) {
- result = [];
- transform.on('readable', function () {
- var record, results;
- results = [];
+function _unsupportedIterableToArray(o, minLen) { if (!o) return; if (typeof o === "string") return _arrayLikeToArray(o, minLen); var n = Object.prototype.toString.call(o).slice(8, -1); if (n === "Object" && o.constructor) n = o.constructor.name; if (n === "Map" || n === "Set") return Array.from(o); if (n === "Arguments" || /^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)) return _arrayLikeToArray(o, minLen); }
- while (record = transform.read()) {
- if (callback) {
- results.push(result.push(record));
- } else {
- results.push(void 0);
- }
- }
+function _arrayLikeToArray(arr, len) { if (len == null || len > arr.length) len = arr.length; for (var i = 0, arr2 = new Array(len); i < len; i++) { arr2[i] = arr[i]; } return arr2; }
- return results;
- });
- transform.on('error', function (err) {
- error = true;
+var Transformer = function Transformer() {
+ var options = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};
+ var handler = arguments.length > 1 ? arguments[1] : undefined;
+ this.options = options;
- if (callback) {
- return callback(err);
- }
- });
- transform.on('end', function () {
- if (callback && !error) {
- return callback(null, result);
- }
- });
+ if (options.consume === undefined || options.consume === null) {
+ this.options.consume = false;
}
- return transform;
-}; // ## Transformer
-// Options are documented [here](http://csv.js.org/transform/options/).
-
+ this.options.objectMode = true;
-Transformer = function Transformer() {
- var options1 = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};
- var handler1 = arguments.length > 1 ? arguments[1] : undefined;
- var base, base1;
- this.options = options1;
- this.handler = handler1;
+ if (options.parallel === undefined || options.parallel === null) {
+ this.options.parallel = 100;
+ }
- if ((base = this.options).consume == null) {
- base.consume = false;
+ if (options.params === undefined || options.params === null) {
+ options.params = null;
}
- this.options.objectMode = true;
+ this.handler = handler;
- if ((base1 = this.options).parallel == null) {
- base1.parallel = 100;
- }
+ _stream["default"].Transform.call(this, this.options);
- stream.Transform.call(this, this.options);
this.state = {
running: 0,
started: 0,
@@ -136,25 +56,23 @@ Transformer = function Transformer() {
return this;
};
-util.inherits(Transformer, stream.Transform);
-module.exports.Transformer = Transformer;
+_util["default"].inherits(Transformer, _stream["default"].Transform);
Transformer.prototype._transform = function (chunk, encoding, cb) {
var _this = this;
- var callback, err, l;
this.state.started++;
this.state.running++;
if (this.state.running < this.options.parallel) {
cb();
- cb = null;
+ cb = null; // Cancel further callback execution
}
try {
- l = this.handler.length;
+ var l = this.handler.length;
- if (this.options.params != null) {
+ if (this.options.params !== null) {
l--;
}
@@ -163,7 +81,7 @@ Transformer.prototype._transform = function (chunk, encoding, cb) {
this.__done(null, [this.handler.call(this, chunk, this.options.params)], cb);
} else if (l === 2) {
// async
- callback = function callback(err) {
+ var callback = function callback(err) {
for (var _len = arguments.length, chunks = new Array(_len > 1 ? _len - 1 : 0), _key = 1; _key < _len; _key++) {
chunks[_key - 1] = arguments[_key];
}
@@ -173,29 +91,51 @@ Transformer.prototype._transform = function (chunk, encoding, cb) {
this.handler.call(this, chunk, callback, this.options.params);
} else {
- throw Error("Invalid handler arguments");
+ throw Error('Invalid handler arguments');
}
return false;
- } catch (error1) {
- err = error1;
- return this.__done(err);
+ } catch (err) {
+ this.__done(err);
}
};
Transformer.prototype._flush = function (cb) {
this._ending = function () {
if (this.state.running === 0) {
- this._ending = undefined;
- return cb();
+ cb();
}
};
- return this._ending();
-};
+ this._ending();
+}; // Transformer.prototype.__done = function(err, chunks, cb) {
+// var chunk, j, len;
+// this.state.running--;
+// if (err) {
+// return this.emit('error', err);
+// }
+// this.state.finished++;
+// for (j = 0, len = chunks.length; j < len; j++) {
+// chunk = chunks[j];
+// if (typeof chunk === 'number') {
+// chunk = `${chunk}`;
+// }
+// if ((chunk != null) && chunk !== '') {
+// // We dont push empty string
+// // See https://nodejs.org/api/stream.html#stream_readable_push
+// this.push(chunk);
+// }
+// }
+// if (cb) {
+// cb();
+// }
+// if (this._ending) {
+// return this._ending();
+// }
+// };
+
Transformer.prototype.__done = function (err, chunks, cb) {
- var chunk, j, len;
this.state.running--;
if (err) {
@@ -204,18 +144,27 @@ Transformer.prototype.__done = function (err, chunks, cb) {
this.state.finished++;
- for (j = 0, len = chunks.length; j < len; j++) {
- chunk = chunks[j];
+ var _iterator = _createForOfIteratorHelper(chunks),
+ _step;
- if (typeof chunk === 'number') {
- chunk = "".concat(chunk);
- }
+ try {
+ for (_iterator.s(); !(_step = _iterator.n()).done;) {
+ var chunk = _step.value;
- if (chunk != null && chunk !== '') {
- // We dont push empty string
+ if (typeof chunk === 'number') {
+ chunk = "".concat(chunk);
+ } // We dont push empty string
// See https://nodejs.org/api/stream.html#stream_readable_push
- this.push(chunk);
+
+
+ if (chunk !== undefined && chunk !== null && chunk !== '') {
+ this.push(chunk);
+ }
}
+ } catch (err) {
+ _iterator.e(err);
+ } finally {
+ _iterator.f();
}
if (cb) {
@@ -223,8 +172,88 @@ Transformer.prototype.__done = function (err, chunks, cb) {
}
if (this._ending) {
- return this._ending();
+ this._ending();
+ }
+};
+
+var transform = function transform() {
+ var options = {};
+ var callback, handler, records;
+
+ for (var i = 0; i < arguments.length; i++) {
+ var argument = arguments[i];
+
+ var type = _typeof(argument);
+
+ if (argument === null) {
+ type = 'null';
+ } else if (type === 'object' && Array.isArray(argument)) {
+ type = 'array';
+ }
+
+ if (type === 'array') {
+ records = argument;
+ } else if (type === 'object') {
+ options = _objectSpread({}, argument);
+ } else if (type === 'function') {
+ if (handler && i === arguments.length - 1) {
+ callback = argument;
+ } else {
+ handler = argument;
+ }
+ } else if (type !== 'null') {
+ throw new Error("Invalid Arguments: got ".concat(JSON.stringify(argument), " at position ").concat(i));
+ }
+ }
+
+ var transformer = new Transformer(options, handler);
+ var error = false;
+
+ if (records) {
+ setImmediate(function () {
+ var _iterator2 = _createForOfIteratorHelper(records),
+ _step2;
+
+ try {
+ for (_iterator2.s(); !(_step2 = _iterator2.n()).done;) {
+ var record = _step2.value;
+ if (error) break;
+ transformer.write(record);
+ }
+ } catch (err) {
+ _iterator2.e(err);
+ } finally {
+ _iterator2.f();
+ }
+
+ transformer.end();
+ });
+ }
+
+ if (callback || options.consume) {
+ var result = [];
+ transformer.on('readable', function () {
+ var record;
+
+ while (record = transformer.read()) {
+ if (callback) {
+ result.push(record);
+ }
+ }
+ });
+ transformer.on('error', function (err) {
+ error = true;
+ if (callback) callback(err);
+ });
+ transformer.on('end', function () {
+ if (callback && !error) callback(null, result);
+ });
}
-}; // [readme]: https://github.com/wdavidw/node-stream-transform
-// [samples]: https://github.com/wdavidw/node-stream-transform/tree/master/samples
-// [tests]: https://github.com/wdavidw/node-stream-transform/tree/master/test
\ No newline at end of file
+
+ return transformer;
+};
+
+exports.transform = transform;
+transform.Transformer = Transformer;
+var _default = transform;
+exports["default"] = _default;
\ No newline at end of file
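// Usage sketch (illustrative, not part of the patch) for the callback form compiled above.
// The require path 'stream-transform/lib/es5' is an assumption about how the es5 build is
// published; the callback receives every transformed record once the stream ends.
const { transform } = require('stream-transform/lib/es5')
transform([
  ['1', '2', '3'],
  ['4', '5', '6']
], function (record) {
  // synchronous handler: one argument, return the transformed record
  record.push(record.length)
  return record
}, function (err, records) {
  if (err) throw err
  // records is [['1', '2', '3', 3], ['4', '5', '6', 3]]
})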
diff --git a/packages/stream-transform/lib/es5/sync.js b/packages/stream-transform/lib/es5/sync.js
index 7e74bfeed..3a5a36e87 100644
--- a/packages/stream-transform/lib/es5/sync.js
+++ b/packages/stream-transform/lib/es5/sync.js
@@ -1,30 +1,34 @@
"use strict";
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports["default"] = _default;
+
+var _index = _interopRequireDefault(require("./index.js"));
+
+function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { "default": obj }; }
+
function _createForOfIteratorHelper(o, allowArrayLike) { var it = typeof Symbol !== "undefined" && o[Symbol.iterator] || o["@@iterator"]; if (!it) { if (Array.isArray(o) || (it = _unsupportedIterableToArray(o)) || allowArrayLike && o && typeof o.length === "number") { if (it) o = it; var i = 0; var F = function F() {}; return { s: F, n: function n() { if (i >= o.length) return { done: true }; return { done: false, value: o[i++] }; }, e: function e(_e) { throw _e; }, f: F }; } throw new TypeError("Invalid attempt to iterate non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method."); } var normalCompletion = true, didErr = false, err; return { s: function s() { it = it.call(o); }, n: function n() { var step = it.next(); normalCompletion = step.done; return step; }, e: function e(_e2) { didErr = true; err = _e2; }, f: function f() { try { if (!normalCompletion && it["return"] != null) it["return"](); } finally { if (didErr) throw err; } } }; }
function _unsupportedIterableToArray(o, minLen) { if (!o) return; if (typeof o === "string") return _arrayLikeToArray(o, minLen); var n = Object.prototype.toString.call(o).slice(8, -1); if (n === "Object" && o.constructor) n = o.constructor.name; if (n === "Map" || n === "Set") return Array.from(o); if (n === "Arguments" || /^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)) return _arrayLikeToArray(o, minLen); }
function _arrayLikeToArray(arr, len) { if (len == null || len > arr.length) len = arr.length; for (var i = 0, arr2 = new Array(len); i < len; i++) { arr2[i] = arr[i]; } return arr2; }
-function _typeof(obj) { "@babel/helpers - typeof"; if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); }
+function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); if (enumerableOnly) { symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; }); } keys.push.apply(keys, symbols); } return keys; }
-/*
-Stream Transform - sync module
+function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i] != null ? arguments[i] : {}; if (i % 2) { ownKeys(Object(source), true).forEach(function (key) { _defineProperty(target, key, source[key]); }); } else if (Object.getOwnPropertyDescriptors) { Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)); } else { ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } } return target; }
-Please look at the [project documentation](https://csv.js.org/transform/) for
-additional information.
-*/
-var transform = require('.');
+function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }
-var _require = require('mixme'),
- clone = _require.clone;
+function _typeof(obj) { "@babel/helpers - typeof"; if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); }
-module.exports = function () {
+function _default() {
// Import arguments normalization
- var handler, callback;
+ var handler, callback, records;
var options = {};
- for (i in arguments) {
+ for (var i in arguments) {
var argument = arguments[i];
var type = _typeof(argument);
@@ -38,7 +42,7 @@ module.exports = function () {
if (type === 'array') {
records = argument;
} else if (type === 'object') {
- options = clone(argument);
+ options = _objectSpread({}, argument);
} else if (type === 'function') {
if (handler && i === arguments.length - 1) {
callback = argument;
@@ -63,7 +67,7 @@ module.exports = function () {
var chunks = [];
- var transformer = new transform.Transformer(options, handler);
+ var transformer = new _index["default"].Transformer(options, handler);
transformer.push = function (chunk) {
chunks.push(chunk);
@@ -85,4 +89,4 @@ module.exports = function () {
}
return chunks;
-};
\ No newline at end of file
+}
\ No newline at end of file
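// Usage sketch (illustrative, not part of the patch) for the sync wrapper compiled above:
// it drives the Transformer internally and returns the transformed records as an array.
// The '.default' access mirrors the `exports["default"]` assignment in the diff; the
// require path is an assumption.
const transformSync = require('stream-transform/lib/es5/sync').default
const records = transformSync([
  ['a', 'b', 'c'],
  ['d', 'e', 'f']
], function (record) {
  return record.reverse()
})
// records is [['c', 'b', 'a'], ['f', 'e', 'd']]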
diff --git a/packages/stream-transform/lib/index.d.ts b/packages/stream-transform/lib/index.d.ts
index 2db108d6d..75bd5d2c9 100644
--- a/packages/stream-transform/lib/index.d.ts
+++ b/packages/stream-transform/lib/index.d.ts
@@ -1,40 +1,41 @@
/// <reference types="node" />
import * as stream from "stream";
-export = transform
-// transform([records], [options], handler, [callback])
+export type Handler<T = any, U = any> = (record: T, callback: HandlerCallback, params?: any) => U
+export type HandlerCallback<T = any> = (err?: null | Error, record?: T) => void
+export type Callback = (err?: null | Error, output?: string) => void
-declare function transform<T = any, U = any>(handler: transform.Handler<T, U>, callback?: transform.Callback): transform.Transformer
-declare function transform<T = any, U = any>(records: Array<T>, handler: transform.Handler<T, U>, callback?: transform.Callback): transform.Transformer
-declare function transform<T = any, U = any>(options: transform.Options, handler: transform.Handler<T, U>, callback?: transform.Callback): transform.Transformer
-declare function transform<T = any, U = any>(records: Array<T>, options: transform.Options, handler: transform.Handler<T, U>, callback?: transform.Callback): transform.Transformer
-declare namespace transform {
- type Handler<T = any, U = any> = (record: T, callback: HandlerCallback, params?: any) => U
- type HandlerCallback<T = any> = (err?: null | Error, record?: T) => void
- type Callback = (err?: null | Error, output?: string) => void
- interface Options {
- /**
- * In the absence of a consumer, like a `stream.Readable`, trigger the consumption of the stream.
- */
- consume?: boolean
- /**
- * The number of transformation callbacks to run in parallel; only apply with asynchronous handlers; default to "100".
- */
- parallel?: number
- /**
- * Pass user defined parameters to the user handler as last argument.
- */
- params?: any
- }
- interface State {
- finished: number
- running: number
- started: number
- }
- class Transformer extends stream.Transform {
- constructor(options: Options)
- readonly options: Options
- readonly state: State
- }
+export interface Options {
+ /**
+ * In the absence of a consumer, like a `stream.Readable`, trigger the consumption of the stream.
+ */
+ consume?: boolean
+ /**
+ * The number of transformation callbacks to run in parallel; only apply with asynchronous handlers; default to "100".
+ */
+ parallel?: number
+ /**
+ * Pass user defined parameters to the user handler as last argument.
+ */
+ params?: any
}
+export interface State {
+ finished: number
+ running: number
+ started: number
+}
+export class Transformer extends stream.Transform {
+ constructor(options: Options)
+ readonly options: Options
+ readonly state: State
+}
+
+declare function transform<T = any, U = any>(handler: Handler<T, U>, callback?: Callback): Transformer
+declare function transform<T = any, U = any>(records: Array<T>, handler: Handler<T, U>, callback?: Callback): Transformer
+declare function transform<T = any, U = any>(options: Options, handler: Handler<T, U>, callback?: Callback): Transformer
+declare function transform<T = any, U = any>(records: Array<T>, options: Options, handler: Handler<T, U>, callback?: Callback): Transformer
+
+export default transform;
+export {transform}
+// export = transform
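// Call-shape sketch (illustrative, not part of the patch) for the four overloads declared
// above; importing the package as 'stream-transform' is an assumption.
import transform from 'stream-transform'
const records = [['a', 'b'], ['c', 'd']]
const handler = (record) => record.join('|')
const callback = (err, output) => { if (err) throw err; console.log(output) }
transform(handler)                                        // handler only: bare Transformer stream
transform(records, handler)                               // records + handler
transform({ parallel: 10 }, handler, callback)            // options + handler + callback
transform(records, { consume: true }, handler, callback)  // records + options + handler + callback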
diff --git a/packages/stream-transform/lib/index.js b/packages/stream-transform/lib/index.js
index d1598d0ee..39dc39acb 100644
--- a/packages/stream-transform/lib/index.js
+++ b/packages/stream-transform/lib/index.js
@@ -1,195 +1,180 @@
-// Generated by CoffeeScript 2.5.1
-// # Stream Transformer
-// Pass all elements of an array or a stream to transform, filter and add. Features include:
+/*
+Stream Transform
-// * Extends the Node.js "stream.Transform" API.
-// * Both synchrounous and asynchronous support based and user callback
-// arguments signature.
-// * Ability to skip records.
-// * Sequential and concurrent execution using the "parallel" options.
+Please look at the [project documentation](https://csv.js.org/transform/) for
+additional information.
+*/
-// Please look at the [README], the [samples] and the [tests] for additional
-// information.
-var Transformer, clone, stream, util;
+import stream from 'stream'
+import util from 'util'
-stream = require('stream');
-
-util = require('util');
-
-({clone} = require('mixme'));
-
-// ## Usage
-
-// Callback approach, for ease of use:
-
-// `transform(records, [options], handler, callback)`
-
-// Stream API, for maximum of power:
-
-// `transform([records], [options], handler, [callback])`
-module.exports = function() {
- var argument, callback, error, handler, i, j, len, options, records, result, transform, type;
- options = {};
- for (i = j = 0, len = arguments.length; j < len; i = ++j) {
- argument = arguments[i];
- type = typeof argument;
- if (argument === null) {
- type = 'null';
- } else if (type === 'object' && Array.isArray(argument)) {
- type = 'array';
- }
- if (type === 'array') {
- records = argument;
- } else if (type === 'object') {
- options = clone(argument);
- } else if (type === 'function') {
- if (handler && i === arguments.length - 1) {
- callback = argument;
- } else {
- handler = argument;
- }
- } else if (type !== 'null') {
- throw new Error(`Invalid Arguments: got ${JSON.stringify(argument)} at position ${i}`);
- }
- }
- transform = new Transformer(options, handler);
- error = false;
- if (records) {
- setImmediate(function() {
- var k, len1, record;
- for (k = 0, len1 = records.length; k < len1; k++) {
- record = records[k];
- if (error) {
- break;
- }
- transform.write(record);
- }
- return transform.end();
- });
- }
- if (callback || options.consume) {
- result = [];
- transform.on('readable', function() {
- var record, results;
- results = [];
- while ((record = transform.read())) {
- if (callback) {
- results.push(result.push(record));
- } else {
- results.push(void 0);
- }
- }
- return results;
- });
- transform.on('error', function(err) {
- error = true;
- if (callback) {
- return callback(err);
- }
- });
- transform.on('end', function() {
- if (callback && !error) {
- return callback(null, result);
- }
- });
+const Transformer = function(options = {}, handler){
+ this.options = options
+ if(options.consume === undefined || options.consume === null){
+ this.options.consume = false
}
- return transform;
-};
-
-// ## Transformer
-
-// Options are documented [here](http://csv.js.org/transform/options/).
-Transformer = function(options1 = {}, handler1) {
- var base, base1;
- this.options = options1;
- this.handler = handler1;
- if ((base = this.options).consume == null) {
- base.consume = false;
+ this.options.objectMode = true
+ if(options.parallel === undefined || options.parallel === null){
+ this.options.parallel = 100
}
- this.options.objectMode = true;
- if ((base1 = this.options).parallel == null) {
- base1.parallel = 100;
+ if(options.params === undefined || options.params === null){
+ options.params = null
}
- stream.Transform.call(this, this.options);
+ this.handler = handler
+ stream.Transform.call(this, this.options)
this.state = {
running: 0,
started: 0,
finished: 0
- };
- return this;
-};
-
-util.inherits(Transformer, stream.Transform);
+ }
+ return this
+}
-module.exports.Transformer = Transformer;
+util.inherits(Transformer, stream.Transform)
-Transformer.prototype._transform = function(chunk, encoding, cb) {
- var callback, err, l;
- this.state.started++;
- this.state.running++;
- if (this.state.running < this.options.parallel) {
- cb();
- cb = null;
+Transformer.prototype._transform = function(chunk, encoding, cb){
+ this.state.started++
+ this.state.running++
+ if(this.state.running < this.options.parallel){
+ cb()
+ cb = null // Cancel further callback execution
}
try {
- l = this.handler.length;
- if (this.options.params != null) {
- l--;
+ let l = this.handler.length
+ if(this.options.params !== null){
+ l--
}
- if (l === 1) { // sync
- this.__done(null, [this.handler.call(this, chunk, this.options.params)], cb);
- } else if (l === 2) { // async
- callback = (err, ...chunks) => {
- return this.__done(err, chunks, cb);
- };
- this.handler.call(this, chunk, callback, this.options.params);
- } else {
- throw Error("Invalid handler arguments");
+ if(l === 1){ // sync
+ this.__done(null, [this.handler.call(this, chunk, this.options.params)], cb)
+ }else if(l === 2){ // async
+ const callback = (err, ...chunks) =>
+ this.__done(err, chunks, cb)
+ this.handler.call(this, chunk, callback, this.options.params)
+ }else{
+ throw Error('Invalid handler arguments')
}
- return false;
- } catch (error1) {
- err = error1;
- return this.__done(err);
+ return false
}
-};
-
-Transformer.prototype._flush = function(cb) {
- this._ending = function() {
- if (this.state.running === 0) {
- this._ending = undefined
- return cb();
+ catch (err) {
+ this.__done(err)
+ }
+}
+Transformer.prototype._flush = function(cb){
+ this._ending = function(){
+ if(this.state.running === 0){
+ cb()
}
- };
- return this._ending();
-};
-
-Transformer.prototype.__done = function(err, chunks, cb) {
- var chunk, j, len;
- this.state.running--;
- if (err) {
- return this.emit('error', err);
}
- this.state.finished++;
- for (j = 0, len = chunks.length; j < len; j++) {
- chunk = chunks[j];
- if (typeof chunk === 'number') {
- chunk = `${chunk}`;
+ this._ending()
+}
+
+// Transformer.prototype.__done = function(err, chunks, cb) {
+// var chunk, j, len;
+// this.state.running--;
+// if (err) {
+// return this.emit('error', err);
+// }
+// this.state.finished++;
+// for (j = 0, len = chunks.length; j < len; j++) {
+// chunk = chunks[j];
+// if (typeof chunk === 'number') {
+// chunk = `${chunk}`;
+// }
+// if ((chunk != null) && chunk !== '') {
+// // We dont push empty string
+// // See https://nodejs.org/api/stream.html#stream_readable_push
+// this.push(chunk);
+// }
+// }
+// if (cb) {
+// cb();
+// }
+// if (this._ending) {
+// return this._ending();
+// }
+// };
+Transformer.prototype.__done = function(err, chunks, cb){
+ this.state.running--
+ if(err){
+ return this.emit('error', err)
+ }
+ this.state.finished++
+ for(let chunk of chunks){
+ if (typeof chunk === 'number'){
+ chunk = `${chunk}`
}
- if ((chunk != null) && chunk !== '') {
- // We dont push empty string
- // See https://nodejs.org/api/stream.html#stream_readable_push
- this.push(chunk);
+ // We dont push empty string
+ // See https://nodejs.org/api/stream.html#stream_readable_push
+ if(chunk !== undefined && chunk !== null && chunk !== ''){
+ this.push(chunk)
}
}
- if (cb) {
- cb();
+ if(cb){
+ cb()
}
- if (this._ending) {
- return this._ending();
+ if(this._ending){
+ this._ending()
}
-};
+}
+const transform = function(){
+ let options = {}
+ let callback, handler, records
+ for(let i = 0; i< arguments.length; i++){
+ const argument = arguments[i]
+ let type = typeof argument
+ if(argument === null){
+ type = 'null'
+ }else if(type === 'object' && Array.isArray(argument)){
+ type = 'array'
+ }
+ if(type === 'array'){
+ records = argument
+ }else if(type === 'object'){
+ options = {...argument}
+ }else if(type === 'function'){
+ if (handler && i === arguments.length - 1) {
+ callback = argument
+ } else {
+ handler = argument
+ }
+ }else if(type !== 'null'){
+ throw new Error(`Invalid Arguments: got ${JSON.stringify(argument)} at position ${i}`)
+ }
+ }
+ const transformer = new Transformer(options, handler)
+ let error = false
+ if (records) {
+ setImmediate(function(){
+ for(let record of records){
+ if(error) break
+ transformer.write(record)
+ }
+ transformer.end()
+ })
+ }
+ if(callback || options.consume) {
+ const result = []
+ transformer.on( 'readable', function(){
+ let record
+ while(record = transformer.read()){
+ if(callback){
+ result.push(record)
+ }
+ }
+ })
+ transformer.on( 'error', function(err){
+ error = true
+ if (callback) callback(err)
+ })
+ transformer.on( 'end', function(){
+ if (callback && !error) callback(null, result)
+ })
+ }
+ return transformer
+}
+
+transform.Transformer = Transformer
-// [readme]: https://github.com/wdavidw/node-stream-transform
-// [samples]: https://github.com/wdavidw/node-stream-transform/tree/master/samples
-// [tests]: https://github.com/wdavidw/node-stream-transform/tree/master/test
+export default transform
+export {transform}
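// Usage sketch (illustrative, not part of the patch) for the ESM stream API defined above,
// assuming the module resolves as 'stream-transform'. The two-argument handler takes the
// asynchronous path in _transform, so the result is delivered through the callback.
import { transform } from 'stream-transform'
const transformer = transform(function (record, callback) {
  setImmediate(() => callback(null, record.map((field) => field.toUpperCase())))
})
transformer.on('readable', function () {
  let record
  while ((record = transformer.read()) !== null) console.log(record)
})
transformer.on('error', (err) => console.error(err))
transformer.write(['a', 'b'])
transformer.write(['c', 'd'])
transformer.end()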
diff --git a/packages/stream-transform/lib/sync.d.ts b/packages/stream-transform/lib/sync.d.ts
index 210192774..38c64d86a 100644
--- a/packages/stream-transform/lib/sync.d.ts
+++ b/packages/stream-transform/lib/sync.d.ts
@@ -1,11 +1,9 @@
/// <reference types="node" />
-import * as streamTransform from './index';
-export = transform
+import {Options} from './index';
-// transform(records, [options], handler)
+export type Handler<T = any, U = any> = (record: T) => U
+export function transform<T = any, U = any>(records: Array<T>, handler: Handler<T, U>): Array<U>
+export function transform<T = any, U = any>(records: Array<T>, options: Options, handler: Handler<T, U>): Array<U>
-type Handler<T = any, U = any> = (record: T) => U
-declare function transform<T = any, U = any>(records: Array<T>, handler: Handler<T, U>): Array<U>
-declare function transform<T = any, U = any>(records: Array<T>, options: streamTransform.Options, handler: Handler<T, U>): Array<U>
-declare namespace transform { }
+export default transform;
diff --git a/packages/stream-transform/lib/sync.js b/packages/stream-transform/lib/sync.js
index 3f704b0e6..8082e70b3 100644
--- a/packages/stream-transform/lib/sync.js
+++ b/packages/stream-transform/lib/sync.js
@@ -6,14 +6,13 @@ Please look at the [project documentation](https://csv.js.org/transform/) for
additional information.
*/
-const transform = require('.')
-const {clone} = require('mixme')
+import transform from './index.js'
-module.exports = function(){
+export default function(){
// Import arguments normalization
- let handler, callback
+ let handler, callback, records
let options = {}
- for(i in arguments){
+ for(const i in arguments){
const argument = arguments[i]
let type = typeof argument
if(argument === null){
@@ -24,7 +23,7 @@ module.exports = function(){
if(type === 'array'){
records = argument
}else if(type === 'object'){
- options = clone(argument)
+ options = {...argument}
}else if(type === 'function'){
if(handler && i === arguments.length - 1){
callback = argument
diff --git a/packages/stream-transform/package.json b/packages/stream-transform/package.json
index e7cb3a836..e1658db8a 100644
--- a/packages/stream-transform/package.json
+++ b/packages/stream-transform/package.json
@@ -50,16 +50,15 @@
],
"main": "./lib",
"mocha": {
- "throw-deprecation": true,
- "require": [
- "should",
- "coffeescript/register",
- "ts-node/register"
- ],
"inline-diffs": true,
- "timeout": 40000,
+ "loader": "./test/loaders/all.mjs",
+ "recursive": true,
"reporter": "spec",
- "recursive": true
+ "require": [
+ "should"
+ ],
+ "throw-deprecation": true,
+ "timeout": 40000
},
"scripts": {
"build:babel": "cd lib && babel *.js -d es5 && cd ..",
@@ -69,13 +68,11 @@
"pretest": "npm run build",
"test": "mocha test/**/*.{coffee,ts}"
},
+ "type": "module",
"types": [
"./lib/es5/index.d.ts",
"./lib/es5/sync.d.ts",
"./lib/index.d.ts",
"./lib/sync.d.ts"
- ],
- "dependencies": {
- "mixme": "^0.5.1"
- }
+ ]
}
diff --git a/packages/stream-transform/samples/api.callback.js b/packages/stream-transform/samples/api.callback.js
index 04a43903f..2a2a44ee4 100644
--- a/packages/stream-transform/samples/api.callback.js
+++ b/packages/stream-transform/samples/api.callback.js
@@ -1,6 +1,6 @@
-const transform = require('..')
-const assert = require('assert')
+import transform from '../lib/index.js'
+import assert from 'assert'
transform([
['1','2','3','4'],
diff --git a/packages/stream-transform/samples/api.stream.js b/packages/stream-transform/samples/api.stream.js
index 469418c1a..90db14a78 100644
--- a/packages/stream-transform/samples/api.stream.js
+++ b/packages/stream-transform/samples/api.stream.js
@@ -1,6 +1,6 @@
-const transform = require('..')
-const assert = require('assert')
+import transform from '../lib/index.js'
+import assert from 'assert'
const output = []
const transformer = transform(function(data){
diff --git a/packages/stream-transform/samples/api.sync.js b/packages/stream-transform/samples/api.sync.js
index 97650cdf4..13686d6c5 100644
--- a/packages/stream-transform/samples/api.sync.js
+++ b/packages/stream-transform/samples/api.sync.js
@@ -1,6 +1,6 @@
-const transform = require('../lib/sync')
-const assert = require('assert')
+import transform from '../lib/sync.js'
+import assert from 'assert'
const records = transform([
[ 'a', 'b', 'c', 'd' ],
diff --git a/packages/stream-transform/samples/mixed.output_stream.js b/packages/stream-transform/samples/mixed.output_stream.js
index 45cb4912f..2ed22eb4a 100644
--- a/packages/stream-transform/samples/mixed.output_stream.js
+++ b/packages/stream-transform/samples/mixed.output_stream.js
@@ -1,6 +1,6 @@
-const transform = require('..')
-const assert = require('assert')
+import transform from '../lib/index.js'
+import assert from 'assert'
const output = []
transform([
diff --git a/packages/stream-transform/samples/mode.sequential.js b/packages/stream-transform/samples/mode.sequential.js
index 342c8edca..57858037d 100644
--- a/packages/stream-transform/samples/mode.sequential.js
+++ b/packages/stream-transform/samples/mode.sequential.js
@@ -1,6 +1,6 @@
-const transform = require('..')
-const assert = require('assert')
+import transform from '../lib/index.js'
+import assert from 'assert'
// Generate a dataset of 500 records
const records = '.'.repeat(500).split('.').map( (_, i) => i )
diff --git a/packages/stream-transform/samples/module.async.js b/packages/stream-transform/samples/module.async.js
index 762f4569f..b08846d83 100644
--- a/packages/stream-transform/samples/module.async.js
+++ b/packages/stream-transform/samples/module.async.js
@@ -1,5 +1,5 @@
-const transform = require('..')
+import transform from '../lib/index.js'
transform([
['1','2','3','4'],
diff --git a/packages/stream-transform/samples/module.sync.js b/packages/stream-transform/samples/module.sync.js
index 2b4a73d20..2a5f48c29 100644
--- a/packages/stream-transform/samples/module.sync.js
+++ b/packages/stream-transform/samples/module.sync.js
@@ -1,5 +1,5 @@
-const transform = require('..')
+import transform from '../lib/index.js'
transform([
['1','2','3','4'],
diff --git a/packages/stream-transform/samples/state.handler.js b/packages/stream-transform/samples/state.handler.js
index 1c034390e..5b9f9ef99 100644
--- a/packages/stream-transform/samples/state.handler.js
+++ b/packages/stream-transform/samples/state.handler.js
@@ -1,6 +1,7 @@
-const transform = require('..')
-const assert = require('assert')
+import transform from '../lib/index.js'
+import assert from 'assert'
+
// Generate a dataset of 5 records
const records = 'record\n'.repeat(5).trim().split('\n')
let test_running = records.length
diff --git a/packages/stream-transform/samples/state.instance.js b/packages/stream-transform/samples/state.instance.js
index 1a86638e1..1d287eb1d 100644
--- a/packages/stream-transform/samples/state.instance.js
+++ b/packages/stream-transform/samples/state.instance.js
@@ -1,5 +1,6 @@
-const transform = require('..')
+import transform from '../lib/index.js'
+
// Generate a dataset of 5 records
const records = 'record\n'.repeat(5).trim().split('\n')
// Initialize the transformation
diff --git a/packages/stream-transform/test/api.callback.coffee b/packages/stream-transform/test/api.callback.coffee
index 4cc30c5fb..721ccda5c 100644
--- a/packages/stream-transform/test/api.callback.coffee
+++ b/packages/stream-transform/test/api.callback.coffee
@@ -1,6 +1,6 @@
-generate = require 'csv-generate'
-transform = require '../lib'
+import generate from 'csv-generate'
+import transform from '../lib/index.js'
describe 'api.callback', ->
diff --git a/packages/stream-transform/test/api.pipe.coffee b/packages/stream-transform/test/api.pipe.coffee
index a4fca6c4f..c2cd9f791 100644
--- a/packages/stream-transform/test/api.pipe.coffee
+++ b/packages/stream-transform/test/api.pipe.coffee
@@ -1,7 +1,7 @@
-stream = require 'stream'
-generate = require 'csv-generate'
-transform = require '../lib'
+import stream from 'stream'
+import generate from 'csv-generate'
+import transform from '../lib/index.js'
describe 'api.pipe', ->
diff --git a/packages/stream-transform/test/api.sync.coffee b/packages/stream-transform/test/api.sync.coffee
index 0ae2bdee7..6e3a8886b 100644
--- a/packages/stream-transform/test/api.sync.coffee
+++ b/packages/stream-transform/test/api.sync.coffee
@@ -1,5 +1,5 @@
-transform = require '../lib/sync'
+import transform from '../lib/sync.js'
describe 'api.sync', ->
diff --git a/packages/stream-transform/test/api.types.ts b/packages/stream-transform/test/api.types.ts
index 095623ead..91779b47b 100644
--- a/packages/stream-transform/test/api.types.ts
+++ b/packages/stream-transform/test/api.types.ts
@@ -1,8 +1,7 @@
import 'should'
-import * as transform from '../lib/index'
-import * as transformSync from '../lib/sync'
-import {Options, Transformer} from '../lib/index'
+import transform, {Options, Transformer} from '../lib/index.js'
+import transformSync from '../lib/sync.js'
describe('api.types', () => {
@@ -41,7 +40,7 @@ describe('api.types', () => {
const options: Options = transformer.options
const keys: any = Object.keys(options)
keys.sort().should.eql([
- 'consume', 'objectMode', 'parallel'
+ 'consume', 'objectMode', 'parallel', 'params'
])
})
diff --git a/packages/stream-transform/test/handler.async.coffee b/packages/stream-transform/test/handler.async.coffee
index 4470b2568..a11ccb410 100644
--- a/packages/stream-transform/test/handler.async.coffee
+++ b/packages/stream-transform/test/handler.async.coffee
@@ -1,5 +1,5 @@
-transform = require '../lib'
+import transform from '../lib/index.js'
describe 'handler.async', ->
diff --git a/packages/stream-transform/test/handler.coffee b/packages/stream-transform/test/handler.coffee
index cbc263fdb..c31ebf6a8 100644
--- a/packages/stream-transform/test/handler.coffee
+++ b/packages/stream-transform/test/handler.coffee
@@ -1,5 +1,5 @@
-transform = require '../lib'
+import transform from '../lib/index.js'
describe 'handler', ->
diff --git a/packages/stream-transform/test/handler.error.coffee b/packages/stream-transform/test/handler.error.coffee
index 2e703d3d0..0885d6b3c 100644
--- a/packages/stream-transform/test/handler.error.coffee
+++ b/packages/stream-transform/test/handler.error.coffee
@@ -1,6 +1,6 @@
-generate = require 'csv-generate'
-transform = require '../lib'
+import generate from 'csv-generate'
+import transform from '../lib/index.js'
describe 'handler.error', ->
diff --git a/packages/stream-transform/test/handler.sync.coffee b/packages/stream-transform/test/handler.sync.coffee
index a84e49397..ab91dc38b 100644
--- a/packages/stream-transform/test/handler.sync.coffee
+++ b/packages/stream-transform/test/handler.sync.coffee
@@ -1,6 +1,6 @@
-transform = require '../lib'
-generate = require 'csv-generate'
+import generate from 'csv-generate'
+import transform from '../lib/index.js'
describe 'handler.sync', ->
diff --git a/packages/stream-transform/test/handler.types.coffee b/packages/stream-transform/test/handler.types.coffee
index 705e1a5f8..ea13ec762 100644
--- a/packages/stream-transform/test/handler.types.coffee
+++ b/packages/stream-transform/test/handler.types.coffee
@@ -1,5 +1,5 @@
-transform = require '../lib'
+import transform from '../lib/index.js'
describe 'handler.types', ->
diff --git a/packages/stream-transform/test/loaders/all.mjs b/packages/stream-transform/test/loaders/all.mjs
new file mode 100644
index 000000000..4a3828589
--- /dev/null
+++ b/packages/stream-transform/test/loaders/all.mjs
@@ -0,0 +1,37 @@
+
+import * as coffee from './coffee.mjs'
+import * as ts from 'ts-node/esm'
+
+const coffeeRegex = /\.coffee$|\.litcoffee$|\.coffee\.md$/;
+const tsRegex = /\.ts$/;
+
+export function resolve(specifier, context, defaultResolve) {
+ if (coffeeRegex.test(specifier)) {
+ return coffee.resolve.apply(this, arguments)
+ }
+ if (tsRegex.test(specifier)) {
+ return ts.resolve.apply(this, arguments)
+ }
+ return ts.resolve.apply(this, arguments);
+}
+
+export function getFormat(url, context, defaultGetFormat) {
+ if (coffeeRegex.test(url)) {
+ return coffee.getFormat.apply(this, arguments)
+ }
+ if (tsRegex.test(url)) {
+ return ts.getFormat.apply(this, arguments)
+ }
+ return ts.getFormat.apply(this, arguments);
+}
+
+export function transformSource(source, context, defaultTransformSource) {
+ const { url } = context;
+ if (coffeeRegex.test(url)) {
+ return coffee.transformSource.apply(this, arguments)
+ }
+ if (tsRegex.test(url)) {
+ return ts.transformSource.apply(this, arguments)
+ }
+ return ts.transformSource.apply(this, arguments);
+}
diff --git a/packages/stream-transform/test/loaders/coffee.mjs b/packages/stream-transform/test/loaders/coffee.mjs
new file mode 100644
index 000000000..f4945adb7
--- /dev/null
+++ b/packages/stream-transform/test/loaders/coffee.mjs
@@ -0,0 +1,50 @@
+// coffeescript-loader.mjs
+import { URL, pathToFileURL } from 'url';
+import CoffeeScript from 'coffeescript';
+import { cwd } from 'process';
+
+const baseURL = pathToFileURL(`${cwd()}/`).href;
+
+// CoffeeScript files end in .coffee, .litcoffee or .coffee.md.
+const extensionsRegex = /\.coffee$|\.litcoffee$|\.coffee\.md$/;
+
+export function resolve(specifier, context, defaultResolve) {
+ const { parentURL = baseURL } = context;
+ // Node.js normally errors on unknown file extensions, so return a URL for
+ // specifiers ending in the CoffeeScript file extensions.
+ if (extensionsRegex.test(specifier)) {
+ return {
+ url: new URL(specifier, parentURL).href,
+ stop: true
+ };
+ }
+ // Let Node.js handle all other specifiers.
+ return defaultResolve(specifier, context, defaultResolve);
+}
+
+export function getFormat(url, context, defaultGetFormat) {
+ // Now that we patched resolve to let CoffeeScript URLs through, we need to
+ // tell Node.js what format such URLs should be interpreted as. For the
+ // purposes of this loader, all CoffeeScript URLs are ES modules.
+ if (extensionsRegex.test(url)) {
+ return {
+ format: 'module',
+ stop: true
+ };
+ }
+ // Let Node.js handle all other URLs.
+ return defaultGetFormat(url, context, defaultGetFormat);
+}
+
+export function transformSource(source, context, defaultTransformSource) {
+ const { url, format } = context;
+
+ if (extensionsRegex.test(url)) {
+ return {
+ source: CoffeeScript.compile(String(source), { bare: true })
+ };
+ }
+
+ // Let Node.js handle all other sources.
+ return defaultTransformSource(source, context, defaultTransformSource);
+}
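// Standalone sketch (illustrative, not part of the patch) of the compile step used by
// transformSource above: `bare: true` keeps the output unwrapped so Node can treat the
// compiled result as a plain ES module.
import CoffeeScript from 'coffeescript'
const source = 'add = (a, b) -> a + b\nexport default add'
const js = CoffeeScript.compile(source, { bare: true })
console.log(js) // compiled JavaScript, ready to be returned from the loader hook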
diff --git a/packages/stream-transform/test/option.consume.coffee b/packages/stream-transform/test/option.consume.coffee
index 6596a00bb..b73fd48bc 100644
--- a/packages/stream-transform/test/option.consume.coffee
+++ b/packages/stream-transform/test/option.consume.coffee
@@ -1,7 +1,6 @@
-pad = require 'pad'
-generate = require 'csv-generate'
-transform = require '../lib'
+import generate from 'csv-generate'
+import transform from '../lib/index.js'
describe 'option.consume', ->
diff --git a/packages/stream-transform/test/option.parallel.coffee b/packages/stream-transform/test/option.parallel.coffee
index b12e2e9f2..5331c34c5 100644
--- a/packages/stream-transform/test/option.parallel.coffee
+++ b/packages/stream-transform/test/option.parallel.coffee
@@ -1,7 +1,7 @@
-pad = require 'pad'
-generate = require 'csv-generate'
-transform = require '../lib'
+import pad from 'pad'
+import generate from 'csv-generate'
+import transform from '../lib/index.js'
letters = (number) ->
text = "#{number}"
diff --git a/packages/stream-transform/test/option.params.coffee b/packages/stream-transform/test/option.params.coffee
index b55f155fd..99eb9c9e4 100644
--- a/packages/stream-transform/test/option.params.coffee
+++ b/packages/stream-transform/test/option.params.coffee
@@ -1,6 +1,6 @@
-generate = require 'csv-generate'
-transform = require '../lib'
+import generate from 'csv-generate'
+import transform from '../lib/index.js'
describe 'option.params', ->
diff --git a/packages/stream-transform/test/samples.coffee b/packages/stream-transform/test/samples.coffee
index 7d2906b71..e85d0ac1e 100644
--- a/packages/stream-transform/test/samples.coffee
+++ b/packages/stream-transform/test/samples.coffee
@@ -1,19 +1,17 @@
-fs = require('fs').promises
-util = require 'util'
-path = require 'path'
-{exec} = require 'child_process'
-each = require 'each'
-
-it 'samples', ->
- dir = path.resolve __dirname, '../samples'
- samples = await fs.readdir dir
- each samples.filter( (sample) -> /\.js/.test.sample)
- .call (sample, callback) ->
- exec "node #{path.resolve dir, sample}", (err) ->
- callback err
- .promise()
-
-
-
-
+import fs from 'fs'
+import path from 'path'
+import {exec} from 'child_process'
+
+import { fileURLToPath } from 'url';
+__dirname = path.dirname fileURLToPath `import.meta.url`
+dir = path.resolve __dirname, '../samples'
+samples = fs.readdirSync dir
+
+describe 'Samples', ->
+
+ for sample in samples
+ continue unless /\.js$/.test sample
+ it "Sample #{sample}", (callback) ->
+ exec "node #{path.resolve dir, sample}", (err) ->
+ callback err
diff --git a/packages/stream-transform/test/state.finished.coffee b/packages/stream-transform/test/state.finished.coffee
index becb05f97..082bb20c2 100644
--- a/packages/stream-transform/test/state.finished.coffee
+++ b/packages/stream-transform/test/state.finished.coffee
@@ -1,5 +1,5 @@
-transform = require '../lib'
+import transform from '../lib/index.js'
describe 'state.finished', ->
diff --git a/packages/stream-transform/test/state.running.coffee b/packages/stream-transform/test/state.running.coffee
index e69fc3154..720cfcb9f 100644
--- a/packages/stream-transform/test/state.running.coffee
+++ b/packages/stream-transform/test/state.running.coffee
@@ -1,5 +1,5 @@
-transform = require '../lib'
+import transform from '../lib/index.js'
describe 'state.running', ->
diff --git a/packages/stream-transform/test/state.started.coffee b/packages/stream-transform/test/state.started.coffee
index fee4d5548..fe0151582 100644
--- a/packages/stream-transform/test/state.started.coffee
+++ b/packages/stream-transform/test/state.started.coffee
@@ -1,5 +1,5 @@
-transform = require '../lib'
+import transform from '../lib/index.js'
describe 'state.started', ->
diff --git a/packages/stream-transform/tsconfig.json b/packages/stream-transform/tsconfig.json
index 9f40782ea..4db508a7c 100644
--- a/packages/stream-transform/tsconfig.json
+++ b/packages/stream-transform/tsconfig.json
@@ -1,7 +1,8 @@
{
"compileOnSave": false,
"compilerOptions": {
- "target": "es6",
+ "esModuleInterop": true,
+ "module": "ES2020",
"moduleResolution": "node",
"strict": true,
}