From bd693009ff43b1b340d9d105e462cc0855d971ae Mon Sep 17 00:00:00 2001
From: Luke Karrys
Date: Wed, 8 Mar 2023 11:12:45 -0700
Subject: [PATCH] deps: npm update

---
 DEPENDENCIES.md | 3 +-
 node_modules/.gitignore | 3 +
 node_modules/@tufjs/models/LICENSE | 21 +
 .../models => @tufjs/models/dist}/base.d.ts | 9 +-
 .../models => @tufjs/models/dist}/base.js | 18 +-
 .../models/dist}/delegations.d.ts | 2 +-
 .../models/dist}/delegations.js | 12 +-
 node_modules/@tufjs/models/dist/error.d.ts | 12 +
 node_modules/@tufjs/models/dist/error.js | 27 +
 .../models => @tufjs/models/dist}/file.d.ts | 2 +-
 .../models => @tufjs/models/dist}/file.js | 10 +-
 node_modules/@tufjs/models/dist/index.d.ts | 10 +
 node_modules/@tufjs/models/dist/index.js | 24 +
 .../models => @tufjs/models/dist}/key.d.ts | 2 +-
 .../dist/models => @tufjs/models/dist}/key.js | 34 +-
 .../models/dist}/metadata.d.ts | 7 +-
 .../models => @tufjs/models/dist}/metadata.js | 41 +-
 .../models => @tufjs/models/dist}/role.d.ts | 2 +-
 .../models => @tufjs/models/dist}/role.js | 15 +-
 .../models => @tufjs/models/dist}/root.d.ts | 5 +-
 .../models => @tufjs/models/dist}/root.js | 25 +-
 .../models/dist}/signature.d.ts | 3 +-
 .../models/dist}/signature.js | 6 +
 .../models/dist}/snapshot.d.ts | 4 +-
 .../models => @tufjs/models/dist}/snapshot.js | 12 +-
 .../models/dist}/targets.d.ts | 5 +-
 .../models => @tufjs/models/dist}/targets.js | 17 +-
 .../models/dist}/timestamp.d.ts | 4 +-
 .../models/dist}/timestamp.js | 10 +-
 .../models}/dist/utils/guard.d.ts | 3 +-
 .../models}/dist/utils/guard.js | 8 +-
 .../@tufjs/models/dist/utils/index.d.ts | 3 +
 .../models}/dist/utils/index.js | 7 +-
 .../models}/dist/utils/json.d.ts | 0
 .../models}/dist/utils/json.js | 0
 .../models}/dist/utils/key.d.ts | 0
 .../models}/dist/utils/key.js | 0
 .../models}/dist/utils/oid.d.ts | 0
 .../models}/dist/utils/oid.js | 0
 .../models}/dist/utils/types.d.ts | 6 -
 .../@tufjs/models/dist/utils/types.js | 2 +
 .../models/dist/utils/verify.d.ts} | 0
 .../models/dist/utils/verify.js} | 0
 node_modules/@tufjs/models/package.json | 41 ++
 node_modules/agentkeepalive/History.md | 8 +
 node_modules/agentkeepalive/package.json | 20 +-
 node_modules/depd/History.md | 7 +
 node_modules/depd/LICENSE | 2 +-
 node_modules/depd/index.js | 42 +-
 .../depd/lib/compat/callsite-tostring.js | 103 ----
 .../depd/lib/compat/event-listener-count.js | 22 -
 node_modules/depd/lib/compat/index.js | 79 ---
 node_modules/depd/package.json | 22 +-
 node_modules/lru-cache/index.d.ts | 216 +++++++-
 node_modules/lru-cache/index.js | 254 +++++++--
 node_modules/lru-cache/index.mjs | 254 +++++++--
 node_modules/lru-cache/package.json | 4 +-
 .../readable-stream/lib/_stream_duplex.js | 53 +-
 .../lib/_stream_passthrough.js | 8 +-
 .../readable-stream/lib/_stream_readable.js | 524 +++++++-----------
 .../readable-stream/lib/_stream_transform.js | 74 ++-
 .../readable-stream/lib/_stream_writable.js | 327 +++++------
 .../lib/internal/streams/async_iterator.js | 204 +++----
 .../lib/internal/streams/buffer_list.js | 325 +++++------
 .../lib/internal/streams/destroy.js | 58 +-
 .../lib/internal/streams/end-of-stream.js | 42 +-
 .../lib/internal/streams/from.js | 42 +-
 .../lib/internal/streams/pipeline.js | 49 +-
 .../lib/internal/streams/state.js | 17 +-
 .../node_modules/readable-stream/package.json | 2 +-
 node_modules/spdx-correct/index.js | 26 +-
 node_modules/spdx-correct/package.json | 17 +-
 node_modules/tuf-js/LICENSE | 2 +-
 .../tuf-js/dist/{utils => }/config.d.ts | 0
 .../tuf-js/dist/{utils => }/config.js | 0
 node_modules/tuf-js/dist/error.d.ts | 8 -
 node_modules/tuf-js/dist/error.js | 16 +-
 node_modules/tuf-js/dist/fetcher.d.ts | 8 +-
 node_modules/tuf-js/dist/fetcher.js | 6 +-
 node_modules/tuf-js/dist/index.d.ts | 4 +-
 node_modules/tuf-js/dist/index.js | 6 +-
 node_modules/tuf-js/dist/models/index.d.ts | 5 -
 node_modules/tuf-js/dist/models/index.js | 13 -
 node_modules/tuf-js/dist/store.d.ts | 2 +-
 node_modules/tuf-js/dist/store.js | 33 +-
 node_modules/tuf-js/dist/updater.d.ts | 8 +-
 node_modules/tuf-js/dist/updater.js | 24 +-
 node_modules/tuf-js/dist/utils/index.d.ts | 5 -
 node_modules/tuf-js/dist/utils/types.js | 10 -
 node_modules/tuf-js/package.json | 42 +-
 package-lock.json | 231 ++++----
 91 files changed, 1921 insertions(+), 1718 deletions(-)
 create mode 100644 node_modules/@tufjs/models/LICENSE
 rename node_modules/{tuf-js/dist/models => @tufjs/models/dist}/base.d.ts (78%)
 rename node_modules/{tuf-js/dist/models => @tufjs/models/dist}/base.js (81%)
 rename node_modules/{tuf-js/dist/models => @tufjs/models/dist}/delegations.d.ts (94%)
 rename node_modules/{tuf-js/dist/models => @tufjs/models/dist}/delegations.js (93%)
 create mode 100644 node_modules/@tufjs/models/dist/error.d.ts
 create mode 100644 node_modules/@tufjs/models/dist/error.js
 rename node_modules/{tuf-js/dist/models => @tufjs/models/dist}/file.d.ts (95%)
 rename node_modules/{tuf-js/dist/models => @tufjs/models/dist}/file.js (95%)
 create mode 100644 node_modules/@tufjs/models/dist/index.d.ts
 create mode 100644 node_modules/@tufjs/models/dist/index.js
 rename node_modules/{tuf-js/dist/models => @tufjs/models/dist}/key.d.ts (91%)
 rename node_modules/{tuf-js/dist/models => @tufjs/models/dist}/key.js (68%)
 rename node_modules/{tuf-js/dist/models => @tufjs/models/dist}/metadata.d.ts (90%)
 rename node_modules/{tuf-js/dist/models => @tufjs/models/dist}/metadata.js (82%)
 rename node_modules/{tuf-js/dist/models => @tufjs/models/dist}/role.d.ts (98%)
 rename node_modules/{tuf-js/dist/models => @tufjs/models/dist}/role.js (96%)
 rename node_modules/{tuf-js/dist/models => @tufjs/models/dist}/root.d.ts (85%)
 rename node_modules/{tuf-js/dist/models => @tufjs/models/dist}/root.js (84%)
 rename node_modules/{tuf-js/dist/models => @tufjs/models/dist}/signature.d.ts (88%)
 rename node_modules/{tuf-js/dist/models => @tufjs/models/dist}/signature.js (89%)
 rename node_modules/{tuf-js/dist/models => @tufjs/models/dist}/snapshot.d.ts (87%)
 rename node_modules/{tuf-js/dist/models => @tufjs/models/dist}/snapshot.js (89%)
 rename node_modules/{tuf-js/dist/models => @tufjs/models/dist}/targets.d.ts (80%)
 rename node_modules/{tuf-js/dist/models => @tufjs/models/dist}/targets.js (88%)
 rename node_modules/{tuf-js/dist/models => @tufjs/models/dist}/timestamp.d.ts (85%)
 rename node_modules/{tuf-js/dist/models => @tufjs/models/dist}/timestamp.js (86%)
 rename node_modules/{tuf-js => @tufjs/models}/dist/utils/guard.d.ts (78%)
 rename node_modules/{tuf-js => @tufjs/models}/dist/utils/guard.js (74%)
 create mode 100644 node_modules/@tufjs/models/dist/utils/index.d.ts
 rename node_modules/{tuf-js => @tufjs/models}/dist/utils/index.js (79%)
 rename node_modules/{tuf-js => @tufjs/models}/dist/utils/json.d.ts (100%)
 rename node_modules/{tuf-js => @tufjs/models}/dist/utils/json.js (100%)
 rename node_modules/{tuf-js => @tufjs/models}/dist/utils/key.d.ts (100%)
 rename node_modules/{tuf-js => @tufjs/models}/dist/utils/key.js (100%)
 rename node_modules/{tuf-js => @tufjs/models}/dist/utils/oid.d.ts (100%)
 rename node_modules/{tuf-js => @tufjs/models}/dist/utils/oid.js (100%)
 rename node_modules/{tuf-js => @tufjs/models}/dist/utils/types.d.ts (51%)
 create mode 100644 node_modules/@tufjs/models/dist/utils/types.js
 rename node_modules/{tuf-js/dist/utils/signer.d.ts => @tufjs/models/dist/utils/verify.d.ts} (100%)
 rename node_modules/{tuf-js/dist/utils/signer.js => @tufjs/models/dist/utils/verify.js} (100%)
 create mode 100644 node_modules/@tufjs/models/package.json
 delete mode 100644 node_modules/depd/lib/compat/callsite-tostring.js
 delete mode 100644 node_modules/depd/lib/compat/event-listener-count.js
 delete mode 100644 node_modules/depd/lib/compat/index.js
 rename node_modules/tuf-js/dist/{utils => }/config.d.ts (100%)
 rename node_modules/tuf-js/dist/{utils => }/config.js (100%)
 delete mode 100644 node_modules/tuf-js/dist/models/index.d.ts
 delete mode 100644 node_modules/tuf-js/dist/models/index.js
 delete mode 100644 node_modules/tuf-js/dist/utils/index.d.ts
 delete mode 100644 node_modules/tuf-js/dist/utils/types.js

diff --git a/DEPENDENCIES.md b/DEPENDENCIES.md
index 89c0eb4103615..d63945a4b9edd 100644
--- a/DEPENDENCIES.md
+++ b/DEPENDENCIES.md
@@ -764,7 +764,8 @@ graph LR;
   tar-->mkdirp;
   tar-->yallist;
   tuf-js-->make-fetch-happen;
-  tuf-js-->minimatch;
+  tuf-js-->tufjs-models["@tufjs/models"];
+  tufjs-models-->minimatch;
   unique-filename-->unique-slug;
   unique-slug-->imurmurhash;
   validate-npm-package-license-->spdx-correct;
diff --git a/node_modules/.gitignore b/node_modules/.gitignore
index 73b8929986036..4de626fa8c6c4 100644
--- a/node_modules/.gitignore
+++ b/node_modules/.gitignore
@@ -32,6 +32,9 @@
 !/@tootallnate/
 /@tootallnate/*
 !/@tootallnate/once
+!/@tufjs/
+/@tufjs/*
+!/@tufjs/models
 !/abbrev
 !/abort-controller
 !/agent-base
diff --git a/node_modules/@tufjs/models/LICENSE b/node_modules/@tufjs/models/LICENSE
new file mode 100644
index 0000000000000..420700f5d3765
--- /dev/null
+++ b/node_modules/@tufjs/models/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2022 GitHub and the TUF Contributors
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
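
The rename list above is the heart of this update: everything under tuf-js/dist/models, plus most of tuf-js/dist/utils, moves into the new @tufjs/models package, and the file diffs that follow are largely the mechanical import-path fallout. A minimal sketch of what the move means for consumers, assuming only what the rename list and the new dist/index.d.ts (later in this patch) show; the require lines are illustrative, not taken from this diff:

    // Before: the model classes were only reachable by deep-importing tuf-js.
    // const { Metadata } = require('tuf-js/dist/models/metadata');

    // After: they ship as a standalone package with a top-level index.
    const { Metadata, MetadataKind } = require('@tufjs/models');

    // MetadataKind moved from tuf-js/dist/utils/types into base.js. Its
    // companion guard isMetadataKind is exported from base.js but not
    // re-exported from the index, so a deep path is assumed here:
    const { isMetadataKind } = require('@tufjs/models/dist/base');

    console.log(typeof Metadata.fromJSON);   // 'function'
    console.log(MetadataKind.Root);          // 'root'
    console.log(isMetadataKind('snapshot')); // true
    console.log(isMetadataKind('bogus'));    // false
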
diff --git a/node_modules/tuf-js/dist/models/base.d.ts b/node_modules/@tufjs/models/dist/base.d.ts similarity index 78% rename from node_modules/tuf-js/dist/models/base.d.ts rename to node_modules/@tufjs/models/dist/base.d.ts index 4c5e0aaf4faaa..4cc2395328592 100644 --- a/node_modules/tuf-js/dist/models/base.d.ts +++ b/node_modules/@tufjs/models/dist/base.d.ts @@ -1,5 +1,5 @@ -import { JSONObject, JSONValue } from '../utils/types'; import { Signature } from './signature'; +import { JSONObject, JSONValue } from './utils'; export interface Signable { signatures: Record; signed: Signed; @@ -10,6 +10,13 @@ export interface SignedOptions { expires?: string; unrecognizedFields?: Record; } +export declare enum MetadataKind { + Root = "root", + Timestamp = "timestamp", + Snapshot = "snapshot", + Targets = "targets" +} +export declare function isMetadataKind(value: unknown): value is MetadataKind; /*** * A base class for the signed part of TUF metadata. * diff --git a/node_modules/tuf-js/dist/models/base.js b/node_modules/@tufjs/models/dist/base.js similarity index 81% rename from node_modules/tuf-js/dist/models/base.js rename to node_modules/@tufjs/models/dist/base.js index 7658567e2d602..d89a089c33092 100644 --- a/node_modules/tuf-js/dist/models/base.js +++ b/node_modules/@tufjs/models/dist/base.js @@ -3,11 +3,23 @@ var __importDefault = (this && this.__importDefault) || function (mod) { return (mod && mod.__esModule) ? mod : { "default": mod }; }; Object.defineProperty(exports, "__esModule", { value: true }); -exports.Signed = void 0; +exports.Signed = exports.isMetadataKind = exports.MetadataKind = void 0; const util_1 = __importDefault(require("util")); -const error_1 = require("../error"); -const utils_1 = require("../utils"); +const error_1 = require("./error"); +const utils_1 = require("./utils"); const SPECIFICATION_VERSION = ['1', '0', '31']; +var MetadataKind; +(function (MetadataKind) { + MetadataKind["Root"] = "root"; + MetadataKind["Timestamp"] = "timestamp"; + MetadataKind["Snapshot"] = "snapshot"; + MetadataKind["Targets"] = "targets"; +})(MetadataKind = exports.MetadataKind || (exports.MetadataKind = {})); +function isMetadataKind(value) { + return (typeof value === 'string' && + Object.values(MetadataKind).includes(value)); +} +exports.isMetadataKind = isMetadataKind; /*** * A base class for the signed part of TUF metadata. 
* diff --git a/node_modules/tuf-js/dist/models/delegations.d.ts b/node_modules/@tufjs/models/dist/delegations.d.ts similarity index 94% rename from node_modules/tuf-js/dist/models/delegations.d.ts rename to node_modules/@tufjs/models/dist/delegations.d.ts index b53862aa865be..357e9dfeb81ab 100644 --- a/node_modules/tuf-js/dist/models/delegations.d.ts +++ b/node_modules/@tufjs/models/dist/delegations.d.ts @@ -1,6 +1,6 @@ -import { JSONObject, JSONValue } from '../utils/types'; import { Key } from './key'; import { DelegatedRole, SuccinctRoles } from './role'; +import { JSONObject, JSONValue } from './utils'; type DelegatedRoleMap = Record; type KeyMap = Record; interface DelegationsOptions { diff --git a/node_modules/tuf-js/dist/models/delegations.js b/node_modules/@tufjs/models/dist/delegations.js similarity index 93% rename from node_modules/tuf-js/dist/models/delegations.js rename to node_modules/@tufjs/models/dist/delegations.js index 302bd52d8d885..7165f1e244393 100644 --- a/node_modules/tuf-js/dist/models/delegations.js +++ b/node_modules/@tufjs/models/dist/delegations.js @@ -5,10 +5,10 @@ var __importDefault = (this && this.__importDefault) || function (mod) { Object.defineProperty(exports, "__esModule", { value: true }); exports.Delegations = void 0; const util_1 = __importDefault(require("util")); -const error_1 = require("../error"); -const guard_1 = require("../utils/guard"); +const error_1 = require("./error"); const key_1 = require("./key"); const role_1 = require("./role"); +const utils_1 = require("./utils"); /** * A container object storing information about all delegations. * @@ -67,7 +67,7 @@ class Delegations { static fromJSON(data) { const { keys, roles, succinct_roles, ...unrecognizedFields } = data; let succinctRoles; - if ((0, guard_1.isObject)(succinct_roles)) { + if (utils_1.guard.isObject(succinct_roles)) { succinctRoles = role_1.SuccinctRoles.fromJSON(succinct_roles); } return new Delegations({ @@ -89,7 +89,7 @@ function rolesToJSON(roles) { return Object.values(roles).map((role) => role.toJSON()); } function keysFromJSON(data) { - if (!(0, guard_1.isObjectRecord)(data)) { + if (!utils_1.guard.isObjectRecord(data)) { throw new TypeError('keys is malformed'); } return Object.entries(data).reduce((acc, [keyID, keyData]) => ({ @@ -99,8 +99,8 @@ function keysFromJSON(data) { } function rolesFromJSON(data) { let roleMap; - if ((0, guard_1.isDefined)(data)) { - if (!(0, guard_1.isObjectArray)(data)) { + if (utils_1.guard.isDefined(data)) { + if (!utils_1.guard.isObjectArray(data)) { throw new TypeError('roles is malformed'); } roleMap = data.reduce((acc, role) => { diff --git a/node_modules/@tufjs/models/dist/error.d.ts b/node_modules/@tufjs/models/dist/error.d.ts new file mode 100644 index 0000000000000..e03d05a381399 --- /dev/null +++ b/node_modules/@tufjs/models/dist/error.d.ts @@ -0,0 +1,12 @@ +export declare class ValueError extends Error { +} +export declare class RepositoryError extends Error { +} +export declare class UnsignedMetadataError extends RepositoryError { +} +export declare class LengthOrHashMismatchError extends RepositoryError { +} +export declare class CryptoError extends Error { +} +export declare class UnsupportedAlgorithmError extends CryptoError { +} diff --git a/node_modules/@tufjs/models/dist/error.js b/node_modules/@tufjs/models/dist/error.js new file mode 100644 index 0000000000000..ba80698747ba0 --- /dev/null +++ b/node_modules/@tufjs/models/dist/error.js @@ -0,0 +1,27 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: 
true }); +exports.UnsupportedAlgorithmError = exports.CryptoError = exports.LengthOrHashMismatchError = exports.UnsignedMetadataError = exports.RepositoryError = exports.ValueError = void 0; +// An error about insufficient values +class ValueError extends Error { +} +exports.ValueError = ValueError; +// An error with a repository's state, such as a missing file. +// It covers all exceptions that come from the repository side when +// looking from the perspective of users of metadata API or ngclient. +class RepositoryError extends Error { +} +exports.RepositoryError = RepositoryError; +// An error about metadata object with insufficient threshold of signatures. +class UnsignedMetadataError extends RepositoryError { +} +exports.UnsignedMetadataError = UnsignedMetadataError; +// An error while checking the length and hash values of an object. +class LengthOrHashMismatchError extends RepositoryError { +} +exports.LengthOrHashMismatchError = LengthOrHashMismatchError; +class CryptoError extends Error { +} +exports.CryptoError = CryptoError; +class UnsupportedAlgorithmError extends CryptoError { +} +exports.UnsupportedAlgorithmError = UnsupportedAlgorithmError; diff --git a/node_modules/tuf-js/dist/models/file.d.ts b/node_modules/@tufjs/models/dist/file.d.ts similarity index 95% rename from node_modules/tuf-js/dist/models/file.d.ts rename to node_modules/@tufjs/models/dist/file.d.ts index 9678cf1efefd5..7abeb2bb03fb4 100644 --- a/node_modules/tuf-js/dist/models/file.d.ts +++ b/node_modules/@tufjs/models/dist/file.d.ts @@ -1,7 +1,7 @@ /// /// import { Readable } from 'stream'; -import { JSONObject, JSONValue } from '../utils/types'; +import { JSONObject, JSONValue } from './utils'; interface MetaFileOptions { version: number; length?: number; diff --git a/node_modules/tuf-js/dist/models/file.js b/node_modules/@tufjs/models/dist/file.js similarity index 95% rename from node_modules/tuf-js/dist/models/file.js rename to node_modules/@tufjs/models/dist/file.js index d6d535f6ca787..b35fe5950bbb7 100644 --- a/node_modules/tuf-js/dist/models/file.js +++ b/node_modules/@tufjs/models/dist/file.js @@ -6,8 +6,8 @@ Object.defineProperty(exports, "__esModule", { value: true }); exports.TargetFile = exports.MetaFile = void 0; const crypto_1 = __importDefault(require("crypto")); const util_1 = __importDefault(require("util")); -const error_1 = require("../error"); -const guard_1 = require("../utils/guard"); +const error_1 = require("./error"); +const utils_1 = require("./utils"); // A container with information about a particular metadata file. // // This class is used for Timestamp and Snapshot metadata. 
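
The file.js hunks on either side of this point show a refactor that repeats through every model file below: call sites stop importing ../utils/guard directly, as in (0, guard_1.isDefined)(length), and instead go through the namespace re-exports that the new dist/utils/index.d.ts (later in this patch) declares with export * as guard from './guard' and export * as crypto from './verify'. A minimal sketch of that pattern; the deep utils path is an assumption, since it is not part of the package's public index, and the values are illustrative:

    // In the compiled CommonJS shown later in this patch,
    // `export * as guard` becomes essentially
    //   exports.guard = __importStar(require('./guard'))
    // so one import of the utils index gives dot-access to every predicate:
    const utils = require('@tufjs/models/dist/utils');

    console.log(utils.guard.isDefined(42));              // true
    console.log(utils.guard.isDefined(undefined));       // false
    console.log(utils.guard.isStringRecord({ a: 'b' })); // true
    console.log(utils.guard.isObjectArray([{}, {}]));    // true
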
@@ -75,10 +75,10 @@ class MetaFile { if (typeof version !== 'number') { throw new TypeError('version must be a number'); } - if ((0, guard_1.isDefined)(length) && typeof length !== 'number') { + if (utils_1.guard.isDefined(length) && typeof length !== 'number') { throw new TypeError('length must be a number'); } - if ((0, guard_1.isDefined)(hashes) && !(0, guard_1.isStringRecord)(hashes)) { + if (utils_1.guard.isDefined(hashes) && !utils_1.guard.isStringRecord(hashes)) { throw new TypeError('hashes must be string keys and values'); } return new MetaFile({ @@ -163,7 +163,7 @@ class TargetFile { if (typeof length !== 'number') { throw new TypeError('length must be a number'); } - if (!(0, guard_1.isStringRecord)(hashes)) { + if (!utils_1.guard.isStringRecord(hashes)) { throw new TypeError('hashes must have string keys and values'); } return new TargetFile({ diff --git a/node_modules/@tufjs/models/dist/index.d.ts b/node_modules/@tufjs/models/dist/index.d.ts new file mode 100644 index 0000000000000..f9768beaea200 --- /dev/null +++ b/node_modules/@tufjs/models/dist/index.d.ts @@ -0,0 +1,10 @@ +export { MetadataKind } from './base'; +export { ValueError } from './error'; +export { MetaFile, TargetFile } from './file'; +export { Key } from './key'; +export { Metadata } from './metadata'; +export { Root } from './root'; +export { Signature } from './signature'; +export { Snapshot } from './snapshot'; +export { Targets } from './targets'; +export { Timestamp } from './timestamp'; diff --git a/node_modules/@tufjs/models/dist/index.js b/node_modules/@tufjs/models/dist/index.js new file mode 100644 index 0000000000000..a4dc783659f04 --- /dev/null +++ b/node_modules/@tufjs/models/dist/index.js @@ -0,0 +1,24 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Timestamp = exports.Targets = exports.Snapshot = exports.Signature = exports.Root = exports.Metadata = exports.Key = exports.TargetFile = exports.MetaFile = exports.ValueError = exports.MetadataKind = void 0; +var base_1 = require("./base"); +Object.defineProperty(exports, "MetadataKind", { enumerable: true, get: function () { return base_1.MetadataKind; } }); +var error_1 = require("./error"); +Object.defineProperty(exports, "ValueError", { enumerable: true, get: function () { return error_1.ValueError; } }); +var file_1 = require("./file"); +Object.defineProperty(exports, "MetaFile", { enumerable: true, get: function () { return file_1.MetaFile; } }); +Object.defineProperty(exports, "TargetFile", { enumerable: true, get: function () { return file_1.TargetFile; } }); +var key_1 = require("./key"); +Object.defineProperty(exports, "Key", { enumerable: true, get: function () { return key_1.Key; } }); +var metadata_1 = require("./metadata"); +Object.defineProperty(exports, "Metadata", { enumerable: true, get: function () { return metadata_1.Metadata; } }); +var root_1 = require("./root"); +Object.defineProperty(exports, "Root", { enumerable: true, get: function () { return root_1.Root; } }); +var signature_1 = require("./signature"); +Object.defineProperty(exports, "Signature", { enumerable: true, get: function () { return signature_1.Signature; } }); +var snapshot_1 = require("./snapshot"); +Object.defineProperty(exports, "Snapshot", { enumerable: true, get: function () { return snapshot_1.Snapshot; } }); +var targets_1 = require("./targets"); +Object.defineProperty(exports, "Targets", { enumerable: true, get: function () { return targets_1.Targets; } }); +var timestamp_1 = require("./timestamp"); 
+Object.defineProperty(exports, "Timestamp", { enumerable: true, get: function () { return timestamp_1.Timestamp; } }); diff --git a/node_modules/tuf-js/dist/models/key.d.ts b/node_modules/@tufjs/models/dist/key.d.ts similarity index 91% rename from node_modules/tuf-js/dist/models/key.d.ts rename to node_modules/@tufjs/models/dist/key.d.ts index 160407ae70ee3..9f90b7ee8969b 100644 --- a/node_modules/tuf-js/dist/models/key.d.ts +++ b/node_modules/@tufjs/models/dist/key.d.ts @@ -1,5 +1,5 @@ -import { JSONObject, JSONValue } from '../utils/types'; import { Signable } from './base'; +import { JSONObject, JSONValue } from './utils'; export interface KeyOptions { keyID: string; keyType: string; diff --git a/node_modules/tuf-js/dist/models/key.js b/node_modules/@tufjs/models/dist/key.js similarity index 68% rename from node_modules/tuf-js/dist/models/key.js rename to node_modules/@tufjs/models/dist/key.js index 33ff514fc178f..5e55b09d7c6dd 100644 --- a/node_modules/tuf-js/dist/models/key.js +++ b/node_modules/@tufjs/models/dist/key.js @@ -1,37 +1,13 @@ "use strict"; -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { return m[k]; } }; - } - Object.defineProperty(o, k2, desc); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; var __importDefault = (this && this.__importDefault) || function (mod) { return (mod && mod.__esModule) ? mod : { "default": mod }; }; Object.defineProperty(exports, "__esModule", { value: true }); exports.Key = void 0; const util_1 = __importDefault(require("util")); -const error_1 = require("../error"); -const guard_1 = require("../utils/guard"); -const key_1 = require("../utils/key"); -const signer = __importStar(require("../utils/signer")); +const error_1 = require("./error"); +const utils_1 = require("./utils"); +const key_1 = require("./utils/key"); // A container class representing the public portion of a Key. 
class Key { constructor(options) { @@ -57,7 +33,7 @@ class Key { }); const signedData = metadata.signed.toJSON(); try { - if (!signer.verifySignature(signedData, publicKey, signature.sig)) { + if (!utils_1.crypto.verifySignature(signedData, publicKey, signature.sig)) { throw new error_1.UnsignedMetadataError(`failed to verify ${this.keyID} signature`); } } @@ -94,7 +70,7 @@ class Key { if (typeof scheme !== 'string') { throw new TypeError('scheme must be a string'); } - if (!(0, guard_1.isStringRecord)(keyval)) { + if (!utils_1.guard.isStringRecord(keyval)) { throw new TypeError('keyval must be a string record'); } return new Key({ diff --git a/node_modules/tuf-js/dist/models/metadata.d.ts b/node_modules/@tufjs/models/dist/metadata.d.ts similarity index 90% rename from node_modules/tuf-js/dist/models/metadata.d.ts rename to node_modules/@tufjs/models/dist/metadata.d.ts index 39abf03406449..55c9294a296eb 100644 --- a/node_modules/tuf-js/dist/models/metadata.d.ts +++ b/node_modules/@tufjs/models/dist/metadata.d.ts @@ -1,10 +1,11 @@ -import { JSONObject, JSONValue, MetadataKind } from '../utils/types'; -import { Signable } from './base'; +/// +import { MetadataKind, Signable } from './base'; import { Root } from './root'; import { Signature } from './signature'; import { Snapshot } from './snapshot'; import { Targets } from './targets'; import { Timestamp } from './timestamp'; +import { JSONObject, JSONValue } from './utils'; type MetadataType = Root | Timestamp | Snapshot | Targets; /*** * A container for signed TUF metadata. @@ -35,8 +36,10 @@ export declare class Metadata implements Signable { signatures: Record; unrecognizedFields: Record; constructor(signed: T, signatures?: Record, unrecognizedFields?: Record); + sign(signer: (data: Buffer) => Signature, append?: boolean): void; verifyDelegate(delegatedRole: string, delegatedMetadata: Metadata): void; equals(other: T): boolean; + toJSON(): JSONObject; static fromJSON(type: MetadataKind.Root, data: JSONObject): Metadata; static fromJSON(type: MetadataKind.Timestamp, data: JSONObject): Metadata; static fromJSON(type: MetadataKind.Snapshot, data: JSONObject): Metadata; diff --git a/node_modules/tuf-js/dist/models/metadata.js b/node_modules/@tufjs/models/dist/metadata.js similarity index 82% rename from node_modules/tuf-js/dist/models/metadata.js rename to node_modules/@tufjs/models/dist/metadata.js index 11c3c546822ac..945d3a42a7cfb 100644 --- a/node_modules/tuf-js/dist/models/metadata.js +++ b/node_modules/@tufjs/models/dist/metadata.js @@ -5,14 +5,15 @@ var __importDefault = (this && this.__importDefault) || function (mod) { Object.defineProperty(exports, "__esModule", { value: true }); exports.Metadata = void 0; const util_1 = __importDefault(require("util")); -const error_1 = require("../error"); -const guard_1 = require("../utils/guard"); -const types_1 = require("../utils/types"); +const base_1 = require("./base"); +const error_1 = require("./error"); const root_1 = require("./root"); const signature_1 = require("./signature"); const snapshot_1 = require("./snapshot"); const targets_1 = require("./targets"); const timestamp_1 = require("./timestamp"); +const utils_1 = require("./utils"); +const json_1 = require("./utils/json"); /*** * A container for signed TUF metadata. 
* @@ -43,15 +44,23 @@ class Metadata { this.signatures = signatures || {}; this.unrecognizedFields = unrecognizedFields || {}; } + sign(signer, append = true) { + const bytes = (0, json_1.canonicalize)(this.signed.toJSON()); + const signature = signer(bytes); + if (!append) { + this.signatures = {}; + } + this.signatures[signature.keyID] = signature; + } verifyDelegate(delegatedRole, delegatedMetadata) { let role; let keys = {}; switch (this.signed.type) { - case types_1.MetadataKind.Root: + case base_1.MetadataKind.Root: keys = this.signed.keys; role = this.signed.roles[delegatedRole]; break; - case types_1.MetadataKind.Targets: + case base_1.MetadataKind.Targets: if (!this.signed.delegations) { throw new error_1.ValueError(`No delegations found for ${delegatedRole}`); } @@ -98,9 +107,19 @@ class Metadata { util_1.default.isDeepStrictEqual(this.signatures, other.signatures) && util_1.default.isDeepStrictEqual(this.unrecognizedFields, other.unrecognizedFields)); } + toJSON() { + const signatures = Object.values(this.signatures).map((signature) => { + return signature.toJSON(); + }); + return { + signatures, + signed: this.signed.toJSON(), + ...this.unrecognizedFields, + }; + } static fromJSON(type, data) { const { signed, signatures, ...rest } = data; - if (!(0, guard_1.isDefined)(signed) || !(0, guard_1.isObject)(signed)) { + if (!utils_1.guard.isDefined(signed) || !utils_1.guard.isObject(signed)) { throw new TypeError('signed is not defined'); } if (type !== signed._type) { @@ -108,16 +127,16 @@ class Metadata { } let signedObj; switch (type) { - case types_1.MetadataKind.Root: + case base_1.MetadataKind.Root: signedObj = root_1.Root.fromJSON(signed); break; - case types_1.MetadataKind.Timestamp: + case base_1.MetadataKind.Timestamp: signedObj = timestamp_1.Timestamp.fromJSON(signed); break; - case types_1.MetadataKind.Snapshot: + case base_1.MetadataKind.Snapshot: signedObj = snapshot_1.Snapshot.fromJSON(signed); break; - case types_1.MetadataKind.Targets: + case base_1.MetadataKind.Targets: signedObj = targets_1.Targets.fromJSON(signed); break; default: @@ -129,7 +148,7 @@ class Metadata { } exports.Metadata = Metadata; function signaturesFromJSON(data) { - if (!(0, guard_1.isObjectArray)(data)) { + if (!utils_1.guard.isObjectArray(data)) { throw new TypeError('signatures is not an array'); } return data.reduce((acc, sigData) => { diff --git a/node_modules/tuf-js/dist/models/role.d.ts b/node_modules/@tufjs/models/dist/role.d.ts similarity index 98% rename from node_modules/tuf-js/dist/models/role.d.ts rename to node_modules/@tufjs/models/dist/role.d.ts index 4575300fb972f..b3a6efae2cac3 100644 --- a/node_modules/tuf-js/dist/models/role.d.ts +++ b/node_modules/@tufjs/models/dist/role.d.ts @@ -1,4 +1,4 @@ -import { JSONObject, JSONValue } from '../utils/types'; +import { JSONObject, JSONValue } from './utils'; export declare const TOP_LEVEL_ROLE_NAMES: string[]; export interface RoleOptions { keyIDs: string[]; diff --git a/node_modules/tuf-js/dist/models/role.js b/node_modules/@tufjs/models/dist/role.js similarity index 96% rename from node_modules/tuf-js/dist/models/role.js rename to node_modules/@tufjs/models/dist/role.js index da80a09b8b09f..143c5dc608966 100644 --- a/node_modules/tuf-js/dist/models/role.js +++ b/node_modules/@tufjs/models/dist/role.js @@ -7,8 +7,8 @@ exports.SuccinctRoles = exports.DelegatedRole = exports.Role = exports.TOP_LEVEL const crypto_1 = __importDefault(require("crypto")); const minimatch_1 = __importDefault(require("minimatch")); const util_1 = 
__importDefault(require("util")); -const error_1 = require("../error"); -const guard_1 = require("../utils/guard"); +const error_1 = require("./error"); +const utils_1 = require("./utils"); exports.TOP_LEVEL_ROLE_NAMES = [ 'root', 'targets', @@ -51,7 +51,7 @@ class Role { } static fromJSON(data) { const { keyids, threshold, ...rest } = data; - if (!(0, guard_1.isStringArray)(keyids)) { + if (!utils_1.guard.isStringArray(keyids)) { throw new TypeError('keyids must be an array'); } if (typeof threshold !== 'number') { @@ -128,7 +128,7 @@ class DelegatedRole extends Role { } static fromJSON(data) { const { keyids, threshold, name, terminating, paths, path_hash_prefixes, ...rest } = data; - if (!(0, guard_1.isStringArray)(keyids)) { + if (!utils_1.guard.isStringArray(keyids)) { throw new TypeError('keyids must be an array of strings'); } if (typeof threshold !== 'number') { @@ -140,10 +140,11 @@ class DelegatedRole extends Role { if (typeof terminating !== 'boolean') { throw new TypeError('terminating must be a boolean'); } - if ((0, guard_1.isDefined)(paths) && !(0, guard_1.isStringArray)(paths)) { + if (utils_1.guard.isDefined(paths) && !utils_1.guard.isStringArray(paths)) { throw new TypeError('paths must be an array of strings'); } - if ((0, guard_1.isDefined)(path_hash_prefixes) && !(0, guard_1.isStringArray)(path_hash_prefixes)) { + if (utils_1.guard.isDefined(path_hash_prefixes) && + !utils_1.guard.isStringArray(path_hash_prefixes)) { throw new TypeError('path_hash_prefixes must be an array of strings'); } return new DelegatedRole({ @@ -274,7 +275,7 @@ class SuccinctRoles extends Role { } static fromJSON(data) { const { keyids, threshold, bit_length, name_prefix, ...rest } = data; - if (!(0, guard_1.isStringArray)(keyids)) { + if (!utils_1.guard.isStringArray(keyids)) { throw new TypeError('keyids must be an array of strings'); } if (typeof threshold !== 'number') { diff --git a/node_modules/tuf-js/dist/models/root.d.ts b/node_modules/@tufjs/models/dist/root.d.ts similarity index 85% rename from node_modules/tuf-js/dist/models/root.d.ts rename to node_modules/@tufjs/models/dist/root.d.ts index 66356628f4b8a..eb5eb8dede98b 100644 --- a/node_modules/tuf-js/dist/models/root.d.ts +++ b/node_modules/@tufjs/models/dist/root.d.ts @@ -1,7 +1,7 @@ -import { JSONObject, MetadataKind } from '../utils/types'; -import { Signed, SignedOptions } from './base'; +import { MetadataKind, Signed, SignedOptions } from './base'; import { Key } from './key'; import { Role } from './role'; +import { JSONObject } from './utils'; type KeyMap = Record; type RoleMap = Record; export interface RootOptions extends SignedOptions { @@ -21,6 +21,7 @@ export declare class Root extends Signed { readonly roles: RoleMap; readonly consistentSnapshot: boolean; constructor(options: RootOptions); + addKey(key: Key, role: string): void; equals(other: Root): boolean; toJSON(): JSONObject; static fromJSON(data: JSONObject): Root; diff --git a/node_modules/tuf-js/dist/models/root.js b/node_modules/@tufjs/models/dist/root.js similarity index 84% rename from node_modules/tuf-js/dist/models/root.js rename to node_modules/@tufjs/models/dist/root.js index 574ec1acdcc39..36d0ef0f186d1 100644 --- a/node_modules/tuf-js/dist/models/root.js +++ b/node_modules/@tufjs/models/dist/root.js @@ -5,12 +5,11 @@ var __importDefault = (this && this.__importDefault) || function (mod) { Object.defineProperty(exports, "__esModule", { value: true }); exports.Root = void 0; const util_1 = __importDefault(require("util")); -const error_1 = 
require("../error"); -const guard_1 = require("../utils/guard"); -const types_1 = require("../utils/types"); const base_1 = require("./base"); +const error_1 = require("./error"); const key_1 = require("./key"); const role_1 = require("./role"); +const utils_1 = require("./utils"); /** * A container for the signed part of root metadata. * @@ -20,7 +19,7 @@ const role_1 = require("./role"); class Root extends base_1.Signed { constructor(options) { super(options); - this.type = types_1.MetadataKind.Root; + this.type = base_1.MetadataKind.Root; this.keys = options.keys || {}; this.consistentSnapshot = options.consistentSnapshot ?? true; if (!options.roles) { @@ -37,6 +36,15 @@ class Root extends base_1.Signed { this.roles = options.roles; } } + addKey(key, role) { + if (!this.roles[role]) { + throw new error_1.ValueError(`role ${role} does not exist`); + } + if (!this.roles[role].keyIDs.includes(key.keyID)) { + this.roles[role].keyIDs.push(key.keyID); + } + this.keys[key.keyID] = key; + } equals(other) { if (!(other instanceof Root)) { return false; @@ -48,6 +56,7 @@ class Root extends base_1.Signed { } toJSON() { return { + _type: this.type, spec_version: this.specVersion, version: this.version, expires: this.expires, @@ -81,8 +90,8 @@ function rolesToJSON(roles) { } function keysFromJSON(data) { let keys; - if ((0, guard_1.isDefined)(data)) { - if (!(0, guard_1.isObjectRecord)(data)) { + if (utils_1.guard.isDefined(data)) { + if (!utils_1.guard.isObjectRecord(data)) { throw new TypeError('keys must be an object'); } keys = Object.entries(data).reduce((acc, [keyID, keyData]) => ({ @@ -94,8 +103,8 @@ function keysFromJSON(data) { } function rolesFromJSON(data) { let roles; - if ((0, guard_1.isDefined)(data)) { - if (!(0, guard_1.isObjectRecord)(data)) { + if (utils_1.guard.isDefined(data)) { + if (!utils_1.guard.isObjectRecord(data)) { throw new TypeError('roles must be an object'); } roles = Object.entries(data).reduce((acc, [roleName, roleData]) => ({ diff --git a/node_modules/tuf-js/dist/models/signature.d.ts b/node_modules/@tufjs/models/dist/signature.d.ts similarity index 88% rename from node_modules/tuf-js/dist/models/signature.d.ts rename to node_modules/@tufjs/models/dist/signature.d.ts index 1d78e2d8e55d0..dbeabbef87717 100644 --- a/node_modules/tuf-js/dist/models/signature.d.ts +++ b/node_modules/@tufjs/models/dist/signature.d.ts @@ -1,4 +1,4 @@ -import { JSONObject } from '../utils/types'; +import { JSONObject } from './utils'; export interface SignatureOptions { keyID: string; sig: string; @@ -15,5 +15,6 @@ export declare class Signature { readonly keyID: string; readonly sig: string; constructor(options: SignatureOptions); + toJSON(): JSONObject; static fromJSON(data: JSONObject): Signature; } diff --git a/node_modules/tuf-js/dist/models/signature.js b/node_modules/@tufjs/models/dist/signature.js similarity index 89% rename from node_modules/tuf-js/dist/models/signature.js rename to node_modules/@tufjs/models/dist/signature.js index 9550fa7b551fc..33eb204eb0835 100644 --- a/node_modules/tuf-js/dist/models/signature.js +++ b/node_modules/@tufjs/models/dist/signature.js @@ -15,6 +15,12 @@ class Signature { this.keyID = keyID; this.sig = sig; } + toJSON() { + return { + keyid: this.keyID, + sig: this.sig, + }; + } static fromJSON(data) { const { keyid, sig } = data; if (typeof keyid !== 'string') { diff --git a/node_modules/tuf-js/dist/models/snapshot.d.ts b/node_modules/@tufjs/models/dist/snapshot.d.ts similarity index 87% rename from node_modules/tuf-js/dist/models/snapshot.d.ts 
rename to node_modules/@tufjs/models/dist/snapshot.d.ts index 79bc07359509b..bcc780aee0977 100644 --- a/node_modules/tuf-js/dist/models/snapshot.d.ts +++ b/node_modules/@tufjs/models/dist/snapshot.d.ts @@ -1,6 +1,6 @@ -import { JSONObject, MetadataKind } from '../utils/types'; -import { Signed, SignedOptions } from './base'; +import { MetadataKind, Signed, SignedOptions } from './base'; import { MetaFile } from './file'; +import { JSONObject } from './utils'; type MetaFileMap = Record; export interface SnapshotOptions extends SignedOptions { meta?: MetaFileMap; diff --git a/node_modules/tuf-js/dist/models/snapshot.js b/node_modules/@tufjs/models/dist/snapshot.js similarity index 89% rename from node_modules/tuf-js/dist/models/snapshot.js rename to node_modules/@tufjs/models/dist/snapshot.js index 0945a28cd03cc..e90ea8e729e4e 100644 --- a/node_modules/tuf-js/dist/models/snapshot.js +++ b/node_modules/@tufjs/models/dist/snapshot.js @@ -5,10 +5,9 @@ var __importDefault = (this && this.__importDefault) || function (mod) { Object.defineProperty(exports, "__esModule", { value: true }); exports.Snapshot = void 0; const util_1 = __importDefault(require("util")); -const guard_1 = require("../utils/guard"); -const types_1 = require("../utils/types"); const base_1 = require("./base"); const file_1 = require("./file"); +const utils_1 = require("./utils"); /** * A container for the signed part of snapshot metadata. * @@ -19,7 +18,7 @@ const file_1 = require("./file"); class Snapshot extends base_1.Signed { constructor(opts) { super(opts); - this.type = types_1.MetadataKind.Snapshot; + this.type = base_1.MetadataKind.Snapshot; this.meta = opts.meta || { 'targets.json': new file_1.MetaFile({ version: 1 }) }; } equals(other) { @@ -30,6 +29,7 @@ class Snapshot extends base_1.Signed { } toJSON() { return { + _type: this.type, meta: metaToJSON(this.meta), spec_version: this.specVersion, version: this.version, @@ -56,8 +56,8 @@ function metaToJSON(meta) { } function metaFromJSON(data) { let meta; - if ((0, guard_1.isDefined)(data)) { - if (!(0, guard_1.isObjectRecord)(data)) { + if (utils_1.guard.isDefined(data)) { + if (!utils_1.guard.isObjectRecord(data)) { throw new TypeError('meta field is malformed'); } else { @@ -66,6 +66,6 @@ function metaFromJSON(data) { [path]: file_1.MetaFile.fromJSON(metadata), }), {}); } - return meta; } + return meta; } diff --git a/node_modules/tuf-js/dist/models/targets.d.ts b/node_modules/@tufjs/models/dist/targets.d.ts similarity index 80% rename from node_modules/tuf-js/dist/models/targets.d.ts rename to node_modules/@tufjs/models/dist/targets.d.ts index 24dba9ac71580..442f9e44391b4 100644 --- a/node_modules/tuf-js/dist/models/targets.d.ts +++ b/node_modules/@tufjs/models/dist/targets.d.ts @@ -1,7 +1,7 @@ -import { JSONObject, MetadataKind } from '../utils/types'; -import { Signed, SignedOptions } from './base'; +import { MetadataKind, Signed, SignedOptions } from './base'; import { Delegations } from './delegations'; import { TargetFile } from './file'; +import { JSONObject } from './utils'; type TargetFileMap = Record; interface TargetsOptions extends SignedOptions { targets?: TargetFileMap; @@ -12,6 +12,7 @@ export declare class Targets extends Signed { readonly targets: TargetFileMap; readonly delegations?: Delegations; constructor(options: TargetsOptions); + addTarget(target: TargetFile): void; equals(other: Targets): boolean; toJSON(): JSONObject; static fromJSON(data: JSONObject): Targets; diff --git a/node_modules/tuf-js/dist/models/targets.js 
b/node_modules/@tufjs/models/dist/targets.js similarity index 88% rename from node_modules/tuf-js/dist/models/targets.js rename to node_modules/@tufjs/models/dist/targets.js index 90a2528764708..54bd8f8c554af 100644 --- a/node_modules/tuf-js/dist/models/targets.js +++ b/node_modules/@tufjs/models/dist/targets.js @@ -5,11 +5,10 @@ var __importDefault = (this && this.__importDefault) || function (mod) { Object.defineProperty(exports, "__esModule", { value: true }); exports.Targets = void 0; const util_1 = __importDefault(require("util")); -const guard_1 = require("../utils/guard"); -const types_1 = require("../utils/types"); const base_1 = require("./base"); const delegations_1 = require("./delegations"); const file_1 = require("./file"); +const utils_1 = require("./utils"); // Container for the signed part of targets metadata. // // Targets contains verifying information about target files and also delegates @@ -17,10 +16,13 @@ const file_1 = require("./file"); class Targets extends base_1.Signed { constructor(options) { super(options); - this.type = types_1.MetadataKind.Targets; + this.type = base_1.MetadataKind.Targets; this.targets = options.targets || {}; this.delegations = options.delegations; } + addTarget(target) { + this.targets[target.path] = target; + } equals(other) { if (!(other instanceof Targets)) { return false; @@ -31,6 +33,7 @@ class Targets extends base_1.Signed { } toJSON() { const json = { + _type: this.type, spec_version: this.specVersion, version: this.version, expires: this.expires, @@ -62,8 +65,8 @@ function targetsToJSON(targets) { } function targetsFromJSON(data) { let targets; - if ((0, guard_1.isDefined)(data)) { - if (!(0, guard_1.isObjectRecord)(data)) { + if (utils_1.guard.isDefined(data)) { + if (!utils_1.guard.isObjectRecord(data)) { throw new TypeError('targets must be an object'); } else { @@ -77,8 +80,8 @@ function targetsFromJSON(data) { } function delegationsFromJSON(data) { let delegations; - if ((0, guard_1.isDefined)(data)) { - if (!(0, guard_1.isObject)(data)) { + if (utils_1.guard.isDefined(data)) { + if (!utils_1.guard.isObject(data)) { throw new TypeError('delegations must be an object'); } else { diff --git a/node_modules/tuf-js/dist/models/timestamp.d.ts b/node_modules/@tufjs/models/dist/timestamp.d.ts similarity index 85% rename from node_modules/tuf-js/dist/models/timestamp.d.ts rename to node_modules/@tufjs/models/dist/timestamp.d.ts index 481ada8e238d5..9ab012b8912a3 100644 --- a/node_modules/tuf-js/dist/models/timestamp.d.ts +++ b/node_modules/@tufjs/models/dist/timestamp.d.ts @@ -1,6 +1,6 @@ -import { JSONObject, MetadataKind } from '../utils/types'; -import { Signed, SignedOptions } from './base'; +import { MetadataKind, Signed, SignedOptions } from './base'; import { MetaFile } from './file'; +import { JSONObject } from './utils'; interface TimestampOptions extends SignedOptions { snapshotMeta?: MetaFile; } diff --git a/node_modules/tuf-js/dist/models/timestamp.js b/node_modules/@tufjs/models/dist/timestamp.js similarity index 86% rename from node_modules/tuf-js/dist/models/timestamp.js rename to node_modules/@tufjs/models/dist/timestamp.js index 84f681b68d16a..9880c4c9fc254 100644 --- a/node_modules/tuf-js/dist/models/timestamp.js +++ b/node_modules/@tufjs/models/dist/timestamp.js @@ -1,10 +1,9 @@ "use strict"; Object.defineProperty(exports, "__esModule", { value: true }); exports.Timestamp = void 0; -const guard_1 = require("../utils/guard"); -const types_1 = require("../utils/types"); const base_1 = require("./base"); const file_1 = 
require("./file"); +const utils_1 = require("./utils"); /** * A container for the signed part of timestamp metadata. * @@ -14,7 +13,7 @@ const file_1 = require("./file"); class Timestamp extends base_1.Signed { constructor(options) { super(options); - this.type = types_1.MetadataKind.Timestamp; + this.type = base_1.MetadataKind.Timestamp; this.snapshotMeta = options.snapshotMeta || new file_1.MetaFile({ version: 1 }); } equals(other) { @@ -25,6 +24,7 @@ class Timestamp extends base_1.Signed { } toJSON() { return { + _type: this.type, spec_version: this.specVersion, version: this.version, expires: this.expires, @@ -45,9 +45,9 @@ class Timestamp extends base_1.Signed { exports.Timestamp = Timestamp; function snapshotMetaFromJSON(data) { let snapshotMeta; - if ((0, guard_1.isDefined)(data)) { + if (utils_1.guard.isDefined(data)) { const snapshotData = data['snapshot.json']; - if (!(0, guard_1.isDefined)(snapshotData) || !(0, guard_1.isObject)(snapshotData)) { + if (!utils_1.guard.isDefined(snapshotData) || !utils_1.guard.isObject(snapshotData)) { throw new TypeError('missing snapshot.json in meta'); } else { diff --git a/node_modules/tuf-js/dist/utils/guard.d.ts b/node_modules/@tufjs/models/dist/utils/guard.d.ts similarity index 78% rename from node_modules/tuf-js/dist/utils/guard.d.ts rename to node_modules/@tufjs/models/dist/utils/guard.d.ts index 17bc4ce3c7ea5..60c80e160752e 100644 --- a/node_modules/tuf-js/dist/utils/guard.d.ts +++ b/node_modules/@tufjs/models/dist/utils/guard.d.ts @@ -1,8 +1,7 @@ -import { JSONObject, MetadataKind } from './types'; +import { JSONObject } from './types'; export declare function isDefined(val: T | undefined): val is T; export declare function isObject(value: unknown): value is JSONObject; export declare function isStringArray(value: unknown): value is string[]; export declare function isObjectArray(value: unknown): value is JSONObject[]; export declare function isStringRecord(value: unknown): value is Record; export declare function isObjectRecord(value: unknown): value is Record; -export declare function isMetadataKind(value: unknown): value is MetadataKind; diff --git a/node_modules/tuf-js/dist/utils/guard.js b/node_modules/@tufjs/models/dist/utils/guard.js similarity index 74% rename from node_modules/tuf-js/dist/utils/guard.js rename to node_modules/@tufjs/models/dist/utils/guard.js index f2207af18690a..efe558852303c 100644 --- a/node_modules/tuf-js/dist/utils/guard.js +++ b/node_modules/@tufjs/models/dist/utils/guard.js @@ -1,7 +1,6 @@ "use strict"; Object.defineProperty(exports, "__esModule", { value: true }); -exports.isMetadataKind = exports.isObjectRecord = exports.isStringRecord = exports.isObjectArray = exports.isStringArray = exports.isObject = exports.isDefined = void 0; -const types_1 = require("./types"); +exports.isObjectRecord = exports.isStringRecord = exports.isObjectArray = exports.isStringArray = exports.isObject = exports.isDefined = void 0; function isDefined(val) { return val !== undefined; } @@ -32,8 +31,3 @@ function isObjectRecord(value) { Object.values(value).every((v) => typeof v === 'object' && v !== null)); } exports.isObjectRecord = isObjectRecord; -function isMetadataKind(value) { - return (typeof value === 'string' && - Object.values(types_1.MetadataKind).includes(value)); -} -exports.isMetadataKind = isMetadataKind; diff --git a/node_modules/@tufjs/models/dist/utils/index.d.ts b/node_modules/@tufjs/models/dist/utils/index.d.ts new file mode 100644 index 0000000000000..7dbbd1eeecfdf --- /dev/null +++ 
b/node_modules/@tufjs/models/dist/utils/index.d.ts @@ -0,0 +1,3 @@ +export * as guard from './guard'; +export { JSONObject, JSONValue } from './types'; +export * as crypto from './verify'; diff --git a/node_modules/tuf-js/dist/utils/index.js b/node_modules/@tufjs/models/dist/utils/index.js similarity index 79% rename from node_modules/tuf-js/dist/utils/index.js rename to node_modules/@tufjs/models/dist/utils/index.js index 604696a30565b..872aae28049c9 100644 --- a/node_modules/tuf-js/dist/utils/index.js +++ b/node_modules/@tufjs/models/dist/utils/index.js @@ -23,9 +23,6 @@ var __importStar = (this && this.__importStar) || function (mod) { return result; }; Object.defineProperty(exports, "__esModule", { value: true }); -exports.types = exports.signer = exports.json = exports.guard = exports.config = void 0; -exports.config = __importStar(require("./config")); +exports.crypto = exports.guard = void 0; exports.guard = __importStar(require("./guard")); -exports.json = __importStar(require("./json")); -exports.signer = __importStar(require("./signer")); -exports.types = __importStar(require("./types")); +exports.crypto = __importStar(require("./verify")); diff --git a/node_modules/tuf-js/dist/utils/json.d.ts b/node_modules/@tufjs/models/dist/utils/json.d.ts similarity index 100% rename from node_modules/tuf-js/dist/utils/json.d.ts rename to node_modules/@tufjs/models/dist/utils/json.d.ts diff --git a/node_modules/tuf-js/dist/utils/json.js b/node_modules/@tufjs/models/dist/utils/json.js similarity index 100% rename from node_modules/tuf-js/dist/utils/json.js rename to node_modules/@tufjs/models/dist/utils/json.js diff --git a/node_modules/tuf-js/dist/utils/key.d.ts b/node_modules/@tufjs/models/dist/utils/key.d.ts similarity index 100% rename from node_modules/tuf-js/dist/utils/key.d.ts rename to node_modules/@tufjs/models/dist/utils/key.d.ts diff --git a/node_modules/tuf-js/dist/utils/key.js b/node_modules/@tufjs/models/dist/utils/key.js similarity index 100% rename from node_modules/tuf-js/dist/utils/key.js rename to node_modules/@tufjs/models/dist/utils/key.js diff --git a/node_modules/tuf-js/dist/utils/oid.d.ts b/node_modules/@tufjs/models/dist/utils/oid.d.ts similarity index 100% rename from node_modules/tuf-js/dist/utils/oid.d.ts rename to node_modules/@tufjs/models/dist/utils/oid.d.ts diff --git a/node_modules/tuf-js/dist/utils/oid.js b/node_modules/@tufjs/models/dist/utils/oid.js similarity index 100% rename from node_modules/tuf-js/dist/utils/oid.js rename to node_modules/@tufjs/models/dist/utils/oid.js diff --git a/node_modules/tuf-js/dist/utils/types.d.ts b/node_modules/@tufjs/models/dist/utils/types.d.ts similarity index 51% rename from node_modules/tuf-js/dist/utils/types.d.ts rename to node_modules/@tufjs/models/dist/utils/types.d.ts index 24319ddf7bb6b..dd3964ec571e2 100644 --- a/node_modules/tuf-js/dist/utils/types.d.ts +++ b/node_modules/@tufjs/models/dist/utils/types.d.ts @@ -1,9 +1,3 @@ -export declare enum MetadataKind { - Root = "root", - Timestamp = "timestamp", - Snapshot = "snapshot", - Targets = "targets" -} export type JSONObject = { [key: string]: JSONValue; }; diff --git a/node_modules/@tufjs/models/dist/utils/types.js b/node_modules/@tufjs/models/dist/utils/types.js new file mode 100644 index 0000000000000..c8ad2e549bdc6 --- /dev/null +++ b/node_modules/@tufjs/models/dist/utils/types.js @@ -0,0 +1,2 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/node_modules/tuf-js/dist/utils/signer.d.ts 
b/node_modules/@tufjs/models/dist/utils/verify.d.ts similarity index 100% rename from node_modules/tuf-js/dist/utils/signer.d.ts rename to node_modules/@tufjs/models/dist/utils/verify.d.ts diff --git a/node_modules/tuf-js/dist/utils/signer.js b/node_modules/@tufjs/models/dist/utils/verify.js similarity index 100% rename from node_modules/tuf-js/dist/utils/signer.js rename to node_modules/@tufjs/models/dist/utils/verify.js diff --git a/node_modules/@tufjs/models/package.json b/node_modules/@tufjs/models/package.json new file mode 100644 index 0000000000000..f3746c27041fd --- /dev/null +++ b/node_modules/@tufjs/models/package.json @@ -0,0 +1,41 @@ +{ + "name": "@tufjs/models", + "version": "1.0.0", + "description": "TUF metadata models", + "main": "dist/index.js", + "types": "dist/index.d.ts", + "files": [ + "dist" + ], + "scripts": { + "build": "tsc --build", + "clean": "rm -rf dist && rm tsconfig.tsbuildinfo", + "test": "jest" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/theupdateframework/tuf-js.git" + }, + "keywords": [ + "tuf", + "security", + "update" + ], + "author": "bdehamer@github.com", + "license": "MIT", + "bugs": { + "url": "https://github.com/theupdateframework/tuf-js/issues" + }, + "homepage": "https://github.com/theupdateframework/tuf-js/packages/models#readme", + "devDependencies": { + "@types/minimatch": "^5.1.2", + "@types/node": "^18.14.1", + "typescript": "^4.9.5" + }, + "dependencies": { + "minimatch": "^6.1.0" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } +} diff --git a/node_modules/agentkeepalive/History.md b/node_modules/agentkeepalive/History.md index 9db6e5576d42b..a08e98e0bd504 100644 --- a/node_modules/agentkeepalive/History.md +++ b/node_modules/agentkeepalive/History.md @@ -1,4 +1,12 @@ +4.3.0 / 2023-03-06 +================== + +**others** + * [[`6f9852b`](http://github.com/node-modules/agentkeepalive/commit/6f9852bf6f674846103e403fd9c84e92fc24f820)] - deps: depd@2.0.0 (#109) (Brian DeHamer <>) + * [[`fd4bd9b`](http://github.com/node-modules/agentkeepalive/commit/fd4bd9b0e0f051de3cb49559d1b0d534a0ded18c)] - test: use npm install (#110) (fengmk2 <>) + * [[`d52822c`](http://github.com/node-modules/agentkeepalive/commit/d52822c1243c689df1c8232a3bb14139cf87fae5)] - chore: update contributors (fengmk2 <>) + 4.2.1 / 2022-02-21 ================== diff --git a/node_modules/agentkeepalive/package.json b/node_modules/agentkeepalive/package.json index efa561d2c6d6c..3115fee69a041 100644 --- a/node_modules/agentkeepalive/package.json +++ b/node_modules/agentkeepalive/package.json @@ -1,6 +1,6 @@ { "name": "agentkeepalive", - "version": "4.2.1", + "version": "4.3.0", "description": "Missing keepalive http.Agent", "main": "index.js", "browser": "browser.js", @@ -11,12 +11,12 @@ "lib" ], "scripts": { + "contributor": "git-contributor", "test": "npm run lint && egg-bin test --full-trace", "test-local": "egg-bin test --full-trace", "cov": "cross-env DEBUG=agentkeepalive egg-bin cov --full-trace", "ci": "npm run lint && npm run cov", - "lint": "eslint lib test index.js", - "autod": "autod" + "lint": "eslint lib test index.js" }, "repository": { "type": "git", @@ -36,17 +36,16 @@ ], "dependencies": { "debug": "^4.1.0", - "depd": "^1.1.2", + "depd": "^2.0.0", "humanize-ms": "^1.2.1" }, "devDependencies": { - "autod": "^3.0.1", "coffee": "^5.3.0", "cross-env": "^6.0.3", "egg-bin": "^4.9.0", - "egg-ci": "^1.10.0", "eslint": "^5.7.0", "eslint-config-egg": "^7.1.0", + "git-contributor": "^2.0.0", "mm": "^2.4.1", "pedding": "^1.1.0", 
"typescript": "^3.8.3" @@ -54,13 +53,6 @@ "engines": { "node": ">= 8.0.0" }, - "ci": { - "type": "github", - "os": { - "github": "linux" - }, - "version": "8, 10, 12, 14, 16" - }, - "author": "fengmk2 (https://fengmk2.com)", + "author": "fengmk2 (https://github.com/fengmk2)", "license": "MIT" } diff --git a/node_modules/depd/History.md b/node_modules/depd/History.md index 507ecb8de21d5..cd9ebaaa9963f 100644 --- a/node_modules/depd/History.md +++ b/node_modules/depd/History.md @@ -1,3 +1,10 @@ +2.0.0 / 2018-10-26 +================== + + * Drop support for Node.js 0.6 + * Replace internal `eval` usage with `Function` constructor + * Use instance methods on `process` to check for listeners + 1.1.2 / 2018-01-11 ================== diff --git a/node_modules/depd/LICENSE b/node_modules/depd/LICENSE index 84441fbb57092..248de7af2bd16 100644 --- a/node_modules/depd/LICENSE +++ b/node_modules/depd/LICENSE @@ -1,6 +1,6 @@ (The MIT License) -Copyright (c) 2014-2017 Douglas Christopher Wilson +Copyright (c) 2014-2018 Douglas Christopher Wilson Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the diff --git a/node_modules/depd/index.js b/node_modules/depd/index.js index d758d3c8f58a6..1bf2fcfdeffc9 100644 --- a/node_modules/depd/index.js +++ b/node_modules/depd/index.js @@ -1,6 +1,6 @@ /*! * depd - * Copyright(c) 2014-2017 Douglas Christopher Wilson + * Copyright(c) 2014-2018 Douglas Christopher Wilson * MIT Licensed */ @@ -8,8 +8,6 @@ * Module dependencies. */ -var callSiteToString = require('./lib/compat').callSiteToString -var eventListenerCount = require('./lib/compat').eventListenerCount var relative = require('path').relative /** @@ -92,7 +90,7 @@ function createStackString (stack) { } for (var i = 0; i < stack.length; i++) { - str += '\n at ' + callSiteToString(stack[i]) + str += '\n at ' + stack[i].toString() } return str @@ -128,12 +126,31 @@ function depd (namespace) { return deprecate } +/** + * Determine if event emitter has listeners of a given type. + * + * The way to do this check is done three different ways in Node.js >= 0.8 + * so this consolidates them into a minimal set using instance methods. + * + * @param {EventEmitter} emitter + * @param {string} type + * @returns {boolean} + * @private + */ + +function eehaslisteners (emitter, type) { + var count = typeof emitter.listenerCount !== 'function' + ? emitter.listeners(type).length + : emitter.listenerCount(type) + + return count > 0 +} + /** * Determine if namespace is ignored. 
*/ function isignored (namespace) { - /* istanbul ignore next: tested in a child processs */ if (process.noDeprecation) { // --no-deprecation support return true @@ -150,7 +167,6 @@ function isignored (namespace) { */ function istraced (namespace) { - /* istanbul ignore next: tested in a child processs */ if (process.traceDeprecation) { // --trace-deprecation support return true @@ -167,7 +183,7 @@ function istraced (namespace) { */ function log (message, site) { - var haslisteners = eventListenerCount(process, 'deprecation') !== 0 + var haslisteners = eehaslisteners(process, 'deprecation') // abort early if no destination if (!haslisteners && this._ignored) { @@ -310,7 +326,7 @@ function formatPlain (msg, caller, stack) { // add stack trace if (this._traced) { for (var i = 0; i < stack.length; i++) { - formatted += '\n at ' + callSiteToString(stack[i]) + formatted += '\n at ' + stack[i].toString() } return formatted @@ -335,7 +351,7 @@ function formatColor (msg, caller, stack) { // add stack trace if (this._traced) { for (var i = 0; i < stack.length; i++) { - formatted += '\n \x1b[36mat ' + callSiteToString(stack[i]) + '\x1b[39m' // cyan + formatted += '\n \x1b[36mat ' + stack[i].toString() + '\x1b[39m' // cyan } return formatted @@ -400,18 +416,18 @@ function wrapfunction (fn, message) { } var args = createArgumentsString(fn.length) - var deprecate = this // eslint-disable-line no-unused-vars var stack = getStack() var site = callSiteLocation(stack[1]) site.name = fn.name - // eslint-disable-next-line no-eval - var deprecatedfn = eval('(function (' + args + ') {\n' + + // eslint-disable-next-line no-new-func + var deprecatedfn = new Function('fn', 'log', 'deprecate', 'message', 'site', '"use strict"\n' + + 'return function (' + args + ') {' + 'log.call(deprecate, message, site)\n' + 'return fn.apply(this, arguments)\n' + - '})') + '}')(fn, log, this, message, site) return deprecatedfn } diff --git a/node_modules/depd/lib/compat/callsite-tostring.js b/node_modules/depd/lib/compat/callsite-tostring.js deleted file mode 100644 index 73186dc644a36..0000000000000 --- a/node_modules/depd/lib/compat/callsite-tostring.js +++ /dev/null @@ -1,103 +0,0 @@ -/*! - * depd - * Copyright(c) 2014 Douglas Christopher Wilson - * MIT Licensed - */ - -'use strict' - -/** - * Module exports. - */ - -module.exports = callSiteToString - -/** - * Format a CallSite file location to a string. - */ - -function callSiteFileLocation (callSite) { - var fileName - var fileLocation = '' - - if (callSite.isNative()) { - fileLocation = 'native' - } else if (callSite.isEval()) { - fileName = callSite.getScriptNameOrSourceURL() - if (!fileName) { - fileLocation = callSite.getEvalOrigin() - } - } else { - fileName = callSite.getFileName() - } - - if (fileName) { - fileLocation += fileName - - var lineNumber = callSite.getLineNumber() - if (lineNumber != null) { - fileLocation += ':' + lineNumber - - var columnNumber = callSite.getColumnNumber() - if (columnNumber) { - fileLocation += ':' + columnNumber - } - } - } - - return fileLocation || 'unknown source' -} - -/** - * Format a CallSite to a string. 
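For orientation while reading the hunk above: depd 2.0.0 swaps its `eval`-based wrapper for the `Function` constructor while still preserving the wrapped function's arity. A minimal sketch of the same pattern, hardcoded here for a two-argument function (the names `wrap` and `onCall` are hypothetical illustrations, not depd's API; depd builds the parameter list dynamically via createArgumentsString(fn.length)):

// eslint-disable-next-line no-new-func
function wrap (fn, onCall) {
  // Generate a wrapper whose declared parameters match the original,
  // so fn.length (arity) survives the wrapping.
  return new Function('fn', 'onCall',
    '"use strict"\n' +
    'return function (a0, a1) {\n' +
    'onCall()\n' +
    'return fn.apply(this, arguments)\n' +
    '}')(fn, onCall)
}

var add = wrap(function (a, b) { return a + b }, function () {
  console.log('deprecated call')
})
console.log(add.length) // 2, arity preserved for introspection
console.log(add(1, 2)) // logs "deprecated call", then 3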
- */ - -function callSiteToString (callSite) { - var addSuffix = true - var fileLocation = callSiteFileLocation(callSite) - var functionName = callSite.getFunctionName() - var isConstructor = callSite.isConstructor() - var isMethodCall = !(callSite.isToplevel() || isConstructor) - var line = '' - - if (isMethodCall) { - var methodName = callSite.getMethodName() - var typeName = getConstructorName(callSite) - - if (functionName) { - if (typeName && functionName.indexOf(typeName) !== 0) { - line += typeName + '.' - } - - line += functionName - - if (methodName && functionName.lastIndexOf('.' + methodName) !== functionName.length - methodName.length - 1) { - line += ' [as ' + methodName + ']' - } - } else { - line += typeName + '.' + (methodName || '') - } - } else if (isConstructor) { - line += 'new ' + (functionName || '') - } else if (functionName) { - line += functionName - } else { - addSuffix = false - line += fileLocation - } - - if (addSuffix) { - line += ' (' + fileLocation + ')' - } - - return line -} - -/** - * Get constructor name of reviver. - */ - -function getConstructorName (obj) { - var receiver = obj.receiver - return (receiver.constructor && receiver.constructor.name) || null -} diff --git a/node_modules/depd/lib/compat/event-listener-count.js b/node_modules/depd/lib/compat/event-listener-count.js deleted file mode 100644 index 3a8925d136ae6..0000000000000 --- a/node_modules/depd/lib/compat/event-listener-count.js +++ /dev/null @@ -1,22 +0,0 @@ -/*! - * depd - * Copyright(c) 2015 Douglas Christopher Wilson - * MIT Licensed - */ - -'use strict' - -/** - * Module exports. - * @public - */ - -module.exports = eventListenerCount - -/** - * Get the count of listeners on an event emitter of a specific type. - */ - -function eventListenerCount (emitter, type) { - return emitter.listeners(type).length -} diff --git a/node_modules/depd/lib/compat/index.js b/node_modules/depd/lib/compat/index.js deleted file mode 100644 index 955b3336b25e7..0000000000000 --- a/node_modules/depd/lib/compat/index.js +++ /dev/null @@ -1,79 +0,0 @@ -/*! - * depd - * Copyright(c) 2014-2015 Douglas Christopher Wilson - * MIT Licensed - */ - -'use strict' - -/** - * Module dependencies. - * @private - */ - -var EventEmitter = require('events').EventEmitter - -/** - * Module exports. - * @public - */ - -lazyProperty(module.exports, 'callSiteToString', function callSiteToString () { - var limit = Error.stackTraceLimit - var obj = {} - var prep = Error.prepareStackTrace - - function prepareObjectStackTrace (obj, stack) { - return stack - } - - Error.prepareStackTrace = prepareObjectStackTrace - Error.stackTraceLimit = 2 - - // capture the stack - Error.captureStackTrace(obj) - - // slice the stack - var stack = obj.stack.slice() - - Error.prepareStackTrace = prep - Error.stackTraceLimit = limit - - return stack[0].toString ? toString : require('./callsite-tostring') -}) - -lazyProperty(module.exports, 'eventListenerCount', function eventListenerCount () { - return EventEmitter.listenerCount || require('./event-listener-count') -}) - -/** - * Define a lazy property. 
- */ - - function lazyProperty (obj, prop, getter) { - function get () { - var val = getter() - - Object.defineProperty(obj, prop, { - configurable: true, - enumerable: true, - value: val - }) - - return val - } - - Object.defineProperty(obj, prop, { - configurable: true, - enumerable: true, - get: get - }) -} - -/** - * Call toString() on the obj - */ - -function toString (obj) { - return obj.toString() -} diff --git a/node_modules/depd/package.json b/node_modules/depd/package.json index 5e3c863216e40..3857e199184a0 100644 --- a/node_modules/depd/package.json +++ b/node_modules/depd/package.json @@ -1,7 +1,7 @@ { "name": "depd", "description": "Deprecate all the things", - "version": "1.1.2", + "version": "2.0.0", "author": "Douglas Christopher Wilson ", "license": "MIT", "keywords": [ @@ -13,13 +13,17 @@ "devDependencies": { "benchmark": "2.1.4", "beautify-benchmark": "0.2.4", - "eslint": "3.19.0", - "eslint-config-standard": "7.1.0", + "eslint": "5.7.0", + "eslint-config-standard": "12.0.0", + "eslint-plugin-import": "2.14.0", "eslint-plugin-markdown": "1.0.0-beta.7", - "eslint-plugin-promise": "3.6.0", - "eslint-plugin-standard": "3.0.1", + "eslint-plugin-node": "7.0.1", + "eslint-plugin-promise": "4.0.1", + "eslint-plugin-standard": "4.0.0", "istanbul": "0.4.5", - "mocha": "~1.21.5" + "mocha": "5.2.0", + "safe-buffer": "5.1.2", + "uid-safe": "2.1.5" }, "files": [ "lib/", @@ -29,13 +33,13 @@ "Readme.md" ], "engines": { - "node": ">= 0.6" + "node": ">= 0.8" }, "scripts": { "bench": "node benchmark/index.js", "lint": "eslint --plugin markdown --ext js,md .", "test": "mocha --reporter spec --bail test/", - "test-ci": "istanbul cover node_modules/mocha/bin/_mocha --report lcovonly -- --reporter spec --no-exit test/", - "test-cov": "istanbul cover node_modules/mocha/bin/_mocha -- --reporter dot test/" + "test-ci": "istanbul cover --print=none node_modules/mocha/bin/_mocha -- --reporter spec test/ && istanbul report lcovonly text-summary", + "test-cov": "istanbul cover --print=none node_modules/mocha/bin/_mocha -- --reporter dot test/ && istanbul report lcov text-summary" } } diff --git a/node_modules/lru-cache/index.d.ts b/node_modules/lru-cache/index.d.ts index e8af7c4de82f9..b58395e04a9d1 100644 --- a/node_modules/lru-cache/index.d.ts +++ b/node_modules/lru-cache/index.d.ts @@ -112,7 +112,7 @@ declare class LRUCache<K, V> implements Iterable<[K, V]> { * `cache.set(key, undefined)`. Use {@link has} to determine whether a key is * present in the cache at all. */ - public get(key: K, options?: LRUCache.GetOptions): V | undefined + public get(key: K, options?: LRUCache.GetOptions<V>): V | undefined /** * Like {@link get} but doesn't update recency or delete stale items. @@ -129,7 +129,7 @@ declare class LRUCache<K, V> implements Iterable<[K, V]> { * Will not update item age unless {@link updateAgeOnHas} is set in the * options or constructor. */ - public has(key: K, options?: LRUCache.HasOptions): boolean + public has(key: K, options?: LRUCache.HasOptions<V>): boolean /** * Deletes a key out of the cache. @@ -158,7 +158,7 @@ declare class LRUCache<K, V> implements Iterable<[K, V]> { key: K, cache: this ) => boolean | undefined | void, - options?: LRUCache.GetOptions + options?: LRUCache.GetOptions<V> ): V | undefined /** @@ -271,6 +271,20 @@ declare class LRUCache<K, V> implements Iterable<[K, V]> { public prune(): boolean /** + * Make an asynchronous cached fetch using the {@link fetchMethod} function.
+ * + * If multiple fetches for the same key are issued, then they will all be + * coalesced into a single call to fetchMethod. + * + * Note that this means that handling options such as + * {@link allowStaleOnFetchAbort}, {@link signal}, and + * {@link allowStaleOnFetchRejection} will be determined by the FIRST fetch() + * call for a given key. + * + * This is a known (fixable) shortcoming which will be addressed when + * someone complains about it, as the fix would involve added complexity and + * may not be worth the costs for this edge case. + * * since: 7.6.0 */ public fetch( @@ -521,6 +535,8 @@ declare namespace LRUCache { * Set to true to suppress the deletion of stale data when a * {@link fetchMethod} throws an error or returns a rejected promise * + * This may be overridden in the {@link fetchMethod}. + * * @default false * @since 7.10.0 */ @@ -533,11 +549,59 @@ declare namespace LRUCache { * ONLY be returned in the case that the fetch fails, not any other * times. * + * This may be overridden in the {@link fetchMethod}. + * * @default false * @since 7.16.0 */ allowStaleOnFetchRejection?: boolean + /** + * + * Set to true to ignore the `abort` event emitted by the `AbortSignal` + * object passed to {@link fetchMethod}, and still cache the + * resulting resolution value, as long as it is not `undefined`. + * + * When used on its own, this means aborted {@link fetch} calls are not + * immediately resolved or rejected when they are aborted, and instead take + * the full time to await. + * + * When used with {@link allowStaleOnFetchAbort}, aborted {@link fetch} + * calls will resolve immediately to their stale cached value or + * `undefined`, and will continue to process and eventually update the + * cache when they resolve, as long as the resulting value is not + * `undefined`, thus supporting a "return stale on timeout while + * refreshing" mechanism by passing `AbortSignal.timeout(n)` as the signal. + * + * **Note**: regardless of this setting, an `abort` event _is still emitted + * on the `AbortSignal` object_, so may result in invalid results when + * passed to other underlying APIs that use AbortSignals. + * + * This may be overridden in the {@link fetchMethod} or the call to + * {@link fetch}. + * + * @default false + * @since 7.17.0 + */ + ignoreFetchAbort?: boolean + + /** + * Set to true to return a stale value from the cache when the + * `AbortSignal` passed to the {@link fetchMethod} dispatches an `'abort'` + * event, whether user-triggered, or due to internal cache behavior. + * + * Unless {@link ignoreFetchAbort} is also set, the underlying + * {@link fetchMethod} will still be considered canceled, and its return + * value will be ignored and not cached. + * + * This may be overridden in the {@link fetchMethod} or the call to + * {@link fetch}. + * + * @default false + * @since 7.17.0 + */ + allowStaleOnFetchAbort?: boolean + /** * Set to any value in the constructor or {@link fetch} options to * pass arbitrary data to the {@link fetchMethod} in the {@link context} @@ -585,24 +649,27 @@ declare namespace LRUCache { start?: LRUMilliseconds noDisposeOnSet?: boolean noUpdateTTL?: boolean + status?: Status<V> } /** * options which override the options set in the LRUCache constructor * when calling {@link has}. */ - interface HasOptions { + interface HasOptions<V> { updateAgeOnHas?: boolean + status: Status<V> } /** * options which override the options set in the LRUCache constructor * when calling {@link get}.
*/ - interface GetOptions { + interface GetOptions<V> { allowStale?: boolean updateAgeOnGet?: boolean noDeleteOnStaleGet?: boolean + status?: Status<V> } /** @@ -618,8 +685,8 @@ declare namespace LRUCache { * * May be mutated by the {@link fetchMethod} to affect the behavior of the * resulting {@link set} operation on resolution, or in the case of - * {@link noDeleteOnFetchRejection} and {@link allowStaleOnFetchRejection}, - * the handling of failure. + * {@link noDeleteOnFetchRejection}, {@link ignoreFetchAbort}, and + * {@link allowStaleOnFetchRejection}, the handling of failure. */ interface FetcherFetchOptions<K, V> { allowStale?: boolean @@ -632,6 +699,139 @@ declare namespace LRUCache { noUpdateTTL?: boolean noDeleteOnFetchRejection?: boolean allowStaleOnFetchRejection?: boolean + ignoreFetchAbort?: boolean + allowStaleOnFetchAbort?: boolean + status?: Status<V> + } + + /** + * Status object that may be passed to {@link fetch}, {@link get}, + * {@link set}, and {@link has}. + */ + interface Status<V> { + /** + * The status of a set() operation. + * + * - add: the item was not found in the cache, and was added + * - update: the item was in the cache, with the same value provided + * - replace: the item was in the cache, and replaced + * - miss: the item was not added to the cache for some reason + */ + set?: 'add' | 'update' | 'replace' | 'miss' + + /** + * the ttl stored for the item, or undefined if ttls are not used. + */ + ttl?: LRUMilliseconds + + /** + * the start time for the item, or undefined if ttls are not used. + */ + start?: LRUMilliseconds + + /** + * The timestamp used for TTL calculation + */ + now?: LRUMilliseconds + + /** + * the remaining ttl for the item, or undefined if ttls are not used. + */ + remainingTTL?: LRUMilliseconds + + /** + * The calculated size for the item, if sizes are used. + */ + size?: LRUSize + + /** + * A flag indicating that the item was not stored, due to exceeding the + * {@link maxEntrySize} + */ + maxEntrySizeExceeded?: true + + /** + * The old value, specified in the case of `set:'update'` or + * `set:'replace'` + */ + oldValue?: V + + /** + * The results of a {@link has} operation + * + * - hit: the item was found in the cache + * - stale: the item was found in the cache, but is stale + * - miss: the item was not found in the cache + */ + has?: 'hit' | 'stale' | 'miss' + + /** + * The status of a {@link fetch} operation. + * Note that this can change as the underlying fetch() moves through + * various states. + * + * - inflight: there is another fetch() for this key which is in process + * - get: there is no fetchMethod, so {@link get} was called. + * - miss: the item is not in cache, and will be fetched. + * - hit: the item is in the cache, and was resolved immediately. + * - stale: the item is in the cache, but stale. + * - refresh: the item is in the cache, and not stale, but + * {@link forceRefresh} was specified. + */ + fetch?: 'get' | 'inflight' | 'miss' | 'hit' | 'stale' | 'refresh' + + /** + * The {@link fetchMethod} was called + */ + fetchDispatched?: true + + /** + * The cached value was updated after a successful call to fetchMethod + */ + fetchUpdated?: true + + /** + * The reason for a fetch() rejection. Either the error raised by the + * {@link fetchMethod}, or the reason for an AbortSignal. + */ + fetchError?: Error + + /** + * The fetch received an abort signal + */ + fetchAborted?: true + + /** + * The abort signal received was ignored, and the fetch was allowed to + * continue.
+ */ + fetchAbortIgnored?: true + + /** + * The fetchMethod promise resolved successfully + */ + fetchResolved?: true + + /** + * The fetchMethod promise was rejected + */ + fetchRejected?: true + + /** + * The status of a {@link get} operation. + * + * - fetching: The item is currently being fetched. If a previous value is + * present and allowed, that will be returned. + * - stale: The item is in the cache, and is stale. + * - hit: the item is in the cache + * - miss: the item is not in the cache + */ + get?: 'stale' | 'hit' | 'miss' + + /** + * A fetch or get operation returned a stale value. + */ + returnedStale?: true } /** @@ -645,6 +845,8 @@ declare namespace LRUCache { interface FetchOptions<K, V> extends FetcherFetchOptions<K, V> { forceRefresh?: boolean fetchContext?: any + signal?: AbortSignal + status?: Status<V> } interface FetcherOptions<K, V> { diff --git a/node_modules/lru-cache/index.js b/node_modules/lru-cache/index.js index f4be3476d4dbc..48e99fe5e5a70 100644 --- a/node_modules/lru-cache/index.js +++ b/node_modules/lru-cache/index.js @@ -18,7 +18,8 @@ const AC = hasAbortController this.signal = new AS() } abort(reason = new Error('This operation was aborted')) { - this.signal.reason = reason + this.signal.reason = this.signal.reason || reason + this.signal.aborted = true this.signal.dispatchEvent({ type: 'abort', target: this.signal, @@ -168,6 +169,8 @@ class LRUCache { noDeleteOnFetchRejection, noDeleteOnStaleGet, allowStaleOnFetchRejection, + allowStaleOnFetchAbort, + ignoreFetchAbort, } = options // deprecated options, don't trigger a warning for getting them if @@ -238,6 +241,8 @@ class LRUCache { this.noUpdateTTL = !!noUpdateTTL this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection + this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort + this.ignoreFetchAbort = !!ignoreFetchAbort // NB: maxEntrySize is set to maxSize if it's set if (this.maxEntrySize !== 0) { @@ -331,6 +336,15 @@ class LRUCache { this.starts[index] = this.ttls[index] !== 0 ? perf.now() : 0 } + this.statusTTL = (status, index) => { + if (status) { + status.ttl = this.ttls[index] + status.start = this.starts[index] + status.now = cachedNow || getNow() + status.remainingTTL = status.now + status.ttl - status.start + } + } + // debounce calls to perf.now() to 1s so we're not hitting // that costly call repeatedly.
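The typings above and the implementation below add two things: a caller-supplied `status` object that each operation fills in, and abort-aware fetches. A usage sketch, assuming Node 17.3+ for `AbortSignal.timeout()` (the cache shape and fetch function here are hypothetical, not part of this patch):

const LRUCache = require('lru-cache')

const cache = new LRUCache({
  max: 100,
  ttl: 60000,
  allowStale: true,
  allowStaleOnFetchAbort: true, // resolve with the stale value on abort...
  ignoreFetchAbort: true, // ...but let the fetch finish and refresh the cache
  fetchMethod: async (key, staleValue, { signal }) => {
    // hypothetical slow lookup; a real one should observe `signal`
    return { key, fetchedAt: Date.now() }
  },
})

const status = {}
// inside an async context: abort the *wait* after 50ms; the fetch
// keeps running and eventually updates the cached entry
const value = await cache.fetch('a', {
  signal: AbortSignal.timeout(50),
  status,
})
console.log(status.fetch) // 'miss' on a cold cache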
let cachedNow = 0 @@ -372,6 +386,7 @@ class LRUCache { } } updateItemAge(_index) {} + statusTTL(_status, _index) {} setItemTTL(_index, _ttl, _start) {} isStale(_index) { return false @@ -411,7 +426,7 @@ class LRUCache { } return size } - this.addItemSize = (index, size) => { + this.addItemSize = (index, size, status) => { this.sizes[index] = size if (this.maxSize) { const maxSize = this.maxSize - this.sizes[index] @@ -420,6 +435,10 @@ class LRUCache { } } this.calculatedSize += this.sizes[index] + if (status) { + status.entrySize = size + status.totalCalculatedSize = this.calculatedSize + } } } removeItemSize(_index) {} @@ -469,19 +488,30 @@ class LRUCache { } isValidIndex(index) { - return this.keyMap.get(this.keyList[index]) === index + return ( + index !== undefined && + this.keyMap.get(this.keyList[index]) === index + ) } *entries() { for (const i of this.indexes()) { - if (!this.isBackgroundFetch(this.valList[i])) { + if ( + this.valList[i] !== undefined && + this.keyList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { yield [this.keyList[i], this.valList[i]] } } } *rentries() { for (const i of this.rindexes()) { - if (!this.isBackgroundFetch(this.valList[i])) { + if ( + this.valList[i] !== undefined && + this.keyList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { yield [this.keyList[i], this.valList[i]] } } @@ -489,14 +519,20 @@ class LRUCache { *keys() { for (const i of this.indexes()) { - if (!this.isBackgroundFetch(this.valList[i])) { + if ( + this.keyList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { yield this.keyList[i] } } } *rkeys() { for (const i of this.rindexes()) { - if (!this.isBackgroundFetch(this.valList[i])) { + if ( + this.keyList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { yield this.keyList[i] } } @@ -504,14 +540,20 @@ class LRUCache { *values() { for (const i of this.indexes()) { - if (!this.isBackgroundFetch(this.valList[i])) { + if ( + this.valList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { yield this.valList[i] } } } *rvalues() { for (const i of this.rindexes()) { - if (!this.isBackgroundFetch(this.valList[i])) { + if ( + this.valList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { yield this.valList[i] } } @@ -521,9 +563,14 @@ class LRUCache { return this.entries() } - find(fn, getOptions = {}) { + find(fn, getOptions) { for (const i of this.indexes()) { - if (fn(this.valList[i], this.keyList[i], this)) { + const v = this.valList[i] + const value = this.isBackgroundFetch(v) + ? v.__staleWhileFetching + : v + if (value === undefined) continue + if (fn(value, this.keyList[i], this)) { return this.get(this.keyList[i], getOptions) } } @@ -531,13 +578,23 @@ class LRUCache { forEach(fn, thisp = this) { for (const i of this.indexes()) { - fn.call(thisp, this.valList[i], this.keyList[i], this) + const v = this.valList[i] + const value = this.isBackgroundFetch(v) + ? v.__staleWhileFetching + : v + if (value === undefined) continue + fn.call(thisp, value, this.keyList[i], this) } } rforEach(fn, thisp = this) { for (const i of this.rindexes()) { - fn.call(thisp, this.valList[i], this.keyList[i], this) + const v = this.valList[i] + const value = this.isBackgroundFetch(v) + ? 
v.__staleWhileFetching + : v + if (value === undefined) continue + fn.call(thisp, value, this.keyList[i], this) } } @@ -608,12 +665,17 @@ class LRUCache { size = 0, sizeCalculation = this.sizeCalculation, noUpdateTTL = this.noUpdateTTL, + status, } = {} ) { size = this.requireSize(k, v, size, sizeCalculation) // if the item doesn't fit, don't do anything // NB: maxEntrySize set to maxSize by default if (this.maxEntrySize && size > this.maxEntrySize) { + if (status) { + status.set = 'miss' + status.maxEntrySizeExceeded = true + } // have to delete, in case a background fetch is there already. // in non-async cases, this is a no-op this.delete(k) @@ -630,7 +692,10 @@ class LRUCache { this.prev[index] = this.tail this.tail = index this.size++ - this.addItemSize(index, size) + this.addItemSize(index, size, status) + if (status) { + status.set = 'add' + } noUpdateTTL = false } else { // update @@ -649,7 +714,17 @@ class LRUCache { } this.removeItemSize(index) this.valList[index] = v - this.addItemSize(index, size) + this.addItemSize(index, size, status) + if (status) { + status.set = 'replace' + const oldValue = + oldVal && this.isBackgroundFetch(oldVal) + ? oldVal.__staleWhileFetching + : oldVal + if (oldValue !== undefined) status.oldValue = oldValue + } + } else if (status) { + status.set = 'update' } } if (ttl !== 0 && this.ttl === 0 && !this.ttls) { @@ -658,6 +733,7 @@ class LRUCache { if (!noUpdateTTL) { this.setItemTTL(index, ttl, start) } + this.statusTTL(status, index) if (this.disposeAfter) { while (this.disposed.length) { this.disposeAfter(...this.disposed.shift()) @@ -713,15 +789,22 @@ class LRUCache { return head } - has(k, { updateAgeOnHas = this.updateAgeOnHas } = {}) { + has(k, { updateAgeOnHas = this.updateAgeOnHas, status } = {}) { const index = this.keyMap.get(k) if (index !== undefined) { if (!this.isStale(index)) { if (updateAgeOnHas) { this.updateItemAge(index) } + if (status) status.has = 'hit' + this.statusTTL(status, index) return true + } else if (status) { + status.has = 'stale' + this.statusTTL(status, index) } + } else if (status) { + status.has = 'miss' } return false } @@ -742,51 +825,109 @@ class LRUCache { return v } const ac = new AC() + if (options.signal) { + options.signal.addEventListener('abort', () => + ac.abort(options.signal.reason) + ) + } const fetchOpts = { signal: ac.signal, options, context, } - const cb = v => { - if (!ac.signal.aborted) { - this.set(k, v, fetchOpts.options) - return v - } else { - return eb(ac.signal.reason) + const cb = (v, updateCache = false) => { + const { aborted } = ac.signal + const ignoreAbort = options.ignoreFetchAbort && v !== undefined + if (options.status) { + if (aborted && !updateCache) { + options.status.fetchAborted = true + options.status.fetchError = ac.signal.reason + if (ignoreAbort) options.status.fetchAbortIgnored = true + } else { + options.status.fetchResolved = true + } + } + if (aborted && !ignoreAbort && !updateCache) { + return fetchFail(ac.signal.reason) + } + // either we didn't abort, and are still here, or we did, and ignored + if (this.valList[index] === p) { + if (v === undefined) { + if (p.__staleWhileFetching) { + this.valList[index] = p.__staleWhileFetching + } else { + this.delete(k) + } + } else { + if (options.status) options.status.fetchUpdated = true + this.set(k, v, fetchOpts.options) + } } + return v } const eb = er => { + if (options.status) { + options.status.fetchRejected = true + options.status.fetchError = er + } + return fetchFail(er) + } + const fetchFail = er => { + const { 
aborted } = ac.signal + const allowStaleAborted = + aborted && options.allowStaleOnFetchAbort + const allowStale = + allowStaleAborted || options.allowStaleOnFetchRejection + const noDelete = allowStale || options.noDeleteOnFetchRejection if (this.valList[index] === p) { // if we allow stale on fetch rejections, then we need to ensure that // the stale value is not removed from the cache when the fetch fails. - const noDelete = - options.noDeleteOnFetchRejection || - options.allowStaleOnFetchRejection const del = !noDelete || p.__staleWhileFetching === undefined if (del) { this.delete(k) - } else { + } else if (!allowStaleAborted) { // still replace the *promise* with the stale value, // since we are done with the promise at this point. + // leave it untouched if we're still waiting for an + // aborted background fetch that hasn't yet returned. this.valList[index] = p.__staleWhileFetching } } - if (options.allowStaleOnFetchRejection) { + if (allowStale) { + if (options.status && p.__staleWhileFetching !== undefined) { + options.status.returnedStale = true + } return p.__staleWhileFetching } else if (p.__returned === p) { throw er } } const pcall = (res, rej) => { - ac.signal.addEventListener('abort', () => res()) - this.fetchMethod(k, v, fetchOpts).then(res, rej) + this.fetchMethod(k, v, fetchOpts).then(v => res(v), rej) + // ignored, we go until we finish, regardless. + // defer check until we are actually aborting, + // so fetchMethod can override. + ac.signal.addEventListener('abort', () => { + if ( + !options.ignoreFetchAbort || + options.allowStaleOnFetchAbort + ) { + res() + // when it eventually resolves, update the cache. + if (options.allowStaleOnFetchAbort) { + res = v => cb(v, true) + } + } + }) } + if (options.status) options.status.fetchDispatched = true const p = new Promise(pcall).then(cb, eb) p.__abortController = ac p.__staleWhileFetching = v p.__returned = null if (index === undefined) { - this.set(k, p, fetchOpts.options) + // internal, don't expose status. + this.set(k, p, { ...fetchOpts.options, status: undefined }) index = this.keyMap.get(k) } else { this.valList[index] = p @@ -825,16 +966,21 @@ class LRUCache { // fetch exclusive options noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, + ignoreFetchAbort = this.ignoreFetchAbort, + allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, fetchContext = this.fetchContext, forceRefresh = false, + status, signal, } = {} ) { if (!this.fetchMethod) { + if (status) status.fetch = 'get' return this.get(k, { allowStale, updateAgeOnGet, noDeleteOnStaleGet, + status, }) } @@ -849,38 +995,53 @@ class LRUCache { noUpdateTTL, noDeleteOnFetchRejection, allowStaleOnFetchRejection, + allowStaleOnFetchAbort, + ignoreFetchAbort, + status, signal, } let index = this.keyMap.get(k) if (index === undefined) { + if (status) status.fetch = 'miss' const p = this.backgroundFetch(k, index, options, fetchContext) return (p.__returned = p) } else { // in cache, maybe already fetching const v = this.valList[index] if (this.isBackgroundFetch(v)) { - return allowStale && v.__staleWhileFetching !== undefined - ? v.__staleWhileFetching - : (v.__returned = v) + const stale = + allowStale && v.__staleWhileFetching !== undefined + if (status) { + status.fetch = 'inflight' + if (stale) status.returnedStale = true + } + return stale ? 
v.__staleWhileFetching : (v.__returned = v) } // if we force a refresh, that means do NOT serve the cached value, // unless we are already in the process of refreshing the cache. - if (!forceRefresh && !this.isStale(index)) { + const isStale = this.isStale(index) + if (!forceRefresh && !isStale) { + if (status) status.fetch = 'hit' this.moveToTail(index) if (updateAgeOnGet) { this.updateItemAge(index) } + this.statusTTL(status, index) return v } // ok, it is stale or a forced refresh, and not already fetching. // refresh the cache. const p = this.backgroundFetch(k, index, options, fetchContext) - return allowStale && p.__staleWhileFetching !== undefined - ? p.__staleWhileFetching - : (p.__returned = p) + const hasStale = p.__staleWhileFetching !== undefined + const staleVal = hasStale && allowStale + if (status) { + status.fetch = hasStale && isStale ? 'stale' : 'refresh' + if (staleVal && isStale) status.returnedStale = true + } + return staleVal ? p.__staleWhileFetching : (p.__returned = p) } } @@ -890,28 +1051,39 @@ class LRUCache { allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, + status, } = {} ) { const index = this.keyMap.get(k) if (index !== undefined) { const value = this.valList[index] const fetching = this.isBackgroundFetch(value) + this.statusTTL(status, index) if (this.isStale(index)) { + if (status) status.get = 'stale' // delete only if not an in-flight background fetch if (!fetching) { if (!noDeleteOnStaleGet) { this.delete(k) } + if (status) status.returnedStale = allowStale return allowStale ? value : undefined } else { + if (status) { + status.returnedStale = + allowStale && value.__staleWhileFetching !== undefined + } return allowStale ? value.__staleWhileFetching : undefined } } else { + if (status) status.get = 'hit' // if we're currently fetching it, we don't actually have it yet - // it's not stale, which means this isn't a staleWhileRefetching, - // so we just return undefined + // it's not stale, which means this isn't a staleWhileRefetching. + // If it's not stale, and fetching, AND has a __staleWhileFetching + // value, then that means the user fetched with {forceRefresh:true}, + // so it's safe to return that value. 
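The rewritten comment above captures a behavior change in get(): while a forceRefresh fetch is in flight for a non-stale key, get() now returns the previous value via __staleWhileFetching instead of undefined. Roughly, continuing the hypothetical cache from the earlier sketch:

cache.set('k', 'old')
const refreshing = cache.fetch('k', { forceRefresh: true })
// 7.16.x returned undefined here; with this change it returns 'old'
console.log(cache.get('k'))
await refreshing // resolves to the freshly fetched value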
if (fetching) { - return undefined + return value.__staleWhileFetching } this.moveToTail(index) if (updateAgeOnGet) { @@ -919,6 +1091,8 @@ class LRUCache { } return value } + } else if (status) { + status.get = 'miss' } } diff --git a/node_modules/lru-cache/index.mjs b/node_modules/lru-cache/index.mjs index e69e77fbb3456..4a0b4813ec515 100644 --- a/node_modules/lru-cache/index.mjs +++ b/node_modules/lru-cache/index.mjs @@ -18,7 +18,8 @@ const AC = hasAbortController this.signal = new AS() } abort(reason = new Error('This operation was aborted')) { - this.signal.reason = reason + this.signal.reason = this.signal.reason || reason + this.signal.aborted = true this.signal.dispatchEvent({ type: 'abort', target: this.signal, @@ -168,6 +169,8 @@ class LRUCache { noDeleteOnFetchRejection, noDeleteOnStaleGet, allowStaleOnFetchRejection, + allowStaleOnFetchAbort, + ignoreFetchAbort, } = options // deprecated options, don't trigger a warning for getting them if @@ -238,6 +241,8 @@ class LRUCache { this.noUpdateTTL = !!noUpdateTTL this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection + this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort + this.ignoreFetchAbort = !!ignoreFetchAbort // NB: maxEntrySize is set to maxSize if it's set if (this.maxEntrySize !== 0) { @@ -331,6 +336,15 @@ class LRUCache { this.starts[index] = this.ttls[index] !== 0 ? perf.now() : 0 } + this.statusTTL = (status, index) => { + if (status) { + status.ttl = this.ttls[index] + status.start = this.starts[index] + status.now = cachedNow || getNow() + status.remainingTTL = status.now + status.ttl - status.start + } + } + // debounce calls to perf.now() to 1s so we're not hitting // that costly call repeatedly. let cachedNow = 0 @@ -372,6 +386,7 @@ class LRUCache { } } updateItemAge(_index) {} + statusTTL(_status, _index) {} setItemTTL(_index, _ttl, _start) {} isStale(_index) { return false @@ -411,7 +426,7 @@ class LRUCache { } return size } - this.addItemSize = (index, size) => { + this.addItemSize = (index, size, status) => { this.sizes[index] = size if (this.maxSize) { const maxSize = this.maxSize - this.sizes[index] @@ -420,6 +435,10 @@ class LRUCache { } } this.calculatedSize += this.sizes[index] + if (status) { + status.entrySize = size + status.totalCalculatedSize = this.calculatedSize + } } } removeItemSize(_index) {} @@ -469,19 +488,30 @@ class LRUCache { } isValidIndex(index) { - return this.keyMap.get(this.keyList[index]) === index + return ( + index !== undefined && + this.keyMap.get(this.keyList[index]) === index + ) } *entries() { for (const i of this.indexes()) { - if (!this.isBackgroundFetch(this.valList[i])) { + if ( + this.valList[i] !== undefined && + this.keyList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { yield [this.keyList[i], this.valList[i]] } } } *rentries() { for (const i of this.rindexes()) { - if (!this.isBackgroundFetch(this.valList[i])) { + if ( + this.valList[i] !== undefined && + this.keyList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { yield [this.keyList[i], this.valList[i]] } } @@ -489,14 +519,20 @@ class LRUCache { *keys() { for (const i of this.indexes()) { - if (!this.isBackgroundFetch(this.valList[i])) { + if ( + this.keyList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { yield this.keyList[i] } } } *rkeys() { for (const i of this.rindexes()) { - if (!this.isBackgroundFetch(this.valList[i])) { + if ( + this.keyList[i] !== undefined && + 
!this.isBackgroundFetch(this.valList[i]) + ) { yield this.keyList[i] } } @@ -504,14 +540,20 @@ class LRUCache { *values() { for (const i of this.indexes()) { - if (!this.isBackgroundFetch(this.valList[i])) { + if ( + this.valList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { yield this.valList[i] } } } *rvalues() { for (const i of this.rindexes()) { - if (!this.isBackgroundFetch(this.valList[i])) { + if ( + this.valList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { yield this.valList[i] } } @@ -521,9 +563,14 @@ class LRUCache { return this.entries() } - find(fn, getOptions = {}) { + find(fn, getOptions) { for (const i of this.indexes()) { - if (fn(this.valList[i], this.keyList[i], this)) { + const v = this.valList[i] + const value = this.isBackgroundFetch(v) + ? v.__staleWhileFetching + : v + if (value === undefined) continue + if (fn(value, this.keyList[i], this)) { return this.get(this.keyList[i], getOptions) } } @@ -531,13 +578,23 @@ class LRUCache { forEach(fn, thisp = this) { for (const i of this.indexes()) { - fn.call(thisp, this.valList[i], this.keyList[i], this) + const v = this.valList[i] + const value = this.isBackgroundFetch(v) + ? v.__staleWhileFetching + : v + if (value === undefined) continue + fn.call(thisp, value, this.keyList[i], this) } } rforEach(fn, thisp = this) { for (const i of this.rindexes()) { - fn.call(thisp, this.valList[i], this.keyList[i], this) + const v = this.valList[i] + const value = this.isBackgroundFetch(v) + ? v.__staleWhileFetching + : v + if (value === undefined) continue + fn.call(thisp, value, this.keyList[i], this) } } @@ -608,12 +665,17 @@ class LRUCache { size = 0, sizeCalculation = this.sizeCalculation, noUpdateTTL = this.noUpdateTTL, + status, } = {} ) { size = this.requireSize(k, v, size, sizeCalculation) // if the item doesn't fit, don't do anything // NB: maxEntrySize set to maxSize by default if (this.maxEntrySize && size > this.maxEntrySize) { + if (status) { + status.set = 'miss' + status.maxEntrySizeExceeded = true + } // have to delete, in case a background fetch is there already. // in non-async cases, this is a no-op this.delete(k) @@ -630,7 +692,10 @@ class LRUCache { this.prev[index] = this.tail this.tail = index this.size++ - this.addItemSize(index, size) + this.addItemSize(index, size, status) + if (status) { + status.set = 'add' + } noUpdateTTL = false } else { // update @@ -649,7 +714,17 @@ class LRUCache { } this.removeItemSize(index) this.valList[index] = v - this.addItemSize(index, size) + this.addItemSize(index, size, status) + if (status) { + status.set = 'replace' + const oldValue = + oldVal && this.isBackgroundFetch(oldVal) + ? 
oldVal.__staleWhileFetching + : oldVal + if (oldValue !== undefined) status.oldValue = oldValue + } + } else if (status) { + status.set = 'update' } } if (ttl !== 0 && this.ttl === 0 && !this.ttls) { @@ -658,6 +733,7 @@ class LRUCache { if (!noUpdateTTL) { this.setItemTTL(index, ttl, start) } + this.statusTTL(status, index) if (this.disposeAfter) { while (this.disposed.length) { this.disposeAfter(...this.disposed.shift()) @@ -713,15 +789,22 @@ class LRUCache { return head } - has(k, { updateAgeOnHas = this.updateAgeOnHas } = {}) { + has(k, { updateAgeOnHas = this.updateAgeOnHas, status } = {}) { const index = this.keyMap.get(k) if (index !== undefined) { if (!this.isStale(index)) { if (updateAgeOnHas) { this.updateItemAge(index) } + if (status) status.has = 'hit' + this.statusTTL(status, index) return true + } else if (status) { + status.has = 'stale' + this.statusTTL(status, index) } + } else if (status) { + status.has = 'miss' } return false } @@ -742,51 +825,109 @@ class LRUCache { return v } const ac = new AC() + if (options.signal) { + options.signal.addEventListener('abort', () => + ac.abort(options.signal.reason) + ) + } const fetchOpts = { signal: ac.signal, options, context, } - const cb = v => { - if (!ac.signal.aborted) { - this.set(k, v, fetchOpts.options) - return v - } else { - return eb(ac.signal.reason) + const cb = (v, updateCache = false) => { + const { aborted } = ac.signal + const ignoreAbort = options.ignoreFetchAbort && v !== undefined + if (options.status) { + if (aborted && !updateCache) { + options.status.fetchAborted = true + options.status.fetchError = ac.signal.reason + if (ignoreAbort) options.status.fetchAbortIgnored = true + } else { + options.status.fetchResolved = true + } + } + if (aborted && !ignoreAbort && !updateCache) { + return fetchFail(ac.signal.reason) + } + // either we didn't abort, and are still here, or we did, and ignored + if (this.valList[index] === p) { + if (v === undefined) { + if (p.__staleWhileFetching) { + this.valList[index] = p.__staleWhileFetching + } else { + this.delete(k) + } + } else { + if (options.status) options.status.fetchUpdated = true + this.set(k, v, fetchOpts.options) + } } + return v } const eb = er => { + if (options.status) { + options.status.fetchRejected = true + options.status.fetchError = er + } + return fetchFail(er) + } + const fetchFail = er => { + const { aborted } = ac.signal + const allowStaleAborted = + aborted && options.allowStaleOnFetchAbort + const allowStale = + allowStaleAborted || options.allowStaleOnFetchRejection + const noDelete = allowStale || options.noDeleteOnFetchRejection if (this.valList[index] === p) { // if we allow stale on fetch rejections, then we need to ensure that // the stale value is not removed from the cache when the fetch fails. - const noDelete = - options.noDeleteOnFetchRejection || - options.allowStaleOnFetchRejection const del = !noDelete || p.__staleWhileFetching === undefined if (del) { this.delete(k) - } else { + } else if (!allowStaleAborted) { // still replace the *promise* with the stale value, // since we are done with the promise at this point. + // leave it untouched if we're still waiting for an + // aborted background fetch that hasn't yet returned. 
this.valList[index] = p.__staleWhileFetching } } - if (options.allowStaleOnFetchRejection) { + if (allowStale) { + if (options.status && p.__staleWhileFetching !== undefined) { + options.status.returnedStale = true + } return p.__staleWhileFetching } else if (p.__returned === p) { throw er } } const pcall = (res, rej) => { - ac.signal.addEventListener('abort', () => res()) - this.fetchMethod(k, v, fetchOpts).then(res, rej) + this.fetchMethod(k, v, fetchOpts).then(v => res(v), rej) + // ignored, we go until we finish, regardless. + // defer check until we are actually aborting, + // so fetchMethod can override. + ac.signal.addEventListener('abort', () => { + if ( + !options.ignoreFetchAbort || + options.allowStaleOnFetchAbort + ) { + res() + // when it eventually resolves, update the cache. + if (options.allowStaleOnFetchAbort) { + res = v => cb(v, true) + } + } + }) } + if (options.status) options.status.fetchDispatched = true const p = new Promise(pcall).then(cb, eb) p.__abortController = ac p.__staleWhileFetching = v p.__returned = null if (index === undefined) { - this.set(k, p, fetchOpts.options) + // internal, don't expose status. + this.set(k, p, { ...fetchOpts.options, status: undefined }) index = this.keyMap.get(k) } else { this.valList[index] = p @@ -825,16 +966,21 @@ class LRUCache { // fetch exclusive options noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, + ignoreFetchAbort = this.ignoreFetchAbort, + allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, fetchContext = this.fetchContext, forceRefresh = false, + status, signal, } = {} ) { if (!this.fetchMethod) { + if (status) status.fetch = 'get' return this.get(k, { allowStale, updateAgeOnGet, noDeleteOnStaleGet, + status, }) } @@ -849,38 +995,53 @@ class LRUCache { noUpdateTTL, noDeleteOnFetchRejection, allowStaleOnFetchRejection, + allowStaleOnFetchAbort, + ignoreFetchAbort, + status, signal, } let index = this.keyMap.get(k) if (index === undefined) { + if (status) status.fetch = 'miss' const p = this.backgroundFetch(k, index, options, fetchContext) return (p.__returned = p) } else { // in cache, maybe already fetching const v = this.valList[index] if (this.isBackgroundFetch(v)) { - return allowStale && v.__staleWhileFetching !== undefined - ? v.__staleWhileFetching - : (v.__returned = v) + const stale = + allowStale && v.__staleWhileFetching !== undefined + if (status) { + status.fetch = 'inflight' + if (stale) status.returnedStale = true + } + return stale ? v.__staleWhileFetching : (v.__returned = v) } // if we force a refresh, that means do NOT serve the cached value, // unless we are already in the process of refreshing the cache. - if (!forceRefresh && !this.isStale(index)) { + const isStale = this.isStale(index) + if (!forceRefresh && !isStale) { + if (status) status.fetch = 'hit' this.moveToTail(index) if (updateAgeOnGet) { this.updateItemAge(index) } + this.statusTTL(status, index) return v } // ok, it is stale or a forced refresh, and not already fetching. // refresh the cache. const p = this.backgroundFetch(k, index, options, fetchContext) - return allowStale && p.__staleWhileFetching !== undefined - ? p.__staleWhileFetching - : (p.__returned = p) + const hasStale = p.__staleWhileFetching !== undefined + const staleVal = hasStale && allowStale + if (status) { + status.fetch = hasStale && isStale ? 'stale' : 'refresh' + if (staleVal && isStale) status.returnedStale = true + } + return staleVal ? 
p.__staleWhileFetching : (p.__returned = p) } } @@ -890,28 +1051,39 @@ class LRUCache { allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, + status, } = {} ) { const index = this.keyMap.get(k) if (index !== undefined) { const value = this.valList[index] const fetching = this.isBackgroundFetch(value) + this.statusTTL(status, index) if (this.isStale(index)) { + if (status) status.get = 'stale' // delete only if not an in-flight background fetch if (!fetching) { if (!noDeleteOnStaleGet) { this.delete(k) } + if (status) status.returnedStale = allowStale return allowStale ? value : undefined } else { + if (status) { + status.returnedStale = + allowStale && value.__staleWhileFetching !== undefined + } return allowStale ? value.__staleWhileFetching : undefined } } else { + if (status) status.get = 'hit' // if we're currently fetching it, we don't actually have it yet - // it's not stale, which means this isn't a staleWhileRefetching, - // so we just return undefined + // it's not stale, which means this isn't a staleWhileRefetching. + // If it's not stale, and fetching, AND has a __staleWhileFetching + // value, then that means the user fetched with {forceRefresh:true}, + // so it's safe to return that value. if (fetching) { - return undefined + return value.__staleWhileFetching } this.moveToTail(index) if (updateAgeOnGet) { @@ -919,6 +1091,8 @@ class LRUCache { } return value } + } else if (status) { + status.get = 'miss' } } diff --git a/node_modules/lru-cache/package.json b/node_modules/lru-cache/package.json index fb90c93901b09..9684991727e7a 100644 --- a/node_modules/lru-cache/package.json +++ b/node_modules/lru-cache/package.json @@ -1,7 +1,7 @@ { "name": "lru-cache", "description": "A cache object that deletes the least-recently-used items.", - "version": "7.16.2", + "version": "7.18.3", "author": "Isaac Z. Schlueter ", "keywords": [ "mru", @@ -13,7 +13,7 @@ "build": "npm run prepare", "pretest": "npm run prepare", "presnap": "npm run prepare", - "prepare": "node ./scripts/transpile-to-esm.mjs", + "prepare": "node ./scripts/transpile-to-esm.js", "size": "size-limit", "test": "tap", "snap": "tap", diff --git a/node_modules/node-gyp/node_modules/readable-stream/lib/_stream_duplex.js b/node_modules/node-gyp/node_modules/readable-stream/lib/_stream_duplex.js index 67525192250f6..c51c67d414456 100644 --- a/node_modules/node-gyp/node_modules/readable-stream/lib/_stream_duplex.js +++ b/node_modules/node-gyp/node_modules/readable-stream/lib/_stream_duplex.js @@ -18,66 +18,54 @@ // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. + // a duplex stream is just a stream that is both readable and writable. // Since JS doesn't have multiple prototypal inheritance, this class // prototypally inherits from Readable, and then parasitically from // Writable. + 'use strict'; -/**/ +/**/ var objectKeys = Object.keys || function (obj) { var keys = []; - - for (var key in obj) { - keys.push(key); - } - + for (var key in obj) keys.push(key); return keys; }; /**/ - module.exports = Duplex; - -var Readable = require('./_stream_readable'); - -var Writable = require('./_stream_writable'); - +const Readable = require('./_stream_readable'); +const Writable = require('./_stream_writable'); require('inherits')(Duplex, Readable); - { // Allow the keys array to be GC'ed. 
- var keys = objectKeys(Writable.prototype); - + const keys = objectKeys(Writable.prototype); for (var v = 0; v < keys.length; v++) { - var method = keys[v]; + const method = keys[v]; if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method]; } } - function Duplex(options) { if (!(this instanceof Duplex)) return new Duplex(options); Readable.call(this, options); Writable.call(this, options); this.allowHalfOpen = true; - if (options) { if (options.readable === false) this.readable = false; if (options.writable === false) this.writable = false; - if (options.allowHalfOpen === false) { this.allowHalfOpen = false; this.once('end', onend); } } } - Object.defineProperty(Duplex.prototype, 'writableHighWaterMark', { // making it explicit this property is not enumerable // because otherwise some prototype manipulation in // userland will fail enumerable: false, - get: function get() { + get() { return this._writableState.highWaterMark; } }); @@ -95,44 +83,43 @@ Object.defineProperty(Duplex.prototype, 'writableLength', { // because otherwise some prototype manipulation in // userland will fail enumerable: false, - get: function get() { + get() { return this._writableState.length; } -}); // the no-half-open enforcer +}); +// the no-half-open enforcer function onend() { // If the writable side ended, then we're ok. - if (this._writableState.ended) return; // no more data can be written. - // But allow more writes to happen in this tick. + if (this._writableState.ended) return; + // no more data can be written. + // But allow more writes to happen in this tick. process.nextTick(onEndNT, this); } - function onEndNT(self) { self.end(); } - Object.defineProperty(Duplex.prototype, 'destroyed', { // making it explicit this property is not enumerable // because otherwise some prototype manipulation in // userland will fail enumerable: false, - get: function get() { + get() { if (this._readableState === undefined || this._writableState === undefined) { return false; } - return this._readableState.destroyed && this._writableState.destroyed; }, - set: function set(value) { + set(value) { // we ignore the value if the stream // has not been initialized yet if (this._readableState === undefined || this._writableState === undefined) { return; - } // backward compatibility, the user is explicitly - // managing destroyed - + } + // backward compatibility, the user is explicitly + // managing destroyed this._readableState.destroyed = value; this._writableState.destroyed = value; } diff --git a/node_modules/node-gyp/node_modules/readable-stream/lib/_stream_passthrough.js b/node_modules/node-gyp/node_modules/readable-stream/lib/_stream_passthrough.js index 32e7414c5a827..38a1eaac80794 100644 --- a/node_modules/node-gyp/node_modules/readable-stream/lib/_stream_passthrough.js +++ b/node_modules/node-gyp/node_modules/readable-stream/lib/_stream_passthrough.js @@ -18,22 +18,20 @@ // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. + // a passthrough stream. // basically just the most minimal sort of Transform stream. // Every written chunk gets output as-is. 
+ 'use strict'; module.exports = PassThrough; - -var Transform = require('./_stream_transform'); - +const Transform = require('./_stream_transform'); require('inherits')(PassThrough, Transform); - function PassThrough(options) { if (!(this instanceof PassThrough)) return new PassThrough(options); Transform.call(this, options); } - PassThrough.prototype._transform = function (chunk, encoding, cb) { cb(null, chunk); }; \ No newline at end of file diff --git a/node_modules/node-gyp/node_modules/readable-stream/lib/_stream_readable.js b/node_modules/node-gyp/node_modules/readable-stream/lib/_stream_readable.js index 192d451488f20..8e3af6c9edece 100644 --- a/node_modules/node-gyp/node_modules/readable-stream/lib/_stream_readable.js +++ b/node_modules/node-gyp/node_modules/readable-stream/lib/_stream_readable.js @@ -18,49 +18,40 @@ // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. + 'use strict'; module.exports = Readable; -/**/ +/**/ var Duplex; /**/ Readable.ReadableState = ReadableState; -/**/ - -var EE = require('events').EventEmitter; +/**/ +const EE = require('events').EventEmitter; var EElistenerCount = function EElistenerCount(emitter, type) { return emitter.listeners(type).length; }; /**/ /**/ - - var Stream = require('./internal/streams/stream'); /**/ - -var Buffer = require('buffer').Buffer; - -var OurUint8Array = global.Uint8Array || function () {}; - +const Buffer = require('buffer').Buffer; +const OurUint8Array = (typeof global !== 'undefined' ? global : typeof window !== 'undefined' ? window : typeof self !== 'undefined' ? self : {}).Uint8Array || function () {}; function _uint8ArrayToBuffer(chunk) { return Buffer.from(chunk); } - function _isUint8Array(obj) { return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; } -/**/ - - -var debugUtil = require('util'); - -var debug; +/**/ +const debugUtil = require('util'); +let debug; if (debugUtil && debugUtil.debuglog) { debug = debugUtil.debuglog('stream'); } else { @@ -68,60 +59,57 @@ if (debugUtil && debugUtil.debuglog) { } /**/ - -var BufferList = require('./internal/streams/buffer_list'); - -var destroyImpl = require('./internal/streams/destroy'); - -var _require = require('./internal/streams/state'), - getHighWaterMark = _require.getHighWaterMark; - -var _require$codes = require('../errors').codes, - ERR_INVALID_ARG_TYPE = _require$codes.ERR_INVALID_ARG_TYPE, - ERR_STREAM_PUSH_AFTER_EOF = _require$codes.ERR_STREAM_PUSH_AFTER_EOF, - ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED, - ERR_STREAM_UNSHIFT_AFTER_END_EVENT = _require$codes.ERR_STREAM_UNSHIFT_AFTER_END_EVENT; // Lazy loaded to improve the startup performance. - - -var StringDecoder; -var createReadableStreamAsyncIterator; -var from; - +const BufferList = require('./internal/streams/buffer_list'); +const destroyImpl = require('./internal/streams/destroy'); +const _require = require('./internal/streams/state'), + getHighWaterMark = _require.getHighWaterMark; +const _require$codes = require('../errors').codes, + ERR_INVALID_ARG_TYPE = _require$codes.ERR_INVALID_ARG_TYPE, + ERR_STREAM_PUSH_AFTER_EOF = _require$codes.ERR_STREAM_PUSH_AFTER_EOF, + ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED, + ERR_STREAM_UNSHIFT_AFTER_END_EVENT = _require$codes.ERR_STREAM_UNSHIFT_AFTER_END_EVENT; + +// Lazy loaded to improve the startup performance. 
+let StringDecoder;
+let createReadableStreamAsyncIterator;
+let from;
 require('inherits')(Readable, Stream);
-
-var errorOrDestroy = destroyImpl.errorOrDestroy;
-var kProxyEvents = ['error', 'close', 'destroy', 'pause', 'resume'];
-
+const errorOrDestroy = destroyImpl.errorOrDestroy;
+const kProxyEvents = ['error', 'close', 'destroy', 'pause', 'resume'];
 function prependListener(emitter, event, fn) {
   // Sadly this is not cacheable as some libraries bundle their own
   // event emitter implementation with them.
-  if (typeof emitter.prependListener === 'function') return emitter.prependListener(event, fn); // This is a hack to make sure that our error handler is attached before any
+  if (typeof emitter.prependListener === 'function') return emitter.prependListener(event, fn);
+
+  // This is a hack to make sure that our error handler is attached before any
   // userland ones. NEVER DO THIS. This is here only because this code needs
   // to continue to work with older versions of Node.js that do not include
   // the prependListener() method. The goal is to eventually remove this hack.
-
   if (!emitter._events || !emitter._events[event]) emitter.on(event, fn);else if (Array.isArray(emitter._events[event])) emitter._events[event].unshift(fn);else emitter._events[event] = [fn, emitter._events[event]];
 }
-
 function ReadableState(options, stream, isDuplex) {
   Duplex = Duplex || require('./_stream_duplex');
-  options = options || {}; // Duplex streams are both readable and writable, but share
+  options = options || {};
+
+  // Duplex streams are both readable and writable, but share
   // the same options object.
   // However, some cases require setting options to different
   // values for the readable and the writable sides of the duplex stream.
   // These options can be provided separately as readableXXX and writableXXX.
+  if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof Duplex;
-  if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof Duplex; // object stream flag. Used to make read(n) ignore n and to
+
+  // object stream flag. Used to make read(n) ignore n and to
   // make all the buffer merging and length checks go away
-
   this.objectMode = !!options.objectMode;
-  if (isDuplex) this.objectMode = this.objectMode || !!options.readableObjectMode; // the point at which it stops calling _read() to fill the buffer
+  if (isDuplex) this.objectMode = this.objectMode || !!options.readableObjectMode;
+
+  // the point at which it stops calling _read() to fill the buffer
   // Note: 0 is a valid value, means "don't call _read preemptively ever"
+  this.highWaterMark = getHighWaterMark(this, options, 'readableHighWaterMark', isDuplex);
-  this.highWaterMark = getHighWaterMark(this, options, 'readableHighWaterMark', isDuplex); // A linked list is used to store data chunks instead of an array because the
+
+  // A linked list is used to store data chunks instead of an array because the
   // linked list can remove elements from the beginning faster than
   // array.shift()
-
   this.buffer = new BufferList();
   this.length = 0;
   this.pipes = null;
@@ -129,141 +117,136 @@ function ReadableState(options, stream, isDuplex) {
   this.flowing = null;
   this.ended = false;
   this.endEmitted = false;
-  this.reading = false; // a flag to be able to tell if the event 'readable'/'data' is emitted
+  this.reading = false;
+
+  // a flag to be able to tell if the event 'readable'/'data' is emitted
   // immediately, or on a later tick. We set this to true at first, because
   // any actions that shouldn't happen until "later" should generally also
   // not happen before the first read call.
+  this.sync = true;
-  this.sync = true; // whenever we return null, then we set a flag to say
+
+  // whenever we return null, then we set a flag to say
   // that we're awaiting a 'readable' event emission.
-
   this.needReadable = false;
   this.emittedReadable = false;
   this.readableListening = false;
   this.resumeScheduled = false;
-  this.paused = true; // Should close be emitted on destroy. Defaults to true.
+  this.paused = true;
+
+  // Should close be emitted on destroy. Defaults to true.
+  this.emitClose = options.emitClose !== false;
-  this.emitClose = options.emitClose !== false; // Should .destroy() be called after 'end' (and potentially 'finish')
+  // Should .destroy() be called after 'end' (and potentially 'finish')
+  this.autoDestroy = !!options.autoDestroy;
-  this.autoDestroy = !!options.autoDestroy; // has it been destroyed
+  // has it been destroyed
+  this.destroyed = false;
-  this.destroyed = false; // Crypto is kind of old and crusty. Historically, its default string
+  // Crypto is kind of old and crusty. Historically, its default string
   // encoding is 'binary' so we have to make this configurable.
   // Everything else in the universe uses 'utf8', though.
+  this.defaultEncoding = options.defaultEncoding || 'utf8';
-  this.defaultEncoding = options.defaultEncoding || 'utf8'; // the number of writers that are awaiting a drain event in .pipe()s
-
-  this.awaitDrain = 0; // if true, a maybeReadMore has been scheduled
+  // the number of writers that are awaiting a drain event in .pipe()s
+  this.awaitDrain = 0;
+  // if true, a maybeReadMore has been scheduled
   this.readingMore = false;
   this.decoder = null;
   this.encoding = null;
-
   if (options.encoding) {
     if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder;
     this.decoder = new StringDecoder(options.encoding);
     this.encoding = options.encoding;
   }
 }
-
 function Readable(options) {
   Duplex = Duplex || require('./_stream_duplex');
-  if (!(this instanceof Readable)) return new Readable(options); // Checking for a Stream.Duplex instance is faster here instead of inside
-  // the ReadableState constructor, at least with V8 6.5
+  if (!(this instanceof Readable)) return new Readable(options);
-
-  var isDuplex = this instanceof Duplex;
-  this._readableState = new ReadableState(options, this, isDuplex); // legacy
+  // Checking for a Stream.Duplex instance is faster here instead of inside
+  // the ReadableState constructor, at least with V8 6.5
+  const isDuplex = this instanceof Duplex;
+  this._readableState = new ReadableState(options, this, isDuplex);
+  // legacy
   this.readable = true;
-
   if (options) {
     if (typeof options.read === 'function') this._read = options.read;
     if (typeof options.destroy === 'function') this._destroy = options.destroy;
   }
-
   Stream.call(this);
 }
-
 Object.defineProperty(Readable.prototype, 'destroyed', {
   // making it explicit this property is not enumerable
   // because otherwise some prototype manipulation in
   // userland will fail
   enumerable: false,
-  get: function get() {
+  get() {
     if (this._readableState === undefined) {
       return false;
     }
-
     return this._readableState.destroyed;
   },
-  set: function set(value) {
+  set(value) {
     // we ignore the value if the stream
     // has not been initialized yet
     if (!this._readableState) {
       return;
-    } // backward compatibility, the user is explicitly
-    // managing destroyed
-
+    }
+    // backward compatibility, the user is explicitly
+    // managing destroyed
    this._readableState.destroyed = value;
  }
});
 Readable.prototype.destroy = destroyImpl.destroy;
 Readable.prototype._undestroy = destroyImpl.undestroy;
-
 Readable.prototype._destroy = function (err, cb) {
   cb(err);
-}; // Manually shove something into the read() buffer.
+};
+
+// Manually shove something into the read() buffer.
 // This returns true if the highWaterMark has not been hit yet,
 // similar to how Writable.write() returns true if you should
 // write() some more.
-
-
 Readable.prototype.push = function (chunk, encoding) {
   var state = this._readableState;
   var skipChunkCheck;
-
   if (!state.objectMode) {
     if (typeof chunk === 'string') {
       encoding = encoding || state.defaultEncoding;
-
       if (encoding !== state.encoding) {
         chunk = Buffer.from(chunk, encoding);
         encoding = '';
       }
-
       skipChunkCheck = true;
     }
   } else {
     skipChunkCheck = true;
   }
-
   return readableAddChunk(this, chunk, encoding, false, skipChunkCheck);
-}; // Unshift should *always* be something directly out of read()
-
+};
+// Unshift should *always* be something directly out of read()
 Readable.prototype.unshift = function (chunk) {
   return readableAddChunk(this, chunk, null, true, false);
 };
-
 function readableAddChunk(stream, chunk, encoding, addToFront, skipChunkCheck) {
   debug('readableAddChunk', chunk);
   var state = stream._readableState;
-
   if (chunk === null) {
     state.reading = false;
     onEofChunk(stream, state);
   } else {
     var er;
     if (!skipChunkCheck) er = chunkInvalid(state, chunk);
-
     if (er) {
       errorOrDestroy(stream, er);
     } else if (state.objectMode || chunk && chunk.length > 0) {
       if (typeof chunk !== 'string' && !state.objectMode && Object.getPrototypeOf(chunk) !== Buffer.prototype) {
         chunk = _uint8ArrayToBuffer(chunk);
       }
-
       if (addToFront) {
         if (state.endEmitted) errorOrDestroy(stream, new ERR_STREAM_UNSHIFT_AFTER_END_EVENT());else addChunk(stream, state, chunk, true);
       } else if (state.ended) {
@@ -272,7 +255,6 @@ function readableAddChunk(stream, chunk, encoding, addToFront, skipChunkCheck) {
         return false;
       } else {
         state.reading = false;
-
         if (state.decoder && !encoding) {
           chunk = state.decoder.write(chunk);
           if (state.objectMode || chunk.length !== 0) addChunk(stream, state, chunk, false);else maybeReadMore(stream, state);
@@ -284,14 +266,13 @@ function readableAddChunk(stream, chunk, encoding, addToFront, skipChunkCheck) {
       state.reading = false;
       maybeReadMore(stream, state);
     }
-  } // We can push more data if we are below the highWaterMark.
+  }
+
+  // We can push more data if we are below the highWaterMark.
   // Also, if we have no data yet, we can stand some more bytes.
   // This is to work around cases where hwm=0, such as the repl.
-
-
   return !state.ended && (state.length < state.highWaterMark || state.length === 0);
 }
-
 function addChunk(stream, state, chunk, addToFront) {
   if (state.flowing && state.length === 0 && !state.sync) {
     state.awaitDrain = 0;
@@ -302,50 +283,42 @@ function addChunk(stream, state, chunk, addToFront) {
     if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk);
     if (state.needReadable) emitReadable(stream);
   }
-
   maybeReadMore(stream, state);
 }
-
 function chunkInvalid(state, chunk) {
   var er;
-
   if (!_isUint8Array(chunk) && typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) {
     er = new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer', 'Uint8Array'], chunk);
   }
-
   return er;
 }
-
 Readable.prototype.isPaused = function () {
   return this._readableState.flowing === false;
-}; // backwards compatibility.
-
+};
+// backwards compatibility.
 Readable.prototype.setEncoding = function (enc) {
   if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder;
-  var decoder = new StringDecoder(enc);
-  this._readableState.decoder = decoder; // If setEncoding(null), decoder.encoding equals utf8
-
-  this._readableState.encoding = this._readableState.decoder.encoding; // Iterate over current buffer to convert already stored Buffers:
-
-  var p = this._readableState.buffer.head;
-  var content = '';
-
+  const decoder = new StringDecoder(enc);
+  this._readableState.decoder = decoder;
+  // If setEncoding(null), decoder.encoding equals utf8
+  this._readableState.encoding = this._readableState.decoder.encoding;
+
+  // Iterate over current buffer to convert already stored Buffers:
+  let p = this._readableState.buffer.head;
+  let content = '';
   while (p !== null) {
     content += decoder.write(p.data);
     p = p.next;
   }
-
   this._readableState.buffer.clear();
-
   if (content !== '') this._readableState.buffer.push(content);
   this._readableState.length = content.length;
   return this;
-}; // Don't raise the hwm > 1GB
-
-
-var MAX_HWM = 0x40000000;
+};
+// Don't raise the hwm > 1GB
+const MAX_HWM = 0x40000000;
 function computeNewHighWaterMark(n) {
   if (n >= MAX_HWM) {
     // TODO(ronag): Throw ERR_VALUE_OUT_OF_RANGE.
@@ -361,55 +334,54 @@ function computeNewHighWaterMark(n) {
     n |= n >>> 16;
     n++;
   }
-
   return n;
-} // This function is designed to be inlinable, so please take care when making
-// changes to the function body.
-
+}
+// This function is designed to be inlinable, so please take care when making
+// changes to the function body.
 function howMuchToRead(n, state) {
   if (n <= 0 || state.length === 0 && state.ended) return 0;
   if (state.objectMode) return 1;
-
   if (n !== n) {
     // Only flow one buffer at a time
     if (state.flowing && state.length) return state.buffer.head.data.length;else return state.length;
-  } // If we're asking for more than the current hwm, then raise the hwm.
-
-
+  }
+  // If we're asking for more than the current hwm, then raise the hwm.
   if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n);
-  if (n <= state.length) return n; // Don't have enough
-
+  if (n <= state.length) return n;
+  // Don't have enough
   if (!state.ended) {
     state.needReadable = true;
     return 0;
   }
-
   return state.length;
-} // you can override either this method, or the async _read(n) below.
-
+}
+// you can override either this method, or the async _read(n) below.
 Readable.prototype.read = function (n) {
   debug('read', n);
   n = parseInt(n, 10);
   var state = this._readableState;
   var nOrig = n;
-  if (n !== 0) state.emittedReadable = false; // if we're doing read(0) to trigger a readable event, but we
+  if (n !== 0) state.emittedReadable = false;
+
+  // if we're doing read(0) to trigger a readable event, but we
   // already have a bunch of data in the buffer, then just trigger
   // the 'readable' event and move on.
-
   if (n === 0 && state.needReadable && ((state.highWaterMark !== 0 ? state.length >= state.highWaterMark : state.length > 0) || state.ended)) {
     debug('read: emitReadable', state.length, state.ended);
     if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this);
     return null;
   }
+  n = howMuchToRead(n, state);
-
-  n = howMuchToRead(n, state); // if we've ended, and we're now clear, then finish it up.
-
+  // if we've ended, and we're now clear, then finish it up.
   if (n === 0 && state.ended) {
     if (state.length === 0) endReadable(this);
     return null;
-  } // All the actual chunk generation logic needs to be
+  }
+
+  // All the actual chunk generation logic needs to be
   // *below* the call to _read. The reason is that in certain
   // synthetic stream cases, such as passthrough streams, _read
   // may be a completely synchronous operation which may change
@@ -430,40 +402,37 @@ Readable.prototype.read = function (n) {
   // 'readable' etc.
   //
   // 3. Actually pull the requested chunks out of the buffer and return.
-  // if we need a readable event, then we need to do some reading.
-
+  // if we need a readable event, then we need to do some reading.
   var doRead = state.needReadable;
-  debug('need readable', doRead); // if we currently have less than the highWaterMark, then also read some
+  debug('need readable', doRead);
+
+  // if we currently have less than the highWaterMark, then also read some
   if (state.length === 0 || state.length - n < state.highWaterMark) {
     doRead = true;
     debug('length less than watermark', doRead);
-  } // however, if we've ended, then there's no point, and if we're already
-  // reading, then it's unnecessary.
-
+  }
+  // however, if we've ended, then there's no point, and if we're already
+  // reading, then it's unnecessary.
   if (state.ended || state.reading) {
     doRead = false;
     debug('reading or ended', doRead);
   } else if (doRead) {
     debug('do read');
     state.reading = true;
-    state.sync = true; // if the length is currently zero, then we *need* a readable event.
-
-    if (state.length === 0) state.needReadable = true; // call internal read method
-
+    state.sync = true;
+    // if the length is currently zero, then we *need* a readable event.
+    if (state.length === 0) state.needReadable = true;
+    // call internal read method
     this._read(state.highWaterMark);
-
-    state.sync = false; // If _read pushed data synchronously, then `reading` will be false,
+    state.sync = false;
+    // If _read pushed data synchronously, then `reading` will be false,
     // and we need to re-evaluate how much data we can return to the user.
-
     if (!state.reading) n = howMuchToRead(nOrig, state);
   }
-
   var ret;
   if (n > 0) ret = fromList(n, state);else ret = null;
-
   if (ret === null) {
     state.needReadable = state.length <= state.highWaterMark;
     n = 0;
@@ -471,34 +440,28 @@
     state.length -= n;
     state.awaitDrain = 0;
   }
-
   if (state.length === 0) {
     // If we have nothing in the buffer, then we want to know
     // as soon as we *do* get something into the buffer.
-    if (!state.ended) state.needReadable = true; // If we tried to read() past the EOF, then emit end on the next tick.
+    if (!state.ended) state.needReadable = true;
+
+    // If we tried to read() past the EOF, then emit end on the next tick.
     if (nOrig !== n && state.ended) endReadable(this);
   }
-
   if (ret !== null) this.emit('data', ret);
   return ret;
 };
-
 function onEofChunk(stream, state) {
   debug('onEofChunk');
   if (state.ended) return;
-
   if (state.decoder) {
     var chunk = state.decoder.end();
-
     if (chunk && chunk.length) {
       state.buffer.push(chunk);
       state.length += state.objectMode ? 1 : chunk.length;
     }
   }
-
   state.ended = true;
-
   if (state.sync) {
     // if we are sync, wait until next tick to emit the data.
     // Otherwise we risk emitting data in the flow()
@@ -507,61 +470,56 @@ function onEofChunk(stream, state) {
   } else {
     // emit 'readable' now to make sure it gets picked up.
     state.needReadable = false;
-
     if (!state.emittedReadable) {
       state.emittedReadable = true;
       emitReadable_(stream);
     }
   }
-} // Don't emit readable right away in sync mode, because this can trigger
+}
+
+// Don't emit readable right away in sync mode, because this can trigger
 // another read() call => stack overflow. This way, it might trigger
 // a nextTick recursion warning, but that's not so bad.
-
-
 function emitReadable(stream) {
   var state = stream._readableState;
   debug('emitReadable', state.needReadable, state.emittedReadable);
   state.needReadable = false;
-
   if (!state.emittedReadable) {
     debug('emitReadable', state.flowing);
     state.emittedReadable = true;
     process.nextTick(emitReadable_, stream);
   }
 }
-
 function emitReadable_(stream) {
   var state = stream._readableState;
   debug('emitReadable_', state.destroyed, state.length, state.ended);
-
   if (!state.destroyed && (state.length || state.ended)) {
     stream.emit('readable');
     state.emittedReadable = false;
-  } // The stream needs another readable event if
+  }
+
+  // The stream needs another readable event if
   // 1. It is not flowing, as the flow mechanism will take
   // care of it.
   // 2. It is not ended.
   // 3. It is below the highWaterMark, so we can schedule
   // another readable later.
-
-
   state.needReadable = !state.flowing && !state.ended && state.length <= state.highWaterMark;
   flow(stream);
-} // at this point, the user has presumably seen the 'readable' event,
+}
+
+// at this point, the user has presumably seen the 'readable' event,
 // and called read() to consume some data. that may have triggered
 // in turn another _read(n) call, in which case reading = true if
 // it's in progress.
 // However, if we're not ended, or reading, and the length < hwm,
 // then go ahead and try to read some more preemptively.
-
-
 function maybeReadMore(stream, state) {
   if (!state.readingMore) {
     state.readingMore = true;
     process.nextTick(maybeReadMore_, stream, state);
   }
 }
-
 function maybeReadMore_(stream, state) {
   // Attempt to read more data if we should.
   //
@@ -587,52 +545,45 @@ function maybeReadMore_(stream, state) {
   // read()s. The execution ends in this method again after the _read() ends
   // up calling push() with more data.
   while (!state.reading && !state.ended && (state.length < state.highWaterMark || state.flowing && state.length === 0)) {
-    var len = state.length;
+    const len = state.length;
     debug('maybeReadMore read 0');
     stream.read(0);
-    if (len === state.length) // didn't get any data, stop spinning.
+    if (len === state.length)
+      // didn't get any data, stop spinning.
       break;
   }
-
   state.readingMore = false;
-} // abstract method. to be overridden in specific implementation classes.
+}
+
+// abstract method. to be overridden in specific implementation classes.
 // call cb(er, data) where data is <= n in length.
 // for virtual (non-string, non-buffer) streams, "length" is somewhat
 // arbitrary, and perhaps not very meaningful.
-
-
 Readable.prototype._read = function (n) {
   errorOrDestroy(this, new ERR_METHOD_NOT_IMPLEMENTED('_read()'));
 };
-
 Readable.prototype.pipe = function (dest, pipeOpts) {
   var src = this;
   var state = this._readableState;
-
   switch (state.pipesCount) {
     case 0:
       state.pipes = dest;
       break;
-
     case 1:
       state.pipes = [state.pipes, dest];
       break;
-
     default:
       state.pipes.push(dest);
       break;
   }
-
   state.pipesCount += 1;
   debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts);
   var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr;
   var endFn = doEnd ? onend : unpipe;
   if (state.endEmitted) process.nextTick(endFn);else src.once('end', endFn);
   dest.on('unpipe', onunpipe);
-
   function onunpipe(readable, unpipeInfo) {
     debug('onunpipe');
-
     if (readable === src) {
       if (unpipeInfo && unpipeInfo.hasUnpiped === false) {
         unpipeInfo.hasUnpiped = true;
@@ -640,23 +591,21 @@ Readable.prototype.pipe = function (dest, pipeOpts) {
       }
     }
   }
-
   function onend() {
     debug('onend');
     dest.end();
-  } // when the dest drains, it reduces the awaitDrain counter
+  }
+
+  // when the dest drains, it reduces the awaitDrain counter
   // on the source. This would be more elegant with a .once()
   // handler in flow(), but adding and removing repeatedly is
   // too slow.
-
-
   var ondrain = pipeOnDrain(src);
   dest.on('drain', ondrain);
   var cleanedUp = false;
-
   function cleanup() {
-    debug('cleanup'); // cleanup event handlers once the pipe is broken
-
+    debug('cleanup');
+    // cleanup event handlers once the pipe is broken
     dest.removeListener('close', onclose);
     dest.removeListener('finish', onfinish);
     dest.removeListener('drain', ondrain);
@@ -665,22 +614,20 @@ Readable.prototype.pipe = function (dest, pipeOpts) {
     src.removeListener('end', onend);
     src.removeListener('end', unpipe);
     src.removeListener('data', ondata);
-    cleanedUp = true; // if the reader is waiting for a drain event from this
+    cleanedUp = true;
+
+    // if the reader is waiting for a drain event from this
     // specific writer, then it would cause it to never start
     // flowing again.
     // So, if this is awaiting a drain, then we just call it now.
     // If we don't know, then assume that we are waiting for one.
-
     if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain();
   }
-
   src.on('data', ondata);
-
   function ondata(chunk) {
     debug('ondata');
     var ret = dest.write(chunk);
     debug('dest.write', ret);
-
     if (ret === false) {
       // If the user unpiped during `dest.write()`, it is possible
      // to get stuck in a permanently paused state if that write
@@ -690,87 +637,84 @@ Readable.prototype.pipe = function (dest, pipeOpts) {
        debug('false write response, pause', state.awaitDrain);
        state.awaitDrain++;
      }
-
      src.pause();
    }
-  } // if the dest has an error, then stop piping into it.
-  // however, don't suppress the throwing behavior for this.
-
+  }
+  // if the dest has an error, then stop piping into it.
+  // however, don't suppress the throwing behavior for this.
  function onerror(er) {
    debug('onerror', er);
    unpipe();
    dest.removeListener('error', onerror);
    if (EElistenerCount(dest, 'error') === 0) errorOrDestroy(dest, er);
-  } // Make sure our error handler is attached before userland ones.
-
+  }
-
-  prependListener(dest, 'error', onerror); // Both close and finish should trigger unpipe, but only once.
+  // Make sure our error handler is attached before userland ones.
+  prependListener(dest, 'error', onerror);
+  // Both close and finish should trigger unpipe, but only once.
  function onclose() {
    dest.removeListener('finish', onfinish);
    unpipe();
  }
-
  dest.once('close', onclose);
-
  function onfinish() {
    debug('onfinish');
    dest.removeListener('close', onclose);
    unpipe();
  }
-
  dest.once('finish', onfinish);
-
  function unpipe() {
    debug('unpipe');
    src.unpipe(dest);
-  } // tell the dest that it's being piped to
-
+  }
-
-  dest.emit('pipe', src); // start the flow if it hasn't been started already.
+  // tell the dest that it's being piped to
+  dest.emit('pipe', src);
+  // start the flow if it hasn't been started already.
   if (!state.flowing) {
     debug('pipe resume');
     src.resume();
   }
-
   return dest;
 };
-
 function pipeOnDrain(src) {
   return function pipeOnDrainFunctionResult() {
     var state = src._readableState;
     debug('pipeOnDrain', state.awaitDrain);
     if (state.awaitDrain) state.awaitDrain--;
-
     if (state.awaitDrain === 0 && EElistenerCount(src, 'data')) {
       state.flowing = true;
       flow(src);
     }
   };
 }
-
 Readable.prototype.unpipe = function (dest) {
   var state = this._readableState;
   var unpipeInfo = {
     hasUnpiped: false
-  }; // if we're not piping anywhere, then do nothing.
+  };
-
-  if (state.pipesCount === 0) return this; // just one destination. most common case.
+  // if we're not piping anywhere, then do nothing.
+  if (state.pipesCount === 0) return this;
+
+  // just one destination. most common case.
   if (state.pipesCount === 1) {
     // passed in one, but it's not the right one.
     if (dest && dest !== state.pipes) return this;
-    if (!dest) dest = state.pipes; // got a match.
+    if (!dest) dest = state.pipes;
+
+    // got a match.
     state.pipes = null;
     state.pipesCount = 0;
     state.flowing = false;
     if (dest) dest.emit('unpipe', this, unpipeInfo);
     return this;
-  } // slow case. multiple pipe destinations.
+  }
+
+  // slow case. multiple pipe destinations.
   if (!dest) {
     // remove all.
@@ -779,17 +723,13 @@
     state.pipes = null;
     state.pipesCount = 0;
     state.flowing = false;
-
-    for (var i = 0; i < len; i++) {
-      dests[i].emit('unpipe', this, {
-        hasUnpiped: false
-      });
-    }
-
+    for (var i = 0; i < len; i++) dests[i].emit('unpipe', this, {
+      hasUnpiped: false
+    });
     return this;
-  } // try to find the right one.
-
+  }
+  // try to find the right one.
   var index = indexOf(state.pipes, dest);
   if (index === -1) return this;
   state.pipes.splice(index, 1);
@@ -797,19 +737,19 @@
   if (state.pipesCount === 1) state.pipes = state.pipes[0];
   dest.emit('unpipe', this, unpipeInfo);
   return this;
-}; // set up data events if they are asked for
-// Ensure readable listeners eventually get something
-
+};
+// set up data events if they are asked for
+// Ensure readable listeners eventually get something
 Readable.prototype.on = function (ev, fn) {
-  var res = Stream.prototype.on.call(this, ev, fn);
-  var state = this._readableState;
-
+  const res = Stream.prototype.on.call(this, ev, fn);
+  const state = this._readableState;
   if (ev === 'data') {
     // update readableListening so that resume() may be a no-op
     // a few lines down. This is needed to support once('readable').
-    state.readableListening = this.listenerCount('readable') > 0; // Try start flowing on next tick if stream isn't explicitly paused
+    state.readableListening = this.listenerCount('readable') > 0;
+
+    // Try start flowing on next tick if stream isn't explicitly paused
     if (state.flowing !== false) this.resume();
   } else if (ev === 'readable') {
     if (!state.endEmitted && !state.readableListening) {
@@ -817,7 +757,6 @@ Readable.prototype.on = function (ev, fn) {
       state.flowing = false;
       state.emittedReadable = false;
       debug('on readable', state.length, state.reading);
-
       if (state.length) {
         emitReadable(this);
       } else if (!state.reading) {
@@ -825,15 +764,11 @@ Readable.prototype.on = function (ev, fn) {
       }
     }
   }
-
   return res;
 };
-
 Readable.prototype.addListener = Readable.prototype.on;
-
 Readable.prototype.removeListener = function (ev, fn) {
-  var res = Stream.prototype.removeListener.call(this, ev, fn);
-
+  const res = Stream.prototype.removeListener.call(this, ev, fn);
   if (ev === 'readable') {
     // We need to check if there is someone still listening to
     // readable and reset the state. However this needs to happen
@@ -843,13 +778,10 @@ Readable.prototype.removeListener = function (ev, fn) {
     // effect.
     process.nextTick(updateReadableListening, this);
   }
-
   return res;
 };
-
 Readable.prototype.removeAllListeners = function (ev) {
-  var res = Stream.prototype.removeAllListeners.apply(this, arguments);
-
+  const res = Stream.prototype.removeAllListeners.apply(this, arguments);
   if (ev === 'readable' || ev === undefined) {
     // We need to check if there is someone still listening to
     // readable and reset the state. However this needs to happen
@@ -859,121 +791,102 @@ Readable.prototype.removeAllListeners = function (ev) {
     // effect.
     process.nextTick(updateReadableListening, this);
   }
-
   return res;
 };
-
 function updateReadableListening(self) {
-  var state = self._readableState;
+  const state = self._readableState;
   state.readableListening = self.listenerCount('readable') > 0;
-
   if (state.resumeScheduled && !state.paused) {
     // flowing needs to be set to true now, otherwise
     // the upcoming resume will not flow.
-    state.flowing = true; // crude way to check if we should resume
+    state.flowing = true;
+
+    // crude way to check if we should resume
   } else if (self.listenerCount('data') > 0) {
     self.resume();
   }
 }
-
 function nReadingNextTick(self) {
   debug('readable nexttick read 0');
   self.read(0);
-} // pause() and resume() are remnants of the legacy readable stream API
-// If the user uses them, then switch into old mode.
-
+}
+// pause() and resume() are remnants of the legacy readable stream API
+// If the user uses them, then switch into old mode.
 Readable.prototype.resume = function () {
   var state = this._readableState;
-
   if (!state.flowing) {
-    debug('resume'); // we flow only if there is no one listening
+    debug('resume');
+    // we flow only if there is no one listening
     // for readable, but we still have to call
     // resume()
-
     state.flowing = !state.readableListening;
     resume(this, state);
   }
-
   state.paused = false;
   return this;
 };
-
 function resume(stream, state) {
   if (!state.resumeScheduled) {
     state.resumeScheduled = true;
     process.nextTick(resume_, stream, state);
   }
 }
-
 function resume_(stream, state) {
   debug('resume', state.reading);
-
   if (!state.reading) {
     stream.read(0);
   }
-
   state.resumeScheduled = false;
   stream.emit('resume');
   flow(stream);
   if (state.flowing && !state.reading) stream.read(0);
 }
-
 Readable.prototype.pause = function () {
   debug('call pause flowing=%j', this._readableState.flowing);
-
   if (this._readableState.flowing !== false) {
     debug('pause');
     this._readableState.flowing = false;
     this.emit('pause');
   }
-
   this._readableState.paused = true;
   return this;
 };
-
 function flow(stream) {
-  var state = stream._readableState;
+  const state = stream._readableState;
   debug('flow', state.flowing);
+  while (state.flowing && stream.read() !== null);
+}
-
-  while (state.flowing && stream.read() !== null) {
-    ;
-  }
-} // wrap an old-style stream as the async data source.
+// wrap an old-style stream as the async data source.
 // This is *not* part of the readable stream interface.
 // It is an ugly unfortunate mess of history.
-
-
 Readable.prototype.wrap = function (stream) {
-  var _this = this;
-
   var state = this._readableState;
   var paused = false;
-
-  stream.on('end', function () {
+  stream.on('end', () => {
     debug('wrapped end');
-
     if (state.decoder && !state.ended) {
       var chunk = state.decoder.end();
-      if (chunk && chunk.length) _this.push(chunk);
+      if (chunk && chunk.length) this.push(chunk);
     }
-
-    _this.push(null);
+    this.push(null);
   });
-
-  stream.on('data', function (chunk) {
+  stream.on('data', chunk => {
     debug('wrapped data');
-    if (state.decoder) chunk = state.decoder.write(chunk); // don't skip over falsy values in objectMode
+    if (state.decoder) chunk = state.decoder.write(chunk);
+
+    // don't skip over falsy values in objectMode
     if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return;
-
-    var ret = _this.push(chunk);
-
+    var ret = this.push(chunk);
    if (!ret) {
      paused = true;
      stream.pause();
    }
-  }); // proxy all the other methods.
-  // important when wrapping filters and duplexes.
+  });
+
+  // proxy all the other methods.
+  // important when wrapping filters and duplexes.
  for (var i in stream) {
    if (this[i] === undefined && typeof stream[i] === 'function') {
      this[i] = function methodWrap(method) {
@@ -982,37 +895,32 @@
      };
    }(i);
  }
-  } // proxy certain important events.
-
+  }
+  // proxy certain important events.
  for (var n = 0; n < kProxyEvents.length; n++) {
    stream.on(kProxyEvents[n], this.emit.bind(this, kProxyEvents[n]));
-  } // when we try to consume some more bytes, simply unpause the
-  // underlying stream.
-
+  }
-
-  this._read = function (n) {
+  // when we try to consume some more bytes, simply unpause the
+  // underlying stream.
+  this._read = n => {
     debug('wrapped _read', n);
-
     if (paused) {
       paused = false;
       stream.resume();
     }
   };
-
   return this;
 };
-
 if (typeof Symbol === 'function') {
   Readable.prototype[Symbol.asyncIterator] = function () {
     if (createReadableStreamAsyncIterator === undefined) {
       createReadableStreamAsyncIterator = require('./internal/streams/async_iterator');
     }
-
     return createReadableStreamAsyncIterator(this);
   };
 }
-
 Object.defineProperty(Readable.prototype, 'readableHighWaterMark', {
   // making it explicit this property is not enumerable
   // because otherwise some prototype manipulation in
@@ -1044,22 +952,24 @@ Object.defineProperty(Readable.prototype, 'readableFlowing', {
       this._readableState.flowing = state;
     }
   }
-}); // exposed for testing purposes only.
+});
+
+// exposed for testing purposes only.
 Readable._fromList = fromList;
 Object.defineProperty(Readable.prototype, 'readableLength', {
   // making it explicit this property is not enumerable
   // because otherwise some prototype manipulation in
   // userland will fail
   enumerable: false,
-  get: function get() {
+  get() {
     return this._readableState.length;
   }
-}); // Pluck off n bytes from an array of buffers.
+});
+
+// Pluck off n bytes from an array of buffers.
 // Length is the combined lengths of all the buffers in the list.
 // This function is designed to be inlinable, so please take care when making
 // changes to the function body.
-
 function fromList(n, state) {
   // nothing buffered
   if (state.length === 0) return null;
@@ -1074,51 +984,43 @@ function fromList(n, state) {
   }
   return ret;
 }
-
 function endReadable(stream) {
   var state = stream._readableState;
   debug('endReadable', state.endEmitted);
-
   if (!state.endEmitted) {
     state.ended = true;
     process.nextTick(endReadableNT, state, stream);
   }
 }
-
 function endReadableNT(state, stream) {
-  debug('endReadableNT', state.endEmitted, state.length); // Check that we didn't get one last unshift.
+  debug('endReadableNT', state.endEmitted, state.length);
+
+  // Check that we didn't get one last unshift.
   if (!state.endEmitted && state.length === 0) {
     state.endEmitted = true;
     stream.readable = false;
     stream.emit('end');
-
     if (state.autoDestroy) {
       // In case of duplex streams we need a way to detect
       // if the writable side is ready for autoDestroy as well
-      var wState = stream._writableState;
-
+      const wState = stream._writableState;
      if (!wState || wState.autoDestroy && wState.finished) {
        stream.destroy();
      }
    }
  }
}
-
 if (typeof Symbol === 'function') {
   Readable.from = function (iterable, opts) {
     if (from === undefined) {
       from = require('./internal/streams/from');
     }
-
     return from(Readable, iterable, opts);
   };
 }
-
 function indexOf(xs, x) {
   for (var i = 0, l = xs.length; i < l; i++) {
     if (xs[i] === x) return i;
   }
-
   return -1;
 }
\ No newline at end of file
diff --git a/node_modules/node-gyp/node_modules/readable-stream/lib/_stream_transform.js b/node_modules/node-gyp/node_modules/readable-stream/lib/_stream_transform.js
index 41a738c4e9359..a2fcca2193cf1 100644
--- a/node_modules/node-gyp/node_modules/readable-stream/lib/_stream_transform.js
+++ b/node_modules/node-gyp/node_modules/readable-stream/lib/_stream_transform.js
@@ -18,6 +18,7 @@
 // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
 // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
 // USE OR OTHER DEALINGS IN THE SOFTWARE.
+
 // a transform stream is a readable/writable stream where you do
 // something with the data.  Sometimes it's called a "filter",
 // but that's not a great name for it, since that implies a thing where
@@ -59,42 +60,36 @@
 // However, even in such a pathological case, only a single written chunk
 // would be consumed, and then the rest would wait (un-transformed) until
 // the results of the previous transformed chunk were consumed.
+
 'use strict';
 module.exports = Transform;
-
-var _require$codes = require('../errors').codes,
-    ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED,
-    ERR_MULTIPLE_CALLBACK = _require$codes.ERR_MULTIPLE_CALLBACK,
-    ERR_TRANSFORM_ALREADY_TRANSFORMING = _require$codes.ERR_TRANSFORM_ALREADY_TRANSFORMING,
-    ERR_TRANSFORM_WITH_LENGTH_0 = _require$codes.ERR_TRANSFORM_WITH_LENGTH_0;
-
-var Duplex = require('./_stream_duplex');
-
+const _require$codes = require('../errors').codes,
+  ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED,
+  ERR_MULTIPLE_CALLBACK = _require$codes.ERR_MULTIPLE_CALLBACK,
+  ERR_TRANSFORM_ALREADY_TRANSFORMING = _require$codes.ERR_TRANSFORM_ALREADY_TRANSFORMING,
+  ERR_TRANSFORM_WITH_LENGTH_0 = _require$codes.ERR_TRANSFORM_WITH_LENGTH_0;
+const Duplex = require('./_stream_duplex');
 require('inherits')(Transform, Duplex);
-
 function afterTransform(er, data) {
   var ts = this._transformState;
   ts.transforming = false;
   var cb = ts.writecb;
-
   if (cb === null) {
     return this.emit('error', new ERR_MULTIPLE_CALLBACK());
   }
-
   ts.writechunk = null;
   ts.writecb = null;
-  if (data != null) // single equals check for both `null` and `undefined`
+  if (data != null)
+    // single equals check for both `null` and `undefined`
     this.push(data);
   cb(er);
   var rs = this._readableState;
   rs.reading = false;
-
   if (rs.needReadable || rs.length < rs.highWaterMark) {
     this._read(rs.highWaterMark);
   }
 }
-
 function Transform(options) {
   if (!(this instanceof Transform)) return new Transform(options);
   Duplex.call(this, options);
@@ -105,39 +100,38 @@ function Transform(options) {
     writecb: null,
     writechunk: null,
     writeencoding: null
-  }; // start out asking for a readable event once data is transformed.
+  };
+
+  // start out asking for a readable event once data is transformed.
+  this._readableState.needReadable = true;
-
-  this._readableState.needReadable = true; // we have implemented the _read method, and done the other things
+  // we have implemented the _read method, and done the other things
   // that Readable wants before the first _read call, so unset the
   // sync guard flag.
-
   this._readableState.sync = false;
-
   if (options) {
     if (typeof options.transform === 'function') this._transform = options.transform;
     if (typeof options.flush === 'function') this._flush = options.flush;
-  } // When the writable side finishes, then flush out anything remaining.
-
+  }
+  // When the writable side finishes, then flush out anything remaining.
   this.on('prefinish', prefinish);
 }
-
 function prefinish() {
-  var _this = this;
-
   if (typeof this._flush === 'function' && !this._readableState.destroyed) {
-    this._flush(function (er, data) {
-      done(_this, er, data);
+    this._flush((er, data) => {
+      done(this, er, data);
     });
   } else {
     done(this, null, null);
   }
 }
-
 Transform.prototype.push = function (chunk, encoding) {
   this._transformState.needTransform = false;
   return Duplex.prototype.push.call(this, chunk, encoding);
-}; // This is the part where you do stuff!
+};
+
+// This is the part where you do stuff!
 // override this function in implementation classes.
 // 'chunk' is an input chunk.
 //
@@ -147,33 +141,27 @@ Transform.prototype.push = function (chunk, encoding) {
 // Call `cb(err)` when you are done with this chunk. If you pass
 // an error, then that'll put the hurt on the whole operation. If you
 // never call cb(), then you'll never get another chunk.
-
-
 Transform.prototype._transform = function (chunk, encoding, cb) {
   cb(new ERR_METHOD_NOT_IMPLEMENTED('_transform()'));
 };
-
 Transform.prototype._write = function (chunk, encoding, cb) {
   var ts = this._transformState;
   ts.writecb = cb;
   ts.writechunk = chunk;
   ts.writeencoding = encoding;
-
   if (!ts.transforming) {
     var rs = this._readableState;
     if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark);
   }
-}; // Doesn't matter what the args are here.
+};
+
+// Doesn't matter what the args are here.
 // _transform does all the work.
 // That we got here means that the readable side wants more data.
-
-
 Transform.prototype._read = function (n) {
   var ts = this._transformState;
-
   if (ts.writechunk !== null && !ts.transforming) {
     ts.transforming = true;
-
     this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform);
   } else {
     // mark that we need a transform, so that any data that comes in
@@ -181,20 +169,20 @@ Transform.prototype._read = function (n) {
     ts.needTransform = true;
   }
 };
-
 Transform.prototype._destroy = function (err, cb) {
-  Duplex.prototype._destroy.call(this, err, function (err2) {
+  Duplex.prototype._destroy.call(this, err, err2 => {
     cb(err2);
   });
 };
-
 function done(stream, er, data) {
   if (er) return stream.emit('error', er);
-  if (data != null) // single equals check for both `null` and `undefined`
-    stream.push(data); // TODO(BridgeAR): Write a test for these two error cases
+  if (data != null)
+    // single equals check for both `null` and `undefined`
+    stream.push(data);
+
+  // TODO(BridgeAR): Write a test for these two error cases
   // if there's nothing in the write buffer, then that means
   // that nothing more will ever be provided
-
   if (stream._writableState.length) throw new ERR_TRANSFORM_WITH_LENGTH_0();
   if (stream._transformState.transforming) throw new ERR_TRANSFORM_ALREADY_TRANSFORMING();
   return stream.push(null);
diff --git a/node_modules/node-gyp/node_modules/readable-stream/lib/_stream_writable.js b/node_modules/node-gyp/node_modules/readable-stream/lib/_stream_writable.js
index a2634d7c24fd5..feece02279db6 100644
--- a/node_modules/node-gyp/node_modules/readable-stream/lib/_stream_writable.js
+++ b/node_modules/node-gyp/node_modules/readable-stream/lib/_stream_writable.js
@@ -18,185 +18,188 @@
 // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
 // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
 // USE OR OTHER DEALINGS IN THE SOFTWARE.
+
 // A bit simpler than readable streams.
 // Implement an async ._write(chunk, encoding, cb), and it'll handle all
 // the drain event emission and buffering.
+
 'use strict';
 module.exports = Writable;
-/* */
+/* */
 function WriteReq(chunk, encoding, cb) {
   this.chunk = chunk;
   this.encoding = encoding;
   this.callback = cb;
   this.next = null;
-} // It seems a linked list but it is not
-// there will be only 2 of these for each stream
-
+}
+// It seems a linked list but it is not
+// there will be only 2 of these for each stream
 function CorkedRequest(state) {
-  var _this = this;
-
   this.next = null;
   this.entry = null;
-
-  this.finish = function () {
-    onCorkedFinish(_this, state);
+  this.finish = () => {
+    onCorkedFinish(this, state);
   };
 }
 /* */
 /**/
-
-
 var Duplex;
 /**/
 Writable.WritableState = WritableState;
-/**/
-var internalUtil = {
+/**/
+const internalUtil = {
   deprecate: require('util-deprecate')
 };
 /**/
 /**/
-
 var Stream = require('./internal/streams/stream');
 /**/
-
-var Buffer = require('buffer').Buffer;
-
-var OurUint8Array = global.Uint8Array || function () {};
-
+const Buffer = require('buffer').Buffer;
+const OurUint8Array = (typeof global !== 'undefined' ? global : typeof window !== 'undefined' ? window : typeof self !== 'undefined' ? self : {}).Uint8Array || function () {};
 function _uint8ArrayToBuffer(chunk) {
   return Buffer.from(chunk);
 }
-
 function _isUint8Array(obj) {
   return Buffer.isBuffer(obj) || obj instanceof OurUint8Array;
 }
-
-var destroyImpl = require('./internal/streams/destroy');
-
-var _require = require('./internal/streams/state'),
-    getHighWaterMark = _require.getHighWaterMark;
-
-var _require$codes = require('../errors').codes,
-    ERR_INVALID_ARG_TYPE = _require$codes.ERR_INVALID_ARG_TYPE,
-    ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED,
-    ERR_MULTIPLE_CALLBACK = _require$codes.ERR_MULTIPLE_CALLBACK,
-    ERR_STREAM_CANNOT_PIPE = _require$codes.ERR_STREAM_CANNOT_PIPE,
-    ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED,
-    ERR_STREAM_NULL_VALUES = _require$codes.ERR_STREAM_NULL_VALUES,
-    ERR_STREAM_WRITE_AFTER_END = _require$codes.ERR_STREAM_WRITE_AFTER_END,
-    ERR_UNKNOWN_ENCODING = _require$codes.ERR_UNKNOWN_ENCODING;
-
-var errorOrDestroy = destroyImpl.errorOrDestroy;
-
+const destroyImpl = require('./internal/streams/destroy');
+const _require = require('./internal/streams/state'),
+  getHighWaterMark = _require.getHighWaterMark;
+const _require$codes = require('../errors').codes,
+  ERR_INVALID_ARG_TYPE = _require$codes.ERR_INVALID_ARG_TYPE,
+  ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED,
+  ERR_MULTIPLE_CALLBACK = _require$codes.ERR_MULTIPLE_CALLBACK,
+  ERR_STREAM_CANNOT_PIPE = _require$codes.ERR_STREAM_CANNOT_PIPE,
+  ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED,
+  ERR_STREAM_NULL_VALUES = _require$codes.ERR_STREAM_NULL_VALUES,
+  ERR_STREAM_WRITE_AFTER_END = _require$codes.ERR_STREAM_WRITE_AFTER_END,
+  ERR_UNKNOWN_ENCODING = _require$codes.ERR_UNKNOWN_ENCODING;
+const errorOrDestroy = destroyImpl.errorOrDestroy;
 require('inherits')(Writable, Stream);
-
 function nop() {}
-
 function WritableState(options, stream, isDuplex) {
   Duplex = Duplex || require('./_stream_duplex');
-  options = options || {}; // Duplex streams are both readable and writable, but share
+  options = options || {};
+
+  // Duplex streams are both readable and writable, but share
   // the same options object.
   // However, some cases require setting options to different
   // values for the readable and the writable sides of the duplex stream,
   // e.g. options.readableObjectMode vs. options.writableObjectMode, etc.
+  if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof Duplex;
-  if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof Duplex; // object stream flag to indicate whether or not this stream
+
+  // object stream flag to indicate whether or not this stream
   // contains buffers or objects.
-
   this.objectMode = !!options.objectMode;
-  if (isDuplex) this.objectMode = this.objectMode || !!options.writableObjectMode; // the point at which write() starts returning false
+  if (isDuplex) this.objectMode = this.objectMode || !!options.writableObjectMode;
+
+  // the point at which write() starts returning false
   // Note: 0 is a valid value, means that we always return false if
   // the entire buffer is not flushed immediately on write()
+  this.highWaterMark = getHighWaterMark(this, options, 'writableHighWaterMark', isDuplex);
-  this.highWaterMark = getHighWaterMark(this, options, 'writableHighWaterMark', isDuplex); // if _final has been called
+  // if _final has been called
+  this.finalCalled = false;
-  this.finalCalled = false; // drain event flag.
+  // drain event flag.
+  this.needDrain = false;
+  // at the start of calling end()
+  this.ending = false;
+  // when end() has been called, and returned
+  this.ended = false;
+  // when 'finish' is emitted
+  this.finished = false;
-  this.needDrain = false; // at the start of calling end()
+  // has it been destroyed
+  this.destroyed = false;
-
-  this.ending = false; // when end() has been called, and returned
-
-  this.ended = false; // when 'finish' is emitted
-
-  this.finished = false; // has it been destroyed
-
-  this.destroyed = false; // should we decode strings into buffers before passing to _write?
+  // should we decode strings into buffers before passing to _write?
   // this is here so that some node-core streams can optimize string
   // handling at a lower level.
-
   var noDecode = options.decodeStrings === false;
-  this.decodeStrings = !noDecode; // Crypto is kind of old and crusty. Historically, its default string
+  this.decodeStrings = !noDecode;
+
+  // Crypto is kind of old and crusty. Historically, its default string
   // encoding is 'binary' so we have to make this configurable.
   // Everything else in the universe uses 'utf8', though.
+  this.defaultEncoding = options.defaultEncoding || 'utf8';
-  this.defaultEncoding = options.defaultEncoding || 'utf8'; // not an actual buffer we keep track of, but a measurement
   // of how much we're waiting to get pushed to some underlying
   // socket or file.
+  this.length = 0;
-  this.length = 0; // a flag to see when we're in the middle of a write.
+  // a flag to see when we're in the middle of a write.
+  this.writing = false;
-  this.writing = false; // when true all writes will be buffered until .uncork() call
+  // when true all writes will be buffered until .uncork() call
+  this.corked = 0;
-  this.corked = 0; // a flag to be able to tell if the onwrite cb is called immediately,
+  // a flag to be able to tell if the onwrite cb is called immediately,
   // or on a later tick. We set this to true at first, because any
   // actions that shouldn't happen until "later" should generally also
   // not happen before the first write call.
+  this.sync = true;
-  this.sync = true; // a flag to know if we're processing previously buffered items, which
+  // a flag to know if we're processing previously buffered items, which
   // may call the _write() callback in the same tick, so that we don't
   // end up in an overlapped onwrite situation.
+  this.bufferProcessing = false;
-  this.bufferProcessing = false; // the callback that's passed to _write(chunk,cb)
-
+  // the callback that's passed to _write(chunk,cb)
   this.onwrite = function (er) {
     onwrite(stream, er);
-  }; // the callback that the user supplies to write(chunk,encoding,cb)
-
+  };
-
-  this.writecb = null; // the amount that is being written when _write is called.
+  // the callback that the user supplies to write(chunk,encoding,cb)
+  this.writecb = null;
+  // the amount that is being written when _write is called.
   this.writelen = 0;
   this.bufferedRequest = null;
-  this.lastBufferedRequest = null; // number of pending user-supplied write callbacks
+  this.lastBufferedRequest = null;
+
+  // number of pending user-supplied write callbacks
   // this must be 0 before 'finish' can be emitted
+  this.pendingcb = 0;
-  this.pendingcb = 0; // emit prefinish if the only thing we're waiting for is _write cbs
+  // emit prefinish if the only thing we're waiting for is _write cbs
   // This is relevant for synchronous Transform streams
+  this.prefinished = false;
-  this.prefinished = false; // True if the error was already emitted and should not be thrown again
+  // True if the error was already emitted and should not be thrown again
+  this.errorEmitted = false;
-  this.errorEmitted = false; // Should close be emitted on destroy. Defaults to true.
+  // Should close be emitted on destroy. Defaults to true.
+  this.emitClose = options.emitClose !== false;
-  this.emitClose = options.emitClose !== false; // Should .destroy() be called after 'finish' (and potentially 'end')
+  // Should .destroy() be called after 'finish' (and potentially 'end')
+  this.autoDestroy = !!options.autoDestroy;
-  this.autoDestroy = !!options.autoDestroy; // count buffered requests
+  // count buffered requests
+  this.bufferedRequestCount = 0;
-  this.bufferedRequestCount = 0; // allocate the first CorkedRequest, there is always
+  // allocate the first CorkedRequest, there is always
   // one allocated and free to use, and we maintain at most two
-
   this.corkedRequestsFree = new CorkedRequest(this);
 }
-
 WritableState.prototype.getBuffer = function getBuffer() {
   var current = this.bufferedRequest;
   var out = [];
-
   while (current) {
     out.push(current);
     current = current.next;
   }
-
   return out;
 };
-
 (function () {
   try {
     Object.defineProperty(WritableState.prototype, 'buffer', {
@@ -205,12 +208,11 @@ WritableState.prototype.getBuffer = function getBuffer() {
     }, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.', 'DEP0003')
     });
   } catch (_) {}
-})(); // Test _writableState for inheritance to account for Duplex streams,
-// whose prototype chain only points to Readable.
-
+})();
+// Test _writableState for inheritance to account for Duplex streams,
+// whose prototype chain only points to Readable.
 var realHasInstance;
-
 if (typeof Symbol === 'function' && Symbol.hasInstance && typeof Function.prototype[Symbol.hasInstance] === 'function') {
   realHasInstance = Function.prototype[Symbol.hasInstance];
   Object.defineProperty(Writable, Symbol.hasInstance, {
@@ -225,81 +227,73 @@ if (typeof Symbol === 'function' && Symbol.hasInstance && typeof Function.protot
     return object instanceof this;
   };
 }
-
 function Writable(options) {
-  Duplex = Duplex || require('./_stream_duplex'); // Writable ctor is applied to Duplexes, too.
+  Duplex = Duplex || require('./_stream_duplex');
+
+  // Writable ctor is applied to Duplexes, too.
   // `realHasInstance` is necessary because using plain `instanceof`
   // would return false, as no `_writableState` property is attached.
+
   // Trying to use the custom `instanceof` for Writable here will also break the
   // Node.js LazyTransform implementation, which has a non-trivial getter for
   // `_writableState` that would lead to infinite recursion.
+
   // Checking for a Stream.Duplex instance is faster here instead of inside
   // the WritableState constructor, at least with V8 6.5
-
-  var isDuplex = this instanceof Duplex;
+  const isDuplex = this instanceof Duplex;
   if (!isDuplex && !realHasInstance.call(Writable, this)) return new Writable(options);
-  this._writableState = new WritableState(options, this, isDuplex); // legacy.
+  this._writableState = new WritableState(options, this, isDuplex);
+  // legacy.
   this.writable = true;
-
   if (options) {
     if (typeof options.write === 'function') this._write = options.write;
     if (typeof options.writev === 'function') this._writev = options.writev;
     if (typeof options.destroy === 'function') this._destroy = options.destroy;
     if (typeof options.final === 'function') this._final = options.final;
   }
-
   Stream.call(this);
-} // Otherwise people can pipe Writable streams, which is just wrong.
-
+}
+// Otherwise people can pipe Writable streams, which is just wrong.
 Writable.prototype.pipe = function () {
   errorOrDestroy(this, new ERR_STREAM_CANNOT_PIPE());
 };
-
 function writeAfterEnd(stream, cb) {
-  var er = new ERR_STREAM_WRITE_AFTER_END(); // TODO: defer error events consistently everywhere, not just the cb
-
+  var er = new ERR_STREAM_WRITE_AFTER_END();
+  // TODO: defer error events consistently everywhere, not just the cb
   errorOrDestroy(stream, er);
   process.nextTick(cb, er);
-} // Checks that a user-supplied chunk is valid, especially for the particular
+}
+
+// Checks that a user-supplied chunk is valid, especially for the particular
 // mode the stream is in. Currently this means that `null` is never accepted
 // and undefined/non-string values are only allowed in object mode.
-
-
 function validChunk(stream, state, chunk, cb) {
   var er;
-
   if (chunk === null) {
     er = new ERR_STREAM_NULL_VALUES();
   } else if (typeof chunk !== 'string' && !state.objectMode) {
     er = new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer'], chunk);
   }
-
   if (er) {
     errorOrDestroy(stream, er);
     process.nextTick(cb, er);
     return false;
   }
-
   return true;
 }
-
 Writable.prototype.write = function (chunk, encoding, cb) {
   var state = this._writableState;
   var ret = false;
-
   var isBuf = !state.objectMode && _isUint8Array(chunk);
-
   if (isBuf && !Buffer.isBuffer(chunk)) {
     chunk = _uint8ArrayToBuffer(chunk);
   }
-
   if (typeof encoding === 'function') {
     cb = encoding;
     encoding = null;
   }
-
   if (isBuf) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding;
   if (typeof cb !== 'function') cb = nop;
   if (state.ending) writeAfterEnd(this, cb);else if (isBuf || validChunk(this, state, chunk, cb)) {
@@ -308,20 +302,16 @@ Writable.prototype.write = function (chunk, encoding, cb) {
   }
   return ret;
 };
-
 Writable.prototype.cork = function () {
   this._writableState.corked++;
 };
-
 Writable.prototype.uncork = function () {
   var state = this._writableState;
-
   if (state.corked) {
     state.corked--;
     if (!state.writing && !state.corked && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state);
   }
 };
-
 Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) {
   // node::ParseEncoding() requires lower case.
   if (typeof encoding === 'string') encoding = encoding.toLowerCase();
@@ -329,7 +319,6 @@ Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) {
   this._writableState.defaultEncoding = encoding;
   return this;
 };
-
 Object.defineProperty(Writable.prototype, 'writableBuffer', {
   // making it explicit this property is not enumerable
   // because otherwise some prototype manipulation in
@@ -339,15 +328,12 @@ Object.defineProperty(Writable.prototype, 'writableBuffer', {
     return this._writableState && this._writableState.getBuffer();
   }
 });
-
 function decodeChunk(state, chunk, encoding) {
   if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') {
     chunk = Buffer.from(chunk, encoding);
   }
-
   return chunk;
 }
-
 Object.defineProperty(Writable.prototype, 'writableHighWaterMark', {
   // making it explicit this property is not enumerable
   // because otherwise some prototype manipulation in
@@ -356,51 +342,45 @@ Object.defineProperty(Writable.prototype, 'writableHighWaterMark', {
   get: function get() {
     return this._writableState.highWaterMark;
   }
-}); // if we're already writing something, then just put this
+});
+
+// if we're already writing something, then just put this
 // in the queue, and wait our turn. Otherwise, call _write
 // If we return false, then we need a drain event, so set that flag.
-
 function writeOrBuffer(stream, state, isBuf, chunk, encoding, cb) {
   if (!isBuf) {
     var newChunk = decodeChunk(state, chunk, encoding);
-
     if (chunk !== newChunk) {
       isBuf = true;
       encoding = 'buffer';
       chunk = newChunk;
     }
   }
-
   var len = state.objectMode ? 1 : chunk.length;
   state.length += len;
-  var ret = state.length < state.highWaterMark; // we must ensure that previous needDrain will not be reset to false.
-
+  var ret = state.length < state.highWaterMark;
+  // we must ensure that previous needDrain will not be reset to false.
   if (!ret) state.needDrain = true;
-
   if (state.writing || state.corked) {
     var last = state.lastBufferedRequest;
     state.lastBufferedRequest = {
-      chunk: chunk,
-      encoding: encoding,
-      isBuf: isBuf,
+      chunk,
+      encoding,
+      isBuf,
       callback: cb,
       next: null
     };
-
     if (last) {
       last.next = state.lastBufferedRequest;
     } else {
       state.bufferedRequest = state.lastBufferedRequest;
    }
-
    state.bufferedRequestCount += 1;
  } else {
    doWrite(stream, state, false, len, chunk, encoding, cb);
  }
-
  return ret;
}
-
 function doWrite(stream, state, writev, len, chunk, encoding, cb) {
   state.writelen = len;
   state.writecb = cb;
@@ -409,16 +389,14 @@ function doWrite(stream, state, writev, len, chunk, encoding, cb) {
   if (state.destroyed) state.onwrite(new ERR_STREAM_DESTROYED('write'));else if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite);
   state.sync = false;
 }
-
 function onwriteError(stream, state, sync, er, cb) {
   --state.pendingcb;
-
   if (sync) {
     // defer the callback if we are being called synchronously
     // to avoid piling up things on the stack
-    process.nextTick(cb, er); // this can emit finish, and it will always happen
+    process.nextTick(cb, er);
+
+    // this can emit finish, and it will always happen
     // after error
-
     process.nextTick(finishMaybe, stream, state);
     stream._writableState.errorEmitted = true;
     errorOrDestroy(stream, er);
@@ -427,20 +405,18 @@ function onwriteError(stream, state, sync, er, cb) {
     // it is async
     cb(er);
     stream._writableState.errorEmitted = true;
-    errorOrDestroy(stream, er); // this can emit finish, but finish must
+    errorOrDestroy(stream, er);
+
+    // this can emit finish, but finish must
     // always follow error
-
     finishMaybe(stream, state);
   }
 }
-
 function onwriteStateUpdate(state) {
   state.writing = false;
   state.writecb = null;
   state.length -= state.writelen;
   state.writelen = 0;
 }
-
 function onwrite(stream, er) {
   var state = stream._writableState;
   var sync = state.sync;
@@ -450,11 +426,9 @@ function onwrite(stream, er) {
   if (er) onwriteError(stream, state, sync, er, cb);else {
     // Check if we're actually ready to finish, but don't emit yet
     var finished = needFinish(state) || stream.destroyed;
-
     if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) {
       clearBuffer(stream, state);
     }
-
     if (sync) {
       process.nextTick(afterWrite, stream, state, finished, cb);
     } else {
@@ -462,29 +436,27 @@
     }
   }
 }
-
 function afterWrite(stream, state, finished, cb) {
   if (!finished) onwriteDrain(stream, state);
   state.pendingcb--;
   cb();
   finishMaybe(stream, state);
-} // Must force callback to be called on nextTick, so that we don't
+}
+
+// Must force callback to be called on nextTick, so that we don't
 // emit 'drain' before the write() consumer gets the 'false' return
 // value, and has a chance to attach a 'drain' listener.
- - function onwriteDrain(stream, state) { if (state.length === 0 && state.needDrain) { state.needDrain = false; stream.emit('drain'); } -} // if there's something in the buffer waiting, then process it - +} +// if there's something in the buffer waiting, then process it function clearBuffer(stream, state) { state.bufferProcessing = true; var entry = state.bufferedRequest; - if (stream._writev && entry && entry.next) { // Fast case, write everything using _writev() var l = state.bufferedRequestCount; @@ -493,28 +465,25 @@ function clearBuffer(stream, state) { holder.entry = entry; var count = 0; var allBuffers = true; - while (entry) { buffer[count] = entry; if (!entry.isBuf) allBuffers = false; entry = entry.next; count += 1; } - buffer.allBuffers = allBuffers; - doWrite(stream, state, true, state.length, buffer, '', holder.finish); // doWrite is almost always async, defer these to save a bit of time - // as the hot path ends with doWrite + doWrite(stream, state, true, state.length, buffer, '', holder.finish); + // doWrite is almost always async, defer these to save a bit of time + // as the hot path ends with doWrite state.pendingcb++; state.lastBufferedRequest = null; - if (holder.next) { state.corkedRequestsFree = holder.next; holder.next = null; } else { state.corkedRequestsFree = new CorkedRequest(state); } - state.bufferedRequestCount = 0; } else { // Slow case, write chunks one-by-one @@ -525,32 +494,26 @@ function clearBuffer(stream, state) { var len = state.objectMode ? 1 : chunk.length; doWrite(stream, state, false, len, chunk, encoding, cb); entry = entry.next; - state.bufferedRequestCount--; // if we didn't call the onwrite immediately, then + state.bufferedRequestCount--; + // if we didn't call the onwrite immediately, then // it means that we need to wait until it does. // also, that means that the chunk and cb are currently // being processed, so move the buffer counter past them. - if (state.writing) { break; } } - if (entry === null) state.lastBufferedRequest = null; } - state.bufferedRequest = entry; state.bufferProcessing = false; } - Writable.prototype._write = function (chunk, encoding, cb) { cb(new ERR_METHOD_NOT_IMPLEMENTED('_write()')); }; - Writable.prototype._writev = null; - Writable.prototype.end = function (chunk, encoding, cb) { var state = this._writableState; - if (typeof chunk === 'function') { cb = chunk; chunk = null; @@ -559,47 +522,41 @@ Writable.prototype.end = function (chunk, encoding, cb) { cb = encoding; encoding = null; } + if (chunk !== null && chunk !== undefined) this.write(chunk, encoding); - if (chunk !== null && chunk !== undefined) this.write(chunk, encoding); // .end() fully uncorks - + // .end() fully uncorks if (state.corked) { state.corked = 1; this.uncork(); - } // ignore unnecessary end() calls. - + } + // ignore unnecessary end() calls. 
if (!state.ending) endWritable(this, state, cb); return this; }; - Object.defineProperty(Writable.prototype, 'writableLength', { // making it explicit this property is not enumerable // because otherwise some prototype manipulation in // userland will fail enumerable: false, - get: function get() { + get() { return this._writableState.length; } }); - function needFinish(state) { return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing; } - function callFinal(stream, state) { - stream._final(function (err) { + stream._final(err => { state.pendingcb--; - if (err) { errorOrDestroy(stream, err); } - state.prefinished = true; stream.emit('prefinish'); finishMaybe(stream, state); }); } - function prefinish(stream, state) { if (!state.prefinished && !state.finalCalled) { if (typeof stream._final === 'function' && !state.destroyed) { @@ -612,86 +569,72 @@ function prefinish(stream, state) { } } } - function finishMaybe(stream, state) { var need = needFinish(state); - if (need) { prefinish(stream, state); - if (state.pendingcb === 0) { state.finished = true; stream.emit('finish'); - if (state.autoDestroy) { // In case of duplex streams we need a way to detect // if the readable side is ready for autoDestroy as well - var rState = stream._readableState; - + const rState = stream._readableState; if (!rState || rState.autoDestroy && rState.endEmitted) { stream.destroy(); } } } } - return need; } - function endWritable(stream, state, cb) { state.ending = true; finishMaybe(stream, state); - if (cb) { if (state.finished) process.nextTick(cb);else stream.once('finish', cb); } - state.ended = true; stream.writable = false; } - function onCorkedFinish(corkReq, state, err) { var entry = corkReq.entry; corkReq.entry = null; - while (entry) { var cb = entry.callback; state.pendingcb--; cb(err); entry = entry.next; - } // reuse the free corkReq. - + } + // reuse the free corkReq. 
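clearBuffer() above takes the _writev() fast path whenever more than one request is queued, and end() fully uncorks before finishing. A short sketch of how cork()/uncork() feeds that path (a toy sink, assuming the package's Writable, which mirrors Node's):

    const { Writable } = require('readable-stream');

    const sink = new Writable({
      // With writev implemented, corked chunks are flushed in one call.
      writev(chunks, cb) {
        // chunks is an array of { chunk, encoding } entries
        console.log(`flushed ${chunks.length} chunks in one _writev call`);
        cb();
      },
      write(chunk, encoding, cb) {
        console.log('single write');
        cb();
      },
    });

    sink.cork();
    sink.write('a');
    sink.write('b');
    sink.write('c');
    // Deferring uncork() lets all three writes buffer first, so clearBuffer
    // hands them to writev as a single batch.
    process.nextTick(() => sink.uncork());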
state.corkedRequestsFree.next = corkReq; } - Object.defineProperty(Writable.prototype, 'destroyed', { // making it explicit this property is not enumerable // because otherwise some prototype manipulation in // userland will fail enumerable: false, - get: function get() { + get() { if (this._writableState === undefined) { return false; } - return this._writableState.destroyed; }, - set: function set(value) { + set(value) { // we ignore the value if the stream // has not been initialized yet if (!this._writableState) { return; - } // backward compatibility, the user is explicitly - // managing destroyed - + } + // backward compatibility, the user is explicitly + // managing destroyed this._writableState.destroyed = value; } }); Writable.prototype.destroy = destroyImpl.destroy; Writable.prototype._undestroy = destroyImpl.undestroy; - Writable.prototype._destroy = function (err, cb) { cb(err); }; \ No newline at end of file diff --git a/node_modules/node-gyp/node_modules/readable-stream/lib/internal/streams/async_iterator.js b/node_modules/node-gyp/node_modules/readable-stream/lib/internal/streams/async_iterator.js index 9fb615a2f3bc4..bcae6108c0d81 100644 --- a/node_modules/node-gyp/node_modules/readable-stream/lib/internal/streams/async_iterator.js +++ b/node_modules/node-gyp/node_modules/readable-stream/lib/internal/streams/async_iterator.js @@ -1,34 +1,26 @@ 'use strict'; -var _Object$setPrototypeO; - -function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } - -var finished = require('./end-of-stream'); - -var kLastResolve = Symbol('lastResolve'); -var kLastReject = Symbol('lastReject'); -var kError = Symbol('error'); -var kEnded = Symbol('ended'); -var kLastPromise = Symbol('lastPromise'); -var kHandlePromise = Symbol('handlePromise'); -var kStream = Symbol('stream'); - +const finished = require('./end-of-stream'); +const kLastResolve = Symbol('lastResolve'); +const kLastReject = Symbol('lastReject'); +const kError = Symbol('error'); +const kEnded = Symbol('ended'); +const kLastPromise = Symbol('lastPromise'); +const kHandlePromise = Symbol('handlePromise'); +const kStream = Symbol('stream'); function createIterResult(value, done) { return { - value: value, - done: done + value, + done }; } - function readAndResolve(iter) { - var resolve = iter[kLastResolve]; - + const resolve = iter[kLastResolve]; if (resolve !== null) { - var data = iter[kStream].read(); // we defer if data is null + const data = iter[kStream].read(); + // we defer if data is null // we can be expecting either 'end' or // 'error' - if (data !== null) { iter[kLastPromise] = null; iter[kLastResolve] = null; @@ -37,171 +29,157 @@ function readAndResolve(iter) { } } } - function onReadable(iter) { // we wait for the next tick, because it might // emit an error with process.nextTick process.nextTick(readAndResolve, iter); } - function wrapForNext(lastPromise, iter) { - return function (resolve, reject) { - lastPromise.then(function () { + return (resolve, reject) => { + lastPromise.then(() => { if (iter[kEnded]) { resolve(createIterResult(undefined, true)); return; } - iter[kHandlePromise](resolve, reject); }, reject); }; } - -var AsyncIteratorPrototype = Object.getPrototypeOf(function () {}); -var ReadableStreamAsyncIteratorPrototype = Object.setPrototypeOf((_Object$setPrototypeO = { +const AsyncIteratorPrototype = Object.getPrototypeOf(function () {}); +const 
ReadableStreamAsyncIteratorPrototype = Object.setPrototypeOf({ get stream() { return this[kStream]; }, - - next: function next() { - var _this = this; - + next() { // if we have detected an error in the meanwhile // reject straight away - var error = this[kError]; - + const error = this[kError]; if (error !== null) { return Promise.reject(error); } - if (this[kEnded]) { return Promise.resolve(createIterResult(undefined, true)); } - if (this[kStream].destroyed) { // We need to defer via nextTick because if .destroy(err) is // called, the error will be emitted via nextTick, and // we cannot guarantee that there is no error lingering around // waiting to be emitted. - return new Promise(function (resolve, reject) { - process.nextTick(function () { - if (_this[kError]) { - reject(_this[kError]); + return new Promise((resolve, reject) => { + process.nextTick(() => { + if (this[kError]) { + reject(this[kError]); } else { resolve(createIterResult(undefined, true)); } }); }); - } // if we have multiple next() calls + } + + // if we have multiple next() calls // we will wait for the previous Promise to finish // this logic is optimized to support for await loops, // where next() is only called once at a time - - - var lastPromise = this[kLastPromise]; - var promise; - + const lastPromise = this[kLastPromise]; + let promise; if (lastPromise) { promise = new Promise(wrapForNext(lastPromise, this)); } else { // fast path needed to support multiple this.push() // without triggering the next() queue - var data = this[kStream].read(); - + const data = this[kStream].read(); if (data !== null) { return Promise.resolve(createIterResult(data, false)); } - promise = new Promise(this[kHandlePromise]); } - this[kLastPromise] = promise; return promise; - } -}, _defineProperty(_Object$setPrototypeO, Symbol.asyncIterator, function () { - return this; -}), _defineProperty(_Object$setPrototypeO, "return", function _return() { - var _this2 = this; - - // destroy(err, cb) is a private API - // we can guarantee we have that here, because we control the - // Readable class this is attached to - return new Promise(function (resolve, reject) { - _this2[kStream].destroy(null, function (err) { - if (err) { - reject(err); - return; - } - - resolve(createIterResult(undefined, true)); + }, + [Symbol.asyncIterator]() { + return this; + }, + return() { + // destroy(err, cb) is a private API + // we can guarantee we have that here, because we control the + // Readable class this is attached to + return new Promise((resolve, reject) => { + this[kStream].destroy(null, err => { + if (err) { + reject(err); + return; + } + resolve(createIterResult(undefined, true)); + }); }); - }); -}), _Object$setPrototypeO), AsyncIteratorPrototype); - -var createReadableStreamAsyncIterator = function createReadableStreamAsyncIterator(stream) { - var _Object$create; - - var iterator = Object.create(ReadableStreamAsyncIteratorPrototype, (_Object$create = {}, _defineProperty(_Object$create, kStream, { - value: stream, - writable: true - }), _defineProperty(_Object$create, kLastResolve, { - value: null, - writable: true - }), _defineProperty(_Object$create, kLastReject, { - value: null, - writable: true - }), _defineProperty(_Object$create, kError, { - value: null, - writable: true - }), _defineProperty(_Object$create, kEnded, { - value: stream._readableState.endEmitted, - writable: true - }), _defineProperty(_Object$create, kHandlePromise, { - value: function value(resolve, reject) { - var data = iterator[kStream].read(); - - if (data) { - 
iterator[kLastPromise] = null; - iterator[kLastResolve] = null; - iterator[kLastReject] = null; - resolve(createIterResult(data, false)); - } else { - iterator[kLastResolve] = resolve; - iterator[kLastReject] = reject; - } + } +}, AsyncIteratorPrototype); +const createReadableStreamAsyncIterator = stream => { + const iterator = Object.create(ReadableStreamAsyncIteratorPrototype, { + [kStream]: { + value: stream, + writable: true + }, + [kLastResolve]: { + value: null, + writable: true + }, + [kLastReject]: { + value: null, + writable: true }, - writable: true - }), _Object$create)); + [kError]: { + value: null, + writable: true + }, + [kEnded]: { + value: stream._readableState.endEmitted, + writable: true + }, + // the function passed to new Promise + // is cached so we avoid allocating a new + // closure at every run + [kHandlePromise]: { + value: (resolve, reject) => { + const data = iterator[kStream].read(); + if (data) { + iterator[kLastPromise] = null; + iterator[kLastResolve] = null; + iterator[kLastReject] = null; + resolve(createIterResult(data, false)); + } else { + iterator[kLastResolve] = resolve; + iterator[kLastReject] = reject; + } + }, + writable: true + } + }); iterator[kLastPromise] = null; - finished(stream, function (err) { + finished(stream, err => { if (err && err.code !== 'ERR_STREAM_PREMATURE_CLOSE') { - var reject = iterator[kLastReject]; // reject if we are waiting for data in the Promise + const reject = iterator[kLastReject]; + // reject if we are waiting for data in the Promise // returned by next() and store the error - if (reject !== null) { iterator[kLastPromise] = null; iterator[kLastResolve] = null; iterator[kLastReject] = null; reject(err); } - iterator[kError] = err; return; } - - var resolve = iterator[kLastResolve]; - + const resolve = iterator[kLastResolve]; if (resolve !== null) { iterator[kLastPromise] = null; iterator[kLastResolve] = null; iterator[kLastReject] = null; resolve(createIterResult(undefined, true)); } - iterator[kEnded] = true; }); stream.on('readable', onReadable.bind(null, iterator)); return iterator; }; - module.exports = createReadableStreamAsyncIterator; \ No newline at end of file diff --git a/node_modules/node-gyp/node_modules/readable-stream/lib/internal/streams/buffer_list.js b/node_modules/node-gyp/node_modules/readable-stream/lib/internal/streams/buffer_list.js index cdea425f19dd9..352ac3438ef2a 100644 --- a/node_modules/node-gyp/node_modules/readable-stream/lib/internal/streams/buffer_list.js +++ b/node_modules/node-gyp/node_modules/readable-stream/lib/internal/streams/buffer_list.js @@ -1,210 +1,155 @@ 'use strict'; -function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); if (enumerableOnly) symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; }); keys.push.apply(keys, symbols); } return keys; } - -function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i] != null ? 
arguments[i] : {}; if (i % 2) { ownKeys(Object(source), true).forEach(function (key) { _defineProperty(target, key, source[key]); }); } else if (Object.getOwnPropertyDescriptors) { Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)); } else { ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } } return target; } - -function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } - -function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } - -function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } - -function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; } - -var _require = require('buffer'), - Buffer = _require.Buffer; - -var _require2 = require('util'), - inspect = _require2.inspect; - -var custom = inspect && inspect.custom || 'inspect'; - +function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); enumerableOnly && (symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; })), keys.push.apply(keys, symbols); } return keys; } +function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = null != arguments[i] ? arguments[i] : {}; i % 2 ? ownKeys(Object(source), !0).forEach(function (key) { _defineProperty(target, key, source[key]); }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } return target; } +function _defineProperty(obj, key, value) { key = _toPropertyKey(key); if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } +function _toPropertyKey(arg) { var key = _toPrimitive(arg, "string"); return typeof key === "symbol" ? key : String(key); } +function _toPrimitive(input, hint) { if (typeof input !== "object" || input === null) return input; var prim = input[Symbol.toPrimitive]; if (prim !== undefined) { var res = prim.call(input, hint || "default"); if (typeof res !== "object") return res; throw new TypeError("@@toPrimitive must return a primitive value."); } return (hint === "string" ? 
String : Number)(input); } +const _require = require('buffer'), + Buffer = _require.Buffer; +const _require2 = require('util'), + inspect = _require2.inspect; +const custom = inspect && inspect.custom || 'inspect'; function copyBuffer(src, target, offset) { Buffer.prototype.copy.call(src, target, offset); } - -module.exports = -/*#__PURE__*/ -function () { - function BufferList() { - _classCallCheck(this, BufferList); - +module.exports = class BufferList { + constructor() { this.head = null; this.tail = null; this.length = 0; } - - _createClass(BufferList, [{ - key: "push", - value: function push(v) { - var entry = { - data: v, - next: null - }; - if (this.length > 0) this.tail.next = entry;else this.head = entry; - this.tail = entry; - ++this.length; - } - }, { - key: "unshift", - value: function unshift(v) { - var entry = { - data: v, - next: this.head - }; - if (this.length === 0) this.tail = entry; - this.head = entry; - ++this.length; - } - }, { - key: "shift", - value: function shift() { - if (this.length === 0) return; - var ret = this.head.data; - if (this.length === 1) this.head = this.tail = null;else this.head = this.head.next; - --this.length; - return ret; - } - }, { - key: "clear", - value: function clear() { - this.head = this.tail = null; - this.length = 0; - } - }, { - key: "join", - value: function join(s) { - if (this.length === 0) return ''; - var p = this.head; - var ret = '' + p.data; - - while (p = p.next) { - ret += s + p.data; - } - - return ret; + push(v) { + const entry = { + data: v, + next: null + }; + if (this.length > 0) this.tail.next = entry;else this.head = entry; + this.tail = entry; + ++this.length; + } + unshift(v) { + const entry = { + data: v, + next: this.head + }; + if (this.length === 0) this.tail = entry; + this.head = entry; + ++this.length; + } + shift() { + if (this.length === 0) return; + const ret = this.head.data; + if (this.length === 1) this.head = this.tail = null;else this.head = this.head.next; + --this.length; + return ret; + } + clear() { + this.head = this.tail = null; + this.length = 0; + } + join(s) { + if (this.length === 0) return ''; + var p = this.head; + var ret = '' + p.data; + while (p = p.next) ret += s + p.data; + return ret; + } + concat(n) { + if (this.length === 0) return Buffer.alloc(0); + const ret = Buffer.allocUnsafe(n >>> 0); + var p = this.head; + var i = 0; + while (p) { + copyBuffer(p.data, ret, i); + i += p.data.length; + p = p.next; } - }, { - key: "concat", - value: function concat(n) { - if (this.length === 0) return Buffer.alloc(0); - var ret = Buffer.allocUnsafe(n >>> 0); - var p = this.head; - var i = 0; - - while (p) { - copyBuffer(p.data, ret, i); - i += p.data.length; - p = p.next; - } - - return ret; - } // Consumes a specified amount of bytes or characters from the buffered data. - - }, { - key: "consume", - value: function consume(n, hasStrings) { - var ret; - - if (n < this.head.data.length) { - // `slice` is the same for buffers and strings. - ret = this.head.data.slice(0, n); - this.head.data = this.head.data.slice(n); - } else if (n === this.head.data.length) { - // First chunk is a perfect match. - ret = this.shift(); - } else { - // Result spans more than one buffer. - ret = hasStrings ? this._getString(n) : this._getBuffer(n); - } + return ret; + } - return ret; + // Consumes a specified amount of bytes or characters from the buffered data. + consume(n, hasStrings) { + var ret; + if (n < this.head.data.length) { + // `slice` is the same for buffers and strings. 
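+ // e.g. 'abcdef'.slice(0, 4) and Buffer.from('abcdef').slice(0, 4) both
+ // return the first n units, so no string/Buffer branch is needed for a
+ // partial read of the head chunk.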
+ ret = this.head.data.slice(0, n); + this.head.data = this.head.data.slice(n); + } else if (n === this.head.data.length) { + // First chunk is a perfect match. + ret = this.shift(); + } else { + // Result spans more than one buffer. + ret = hasStrings ? this._getString(n) : this._getBuffer(n); } - }, { - key: "first", - value: function first() { - return this.head.data; - } // Consumes a specified amount of characters from the buffered data. - - }, { - key: "_getString", - value: function _getString(n) { - var p = this.head; - var c = 1; - var ret = p.data; - n -= ret.length; - - while (p = p.next) { - var str = p.data; - var nb = n > str.length ? str.length : n; - if (nb === str.length) ret += str;else ret += str.slice(0, n); - n -= nb; - - if (n === 0) { - if (nb === str.length) { - ++c; - if (p.next) this.head = p.next;else this.head = this.tail = null; - } else { - this.head = p; - p.data = str.slice(nb); - } + return ret; + } + first() { + return this.head.data; + } - break; + // Consumes a specified amount of characters from the buffered data. + _getString(n) { + var p = this.head; + var c = 1; + var ret = p.data; + n -= ret.length; + while (p = p.next) { + const str = p.data; + const nb = n > str.length ? str.length : n; + if (nb === str.length) ret += str;else ret += str.slice(0, n); + n -= nb; + if (n === 0) { + if (nb === str.length) { + ++c; + if (p.next) this.head = p.next;else this.head = this.tail = null; + } else { + this.head = p; + p.data = str.slice(nb); } - - ++c; + break; } + ++c; + } + this.length -= c; + return ret; + } - this.length -= c; - return ret; - } // Consumes a specified amount of bytes from the buffered data. - - }, { - key: "_getBuffer", - value: function _getBuffer(n) { - var ret = Buffer.allocUnsafe(n); - var p = this.head; - var c = 1; - p.data.copy(ret); - n -= p.data.length; - - while (p = p.next) { - var buf = p.data; - var nb = n > buf.length ? buf.length : n; - buf.copy(ret, ret.length - n, 0, nb); - n -= nb; - - if (n === 0) { - if (nb === buf.length) { - ++c; - if (p.next) this.head = p.next;else this.head = this.tail = null; - } else { - this.head = p; - p.data = buf.slice(nb); - } - - break; + // Consumes a specified amount of bytes from the buffered data. + _getBuffer(n) { + const ret = Buffer.allocUnsafe(n); + var p = this.head; + var c = 1; + p.data.copy(ret); + n -= p.data.length; + while (p = p.next) { + const buf = p.data; + const nb = n > buf.length ? buf.length : n; + buf.copy(ret, ret.length - n, 0, nb); + n -= nb; + if (n === 0) { + if (nb === buf.length) { + ++c; + if (p.next) this.head = p.next;else this.head = this.tail = null; + } else { + this.head = p; + p.data = buf.slice(nb); } - - ++c; + break; } - - this.length -= c; - return ret; - } // Make sure the linked list only shows the minimal necessary information. - - }, { - key: custom, - value: function value(_, options) { - return inspect(this, _objectSpread({}, options, { - // Only inspect one level. - depth: 0, - // It should not recurse. - customInspect: false - })); + ++c; } - }]); + this.length -= c; + return ret; + } - return BufferList; -}(); \ No newline at end of file + // Make sure the linked list only shows the minimal necessary information. + [custom](_, options) { + return inspect(this, _objectSpread(_objectSpread({}, options), {}, { + // Only inspect one level. + depth: 0, + // It should not recurse. 
+ customInspect: false + })); + } +}; \ No newline at end of file diff --git a/node_modules/node-gyp/node_modules/readable-stream/lib/internal/streams/destroy.js b/node_modules/node-gyp/node_modules/readable-stream/lib/internal/streams/destroy.js index 3268a16f3b6f2..7e8275567dc35 100644 --- a/node_modules/node-gyp/node_modules/readable-stream/lib/internal/streams/destroy.js +++ b/node_modules/node-gyp/node_modules/readable-stream/lib/internal/streams/destroy.js @@ -1,11 +1,9 @@ -'use strict'; // undocumented cb() API, needed for core, not for public API +'use strict'; +// undocumented cb() API, needed for core, not for public API function destroy(err, cb) { - var _this = this; - - var readableDestroyed = this._readableState && this._readableState.destroyed; - var writableDestroyed = this._writableState && this._writableState.destroyed; - + const readableDestroyed = this._readableState && this._readableState.destroyed; + const writableDestroyed = this._writableState && this._writableState.destroyed; if (readableDestroyed || writableDestroyed) { if (cb) { cb(err); @@ -17,53 +15,48 @@ function destroy(err, cb) { process.nextTick(emitErrorNT, this, err); } } - return this; - } // we set destroyed to true before firing error callbacks in order - // to make it re-entrance safe in case destroy() is called within callbacks + } + // we set destroyed to true before firing error callbacks in order + // to make it re-entrance safe in case destroy() is called within callbacks if (this._readableState) { this._readableState.destroyed = true; - } // if this is a duplex stream mark the writable part as destroyed as well - + } + // if this is a duplex stream mark the writable part as destroyed as well if (this._writableState) { this._writableState.destroyed = true; } - - this._destroy(err || null, function (err) { + this._destroy(err || null, err => { if (!cb && err) { - if (!_this._writableState) { - process.nextTick(emitErrorAndCloseNT, _this, err); - } else if (!_this._writableState.errorEmitted) { - _this._writableState.errorEmitted = true; - process.nextTick(emitErrorAndCloseNT, _this, err); + if (!this._writableState) { + process.nextTick(emitErrorAndCloseNT, this, err); + } else if (!this._writableState.errorEmitted) { + this._writableState.errorEmitted = true; + process.nextTick(emitErrorAndCloseNT, this, err); } else { - process.nextTick(emitCloseNT, _this); + process.nextTick(emitCloseNT, this); } } else if (cb) { - process.nextTick(emitCloseNT, _this); + process.nextTick(emitCloseNT, this); cb(err); } else { - process.nextTick(emitCloseNT, _this); + process.nextTick(emitCloseNT, this); } }); - return this; } - function emitErrorAndCloseNT(self, err) { emitErrorNT(self, err); emitCloseNT(self); } - function emitCloseNT(self) { if (self._writableState && !self._writableState.emitClose) return; if (self._readableState && !self._readableState.emitClose) return; self.emit('close'); } - function undestroy() { if (this._readableState) { this._readableState.destroyed = false; @@ -71,7 +64,6 @@ function undestroy() { this._readableState.ended = false; this._readableState.endEmitted = false; } - if (this._writableState) { this._writableState.destroyed = false; this._writableState.ended = false; @@ -82,24 +74,22 @@ function undestroy() { this._writableState.errorEmitted = false; } } - function emitErrorNT(self, err) { self.emit('error', err); } - function errorOrDestroy(stream, err) { // We have tests that rely on errors being emitted // in the same tick, so changing this is semver major. 
// For now when you opt-in to autoDestroy we allow // the error to be emitted nextTick. In a future // semver major update we should change the default to this. - var rState = stream._readableState; - var wState = stream._writableState; + + const rState = stream._readableState; + const wState = stream._writableState; if (rState && rState.autoDestroy || wState && wState.autoDestroy) stream.destroy(err);else stream.emit('error', err); } - module.exports = { - destroy: destroy, - undestroy: undestroy, - errorOrDestroy: errorOrDestroy + destroy, + undestroy, + errorOrDestroy }; \ No newline at end of file diff --git a/node_modules/node-gyp/node_modules/readable-stream/lib/internal/streams/end-of-stream.js b/node_modules/node-gyp/node_modules/readable-stream/lib/internal/streams/end-of-stream.js index 831f286d98fa9..b6d101691f187 100644 --- a/node_modules/node-gyp/node_modules/readable-stream/lib/internal/streams/end-of-stream.js +++ b/node_modules/node-gyp/node_modules/readable-stream/lib/internal/streams/end-of-stream.js @@ -1,78 +1,62 @@ // Ported from https://github.com/mafintosh/end-of-stream with // permission from the author, Mathias Buus (@mafintosh). -'use strict'; -var ERR_STREAM_PREMATURE_CLOSE = require('../../../errors').codes.ERR_STREAM_PREMATURE_CLOSE; +'use strict'; +const ERR_STREAM_PREMATURE_CLOSE = require('../../../errors').codes.ERR_STREAM_PREMATURE_CLOSE; function once(callback) { - var called = false; + let called = false; return function () { if (called) return; called = true; - for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) { args[_key] = arguments[_key]; } - callback.apply(this, args); }; } - function noop() {} - function isRequest(stream) { return stream.setHeader && typeof stream.abort === 'function'; } - function eos(stream, opts, callback) { if (typeof opts === 'function') return eos(stream, null, opts); if (!opts) opts = {}; callback = once(callback || noop); - var readable = opts.readable || opts.readable !== false && stream.readable; - var writable = opts.writable || opts.writable !== false && stream.writable; - - var onlegacyfinish = function onlegacyfinish() { + let readable = opts.readable || opts.readable !== false && stream.readable; + let writable = opts.writable || opts.writable !== false && stream.writable; + const onlegacyfinish = () => { if (!stream.writable) onfinish(); }; - var writableEnded = stream._writableState && stream._writableState.finished; - - var onfinish = function onfinish() { + const onfinish = () => { writable = false; writableEnded = true; if (!readable) callback.call(stream); }; - var readableEnded = stream._readableState && stream._readableState.endEmitted; - - var onend = function onend() { + const onend = () => { readable = false; readableEnded = true; if (!writable) callback.call(stream); }; - - var onerror = function onerror(err) { + const onerror = err => { callback.call(stream, err); }; - - var onclose = function onclose() { - var err; - + const onclose = () => { + let err; if (readable && !readableEnded) { if (!stream._readableState || !stream._readableState.ended) err = new ERR_STREAM_PREMATURE_CLOSE(); return callback.call(stream, err); } - if (writable && !writableEnded) { if (!stream._writableState || !stream._writableState.ended) err = new ERR_STREAM_PREMATURE_CLOSE(); return callback.call(stream, err); } }; - - var onrequest = function onrequest() { + const onrequest = () => { stream.req.on('finish', onfinish); }; - if (isRequest(stream)) { stream.on('complete', onfinish); 
stream.on('abort', onclose); @@ -82,7 +66,6 @@ function eos(stream, opts, callback) { stream.on('end', onlegacyfinish); stream.on('close', onlegacyfinish); } - stream.on('end', onend); stream.on('finish', onfinish); if (opts.error !== false) stream.on('error', onerror); @@ -100,5 +83,4 @@ function eos(stream, opts, callback) { stream.removeListener('close', onclose); }; } - module.exports = eos; \ No newline at end of file diff --git a/node_modules/node-gyp/node_modules/readable-stream/lib/internal/streams/from.js b/node_modules/node-gyp/node_modules/readable-stream/lib/internal/streams/from.js index 6c41284416799..4ca2cd1996d28 100644 --- a/node_modules/node-gyp/node_modules/readable-stream/lib/internal/streams/from.js +++ b/node_modules/node-gyp/node_modules/readable-stream/lib/internal/streams/from.js @@ -1,52 +1,42 @@ 'use strict'; function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } } - function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; } - -function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); if (enumerableOnly) symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; }); keys.push.apply(keys, symbols); } return keys; } - -function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i] != null ? arguments[i] : {}; if (i % 2) { ownKeys(Object(source), true).forEach(function (key) { _defineProperty(target, key, source[key]); }); } else if (Object.getOwnPropertyDescriptors) { Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)); } else { ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } } return target; } - -function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } - -var ERR_INVALID_ARG_TYPE = require('../../../errors').codes.ERR_INVALID_ARG_TYPE; - +function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); enumerableOnly && (symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; })), keys.push.apply(keys, symbols); } return keys; } +function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = null != arguments[i] ? arguments[i] : {}; i % 2 ? ownKeys(Object(source), !0).forEach(function (key) { _defineProperty(target, key, source[key]); }) : Object.getOwnPropertyDescriptors ? 
Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } return target; } +function _defineProperty(obj, key, value) { key = _toPropertyKey(key); if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } +function _toPropertyKey(arg) { var key = _toPrimitive(arg, "string"); return typeof key === "symbol" ? key : String(key); } +function _toPrimitive(input, hint) { if (typeof input !== "object" || input === null) return input; var prim = input[Symbol.toPrimitive]; if (prim !== undefined) { var res = prim.call(input, hint || "default"); if (typeof res !== "object") return res; throw new TypeError("@@toPrimitive must return a primitive value."); } return (hint === "string" ? String : Number)(input); } +const ERR_INVALID_ARG_TYPE = require('../../../errors').codes.ERR_INVALID_ARG_TYPE; function from(Readable, iterable, opts) { - var iterator; - + let iterator; if (iterable && typeof iterable.next === 'function') { iterator = iterable; } else if (iterable && iterable[Symbol.asyncIterator]) iterator = iterable[Symbol.asyncIterator]();else if (iterable && iterable[Symbol.iterator]) iterator = iterable[Symbol.iterator]();else throw new ERR_INVALID_ARG_TYPE('iterable', ['Iterable'], iterable); - - var readable = new Readable(_objectSpread({ + const readable = new Readable(_objectSpread({ objectMode: true - }, opts)); // Reading boolean to protect against _read + }, opts)); + // Reading boolean to protect against _read // being called before last iteration completion. - - var reading = false; - + let reading = false; readable._read = function () { if (!reading) { reading = true; next(); } }; - function next() { return _next2.apply(this, arguments); } - function _next2() { _next2 = _asyncToGenerator(function* () { try { - var _ref = yield iterator.next(), - value = _ref.value, - done = _ref.done; - + const _yield$iterator$next = yield iterator.next(), + value = _yield$iterator$next.value, + done = _yield$iterator$next.done; if (done) { readable.push(null); - } else if (readable.push((yield value))) { + } else if (readable.push(yield value)) { next(); } else { reading = false; @@ -57,8 +47,6 @@ function from(Readable, iterable, opts) { }); return _next2.apply(this, arguments); } - return readable; } - module.exports = from; \ No newline at end of file diff --git a/node_modules/node-gyp/node_modules/readable-stream/lib/internal/streams/pipeline.js b/node_modules/node-gyp/node_modules/readable-stream/lib/internal/streams/pipeline.js index 6589909889c58..272546db82513 100644 --- a/node_modules/node-gyp/node_modules/readable-stream/lib/internal/streams/pipeline.js +++ b/node_modules/node-gyp/node_modules/readable-stream/lib/internal/streams/pipeline.js @@ -1,88 +1,78 @@ // Ported from https://github.com/mafintosh/pump with // permission from the author, Mathias Buus (@mafintosh). 
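The pipeline.js port below chains streams with a per-stream destroyer: the first error (or premature close) tears down every stream in the chain, and the final callback runs exactly once. A usage sketch, assuming the pipeline export from readable-stream and hypothetical file names:

    const { pipeline } = require('readable-stream');
    const fs = require('fs');
    const zlib = require('zlib');

    pipeline(
      fs.createReadStream('archive.log'),     // hypothetical input
      zlib.createGzip(),
      fs.createWriteStream('archive.log.gz'), // hypothetical output
      (err) => {
        // Runs once, after success or after all streams are destroyed.
        if (err) console.error('pipeline failed:', err);
        else console.log('pipeline succeeded');
      }
    );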
-'use strict'; -var eos; +'use strict'; +let eos; function once(callback) { - var called = false; + let called = false; return function () { if (called) return; called = true; - callback.apply(void 0, arguments); + callback(...arguments); }; } - -var _require$codes = require('../../../errors').codes, - ERR_MISSING_ARGS = _require$codes.ERR_MISSING_ARGS, - ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED; - +const _require$codes = require('../../../errors').codes, + ERR_MISSING_ARGS = _require$codes.ERR_MISSING_ARGS, + ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED; function noop(err) { // Rethrow the error if it exists to avoid swallowing it if (err) throw err; } - function isRequest(stream) { return stream.setHeader && typeof stream.abort === 'function'; } - function destroyer(stream, reading, writing, callback) { callback = once(callback); - var closed = false; - stream.on('close', function () { + let closed = false; + stream.on('close', () => { closed = true; }); if (eos === undefined) eos = require('./end-of-stream'); eos(stream, { readable: reading, writable: writing - }, function (err) { + }, err => { if (err) return callback(err); closed = true; callback(); }); - var destroyed = false; - return function (err) { + let destroyed = false; + return err => { if (closed) return; if (destroyed) return; - destroyed = true; // request.destroy just do .end - .abort is what we want + destroyed = true; + // request.destroy just do .end - .abort is what we want if (isRequest(stream)) return stream.abort(); if (typeof stream.destroy === 'function') return stream.destroy(); callback(err || new ERR_STREAM_DESTROYED('pipe')); }; } - function call(fn) { fn(); } - function pipe(from, to) { return from.pipe(to); } - function popCallback(streams) { if (!streams.length) return noop; if (typeof streams[streams.length - 1] !== 'function') return noop; return streams.pop(); } - function pipeline() { for (var _len = arguments.length, streams = new Array(_len), _key = 0; _key < _len; _key++) { streams[_key] = arguments[_key]; } - - var callback = popCallback(streams); + const callback = popCallback(streams); if (Array.isArray(streams[0])) streams = streams[0]; - if (streams.length < 2) { throw new ERR_MISSING_ARGS('streams'); } - - var error; - var destroys = streams.map(function (stream, i) { - var reading = i < streams.length - 1; - var writing = i > 0; + let error; + const destroys = streams.map(function (stream, i) { + const reading = i < streams.length - 1; + const writing = i > 0; return destroyer(stream, reading, writing, function (err) { if (!error) error = err; if (err) destroys.forEach(call); @@ -93,5 +83,4 @@ function pipeline() { }); return streams.reduce(pipe); } - module.exports = pipeline; \ No newline at end of file diff --git a/node_modules/node-gyp/node_modules/readable-stream/lib/internal/streams/state.js b/node_modules/node-gyp/node_modules/readable-stream/lib/internal/streams/state.js index 19887eb8a9070..8a994b4422a2d 100644 --- a/node_modules/node-gyp/node_modules/readable-stream/lib/internal/streams/state.js +++ b/node_modules/node-gyp/node_modules/readable-stream/lib/internal/streams/state.js @@ -1,27 +1,22 @@ 'use strict'; -var ERR_INVALID_OPT_VALUE = require('../../../errors').codes.ERR_INVALID_OPT_VALUE; - +const ERR_INVALID_OPT_VALUE = require('../../../errors').codes.ERR_INVALID_OPT_VALUE; function highWaterMarkFrom(options, isDuplex, duplexKey) { return options.highWaterMark != null ? options.highWaterMark : isDuplex ? 
options[duplexKey] : null; } - function getHighWaterMark(state, options, duplexKey, isDuplex) { - var hwm = highWaterMarkFrom(options, isDuplex, duplexKey); - + const hwm = highWaterMarkFrom(options, isDuplex, duplexKey); if (hwm != null) { if (!(isFinite(hwm) && Math.floor(hwm) === hwm) || hwm < 0) { - var name = isDuplex ? duplexKey : 'highWaterMark'; + const name = isDuplex ? duplexKey : 'highWaterMark'; throw new ERR_INVALID_OPT_VALUE(name, hwm); } - return Math.floor(hwm); - } // Default value - + } + // Default value return state.objectMode ? 16 : 16 * 1024; } - module.exports = { - getHighWaterMark: getHighWaterMark + getHighWaterMark }; \ No newline at end of file diff --git a/node_modules/node-gyp/node_modules/readable-stream/package.json b/node_modules/node-gyp/node_modules/readable-stream/package.json index 0b0c4bd207ace..4cd81b628d572 100644 --- a/node_modules/node-gyp/node_modules/readable-stream/package.json +++ b/node_modules/node-gyp/node_modules/readable-stream/package.json @@ -1,6 +1,6 @@ { "name": "readable-stream", - "version": "3.6.0", + "version": "3.6.1", "description": "Streams3, a user-land copy of the stream library from Node.js", "main": "readable.js", "engines": { diff --git a/node_modules/spdx-correct/index.js b/node_modules/spdx-correct/index.js index c51a79f5d1c80..4d9037e0cc435 100644 --- a/node_modules/spdx-correct/index.js +++ b/node_modules/spdx-correct/index.js @@ -25,6 +25,18 @@ function valid (string) { } } +// Sorting function that orders the given array of transpositions such +// that a transposition with the longer pattern comes before a transposition +// with a shorter pattern. This is to prevent e.g. the transposition +// ["General Public License", "GPL"] from matching to "Lesser General Public License" +// before a longer and more accurate transposition ["Lesser General Public License", "LGPL"] +// has a chance to be recognized. 
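+// For example, after sorting, ['Lesser General Public License', 'LGPL']
+// is tried before the shorter ['General Public License', 'GPL'], so a
+// string like 'GNU Lesser General Public License' can reach the LGPL
+// transposition instead of stopping at a GPL match. Equal-length
+// patterns fall back to a case-insensitive alphabetical comparison so
+// the ordering is stable.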
+function sortTranspositions(a, b) { + var length = b[0].length - a[0].length + if (length !== 0) return length + return a[0].toUpperCase().localeCompare(b[0].toUpperCase()) +} + // Common transpositions of license identifier acronyms var transpositions = [ ['APGL', 'AGPL'], @@ -41,8 +53,17 @@ var transpositions = [ ['GUN', 'GPL'], ['+', ''], ['GNU GPL', 'GPL'], + ['GNU LGPL', 'LGPL'], ['GNU/GPL', 'GPL'], ['GNU GLP', 'GPL'], + ['GNU LESSER GENERAL PUBLIC LICENSE', 'LGPL'], + ['GNU Lesser General Public License', 'LGPL'], + ['GNU LESSER GENERAL PUBLIC LICENSE', 'LGPL-2.1'], + ['GNU Lesser General Public License', 'LGPL-2.1'], + ['LESSER GENERAL PUBLIC LICENSE', 'LGPL'], + ['Lesser General Public License', 'LGPL'], + ['LESSER GENERAL PUBLIC LICENSE', 'LGPL-2.1'], + ['Lesser General Public License', 'LGPL-2.1'], ['GNU General Public License', 'GPL'], ['Gnu public license', 'GPL'], ['GNU Public License', 'GPL'], @@ -51,8 +72,9 @@ var transpositions = [ ['Mozilla Public License', 'MPL'], ['Universal Permissive License', 'UPL'], ['WTH', 'WTF'], + ['WTFGPL', 'WTFPL'], ['-License', ''] -] +].sort(sortTranspositions) var TRANSPOSED = 0 var CORRECT = 1 @@ -254,7 +276,7 @@ var lastResorts = [ ['MPL', 'MPL-2.0'], ['X11', 'X11'], ['ZLIB', 'Zlib'] -].concat(licensesWithOneVersion) +].concat(licensesWithOneVersion).sort(sortTranspositions) var SUBSTRING = 0 var IDENTIFIER = 1 diff --git a/node_modules/spdx-correct/package.json b/node_modules/spdx-correct/package.json index 35c68bdaa6c61..d77615638be66 100644 --- a/node_modules/spdx-correct/package.json +++ b/node_modules/spdx-correct/package.json @@ -1,24 +1,17 @@ { "name": "spdx-correct", "description": "correct invalid SPDX expressions", - "version": "3.1.1", - "author": "Kyle E. Mitchell (https://kemitchell.com)", - "contributors": [ - "Kyle E. 
Mitchell (https://kemitchell.com)", - "Christian Zommerfelds ", - "Tal Einat ", - "Dan Butvinik " - ], + "version": "3.2.0", "dependencies": { "spdx-expression-parse": "^3.0.0", "spdx-license-ids": "^3.0.0" }, "devDependencies": { - "defence-cli": "^2.0.1", + "defence-cli": "^3.0.1", "replace-require-self": "^1.0.0", - "standard": "^11.0.0", - "standard-markdown": "^4.0.2", - "tape": "^4.9.0" + "standard": "^14.3.4", + "standard-markdown": "^6.0.0", + "tape": "^5.0.1" }, "files": [ "index.js" diff --git a/node_modules/tuf-js/LICENSE b/node_modules/tuf-js/LICENSE index f28ab0914a319..420700f5d3765 100644 --- a/node_modules/tuf-js/LICENSE +++ b/node_modules/tuf-js/LICENSE @@ -1,6 +1,6 @@ MIT License -Copyright GitHub +Copyright (c) 2022 GitHub and the TUF Contributors Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal diff --git a/node_modules/tuf-js/dist/utils/config.d.ts b/node_modules/tuf-js/dist/config.d.ts similarity index 100% rename from node_modules/tuf-js/dist/utils/config.d.ts rename to node_modules/tuf-js/dist/config.d.ts diff --git a/node_modules/tuf-js/dist/utils/config.js b/node_modules/tuf-js/dist/config.js similarity index 100% rename from node_modules/tuf-js/dist/utils/config.js rename to node_modules/tuf-js/dist/config.js diff --git a/node_modules/tuf-js/dist/error.d.ts b/node_modules/tuf-js/dist/error.d.ts index 130e49ab9dfd2..3a42f0441b68f 100644 --- a/node_modules/tuf-js/dist/error.d.ts +++ b/node_modules/tuf-js/dist/error.d.ts @@ -6,20 +6,12 @@ export declare class PersistError extends Error { } export declare class RepositoryError extends Error { } -export declare class UnsignedMetadataError extends RepositoryError { -} export declare class BadVersionError extends RepositoryError { } export declare class EqualVersionError extends BadVersionError { } export declare class ExpiredMetadataError extends RepositoryError { } -export declare class LengthOrHashMismatchError extends RepositoryError { -} -export declare class CryptoError extends Error { -} -export declare class UnsupportedAlgorithmError extends CryptoError { -} export declare class DownloadError extends Error { } export declare class DownloadLengthMismatchError extends DownloadError { diff --git a/node_modules/tuf-js/dist/error.js b/node_modules/tuf-js/dist/error.js index ce7ca5ea06ceb..f4b10fa202895 100644 --- a/node_modules/tuf-js/dist/error.js +++ b/node_modules/tuf-js/dist/error.js @@ -1,6 +1,6 @@ "use strict"; Object.defineProperty(exports, "__esModule", { value: true }); -exports.DownloadHTTPError = exports.DownloadLengthMismatchError = exports.DownloadError = exports.UnsupportedAlgorithmError = exports.CryptoError = exports.LengthOrHashMismatchError = exports.ExpiredMetadataError = exports.EqualVersionError = exports.BadVersionError = exports.UnsignedMetadataError = exports.RepositoryError = exports.PersistError = exports.RuntimeError = exports.ValueError = void 0; +exports.DownloadHTTPError = exports.DownloadLengthMismatchError = exports.DownloadError = exports.ExpiredMetadataError = exports.EqualVersionError = exports.BadVersionError = exports.RepositoryError = exports.PersistError = exports.RuntimeError = exports.ValueError = void 0; // An error about insufficient values class ValueError extends Error { } @@ -17,10 +17,6 @@ exports.PersistError = PersistError; class RepositoryError extends Error { } exports.RepositoryError = RepositoryError; -// An error about metadata object with insufficient 
threshold of signatures. -class UnsignedMetadataError extends RepositoryError { -} -exports.UnsignedMetadataError = UnsignedMetadataError; // An error for metadata that contains an invalid version number. class BadVersionError extends RepositoryError { } @@ -33,16 +29,6 @@ exports.EqualVersionError = EqualVersionError; class ExpiredMetadataError extends RepositoryError { } exports.ExpiredMetadataError = ExpiredMetadataError; -// An error while checking the length and hash values of an object. -class LengthOrHashMismatchError extends RepositoryError { -} -exports.LengthOrHashMismatchError = LengthOrHashMismatchError; -class CryptoError extends Error { -} -exports.CryptoError = CryptoError; -class UnsupportedAlgorithmError extends CryptoError { -} -exports.UnsupportedAlgorithmError = UnsupportedAlgorithmError; //----- Download Errors ------------------------------------------------------- // An error occurred while attempting to download a file. class DownloadError extends Error { diff --git a/node_modules/tuf-js/dist/fetcher.d.ts b/node_modules/tuf-js/dist/fetcher.d.ts index 2b52cbef52326..126e9eb11afc0 100644 --- a/node_modules/tuf-js/dist/fetcher.d.ts +++ b/node_modules/tuf-js/dist/fetcher.d.ts @@ -1,7 +1,11 @@ /// /// type DownloadFileHandler = (file: string) => Promise; -export declare abstract class BaseFetcher { +export interface Fetcher { + downloadFile(url: string, maxLength: number, handler: DownloadFileHandler): Promise; + downloadBytes(url: string, maxLength: number): Promise; +} +export declare abstract class BaseFetcher implements Fetcher { abstract fetch(url: string): Promise; downloadFile(url: string, maxLength: number, handler: DownloadFileHandler): Promise; downloadBytes(url: string, maxLength: number): Promise; @@ -10,7 +14,7 @@ interface FetcherOptions { timeout?: number; retries?: number; } -export declare class Fetcher extends BaseFetcher { +export declare class DefaultFetcher extends BaseFetcher { private timeout?; private retries?; constructor(options?: FetcherOptions); diff --git a/node_modules/tuf-js/dist/fetcher.js b/node_modules/tuf-js/dist/fetcher.js index cb42ab22a1d31..7a7405ac53e72 100644 --- a/node_modules/tuf-js/dist/fetcher.js +++ b/node_modules/tuf-js/dist/fetcher.js @@ -3,7 +3,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) { return (mod && mod.__esModule) ? 
mod : { "default": mod }; }; Object.defineProperty(exports, "__esModule", { value: true }); -exports.Fetcher = exports.BaseFetcher = void 0; +exports.DefaultFetcher = exports.BaseFetcher = void 0; const fs_1 = __importDefault(require("fs")); const make_fetch_happen_1 = __importDefault(require("make-fetch-happen")); const util_1 = __importDefault(require("util")); @@ -51,7 +51,7 @@ class BaseFetcher { } } exports.BaseFetcher = BaseFetcher; -class Fetcher extends BaseFetcher { +class DefaultFetcher extends BaseFetcher { constructor(options = {}) { super(); this.timeout = options.timeout; @@ -68,7 +68,7 @@ class Fetcher extends BaseFetcher { return response.body; } } -exports.Fetcher = Fetcher; +exports.DefaultFetcher = DefaultFetcher; const writeBufferToStream = async (stream, buffer) => { return new Promise((resolve, reject) => { stream.write(buffer, (err) => { diff --git a/node_modules/tuf-js/dist/index.d.ts b/node_modules/tuf-js/dist/index.d.ts index bfe3adcac2aef..b4d1bc2b9c873 100644 --- a/node_modules/tuf-js/dist/index.d.ts +++ b/node_modules/tuf-js/dist/index.d.ts @@ -1,3 +1,3 @@ -export { BaseFetcher } from './fetcher'; -export { TargetFile } from './models/file'; +export { TargetFile } from '@tufjs/models'; +export { BaseFetcher, Fetcher } from './fetcher'; export { Updater } from './updater'; diff --git a/node_modules/tuf-js/dist/index.js b/node_modules/tuf-js/dist/index.js index 6245d1724a208..5a83b91f355d8 100644 --- a/node_modules/tuf-js/dist/index.js +++ b/node_modules/tuf-js/dist/index.js @@ -1,9 +1,9 @@ "use strict"; Object.defineProperty(exports, "__esModule", { value: true }); -exports.Updater = exports.TargetFile = exports.BaseFetcher = void 0; +exports.Updater = exports.BaseFetcher = exports.TargetFile = void 0; +var models_1 = require("@tufjs/models"); +Object.defineProperty(exports, "TargetFile", { enumerable: true, get: function () { return models_1.TargetFile; } }); var fetcher_1 = require("./fetcher"); Object.defineProperty(exports, "BaseFetcher", { enumerable: true, get: function () { return fetcher_1.BaseFetcher; } }); -var file_1 = require("./models/file"); -Object.defineProperty(exports, "TargetFile", { enumerable: true, get: function () { return file_1.TargetFile; } }); var updater_1 = require("./updater"); Object.defineProperty(exports, "Updater", { enumerable: true, get: function () { return updater_1.Updater; } }); diff --git a/node_modules/tuf-js/dist/models/index.d.ts b/node_modules/tuf-js/dist/models/index.d.ts deleted file mode 100644 index 58d779159215b..0000000000000 --- a/node_modules/tuf-js/dist/models/index.d.ts +++ /dev/null @@ -1,5 +0,0 @@ -export { Metadata } from './metadata'; -export { Root } from './root'; -export { Snapshot } from './snapshot'; -export { Targets } from './targets'; -export { Timestamp } from './timestamp'; diff --git a/node_modules/tuf-js/dist/models/index.js b/node_modules/tuf-js/dist/models/index.js deleted file mode 100644 index aa3d828cf9b43..0000000000000 --- a/node_modules/tuf-js/dist/models/index.js +++ /dev/null @@ -1,13 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.Timestamp = exports.Targets = exports.Snapshot = exports.Root = exports.Metadata = void 0; -var metadata_1 = require("./metadata"); -Object.defineProperty(exports, "Metadata", { enumerable: true, get: function () { return metadata_1.Metadata; } }); -var root_1 = require("./root"); -Object.defineProperty(exports, "Root", { enumerable: true, get: function () { return root_1.Root; } }); -var snapshot_1 = 
require("./snapshot"); -Object.defineProperty(exports, "Snapshot", { enumerable: true, get: function () { return snapshot_1.Snapshot; } }); -var targets_1 = require("./targets"); -Object.defineProperty(exports, "Targets", { enumerable: true, get: function () { return targets_1.Targets; } }); -var timestamp_1 = require("./timestamp"); -Object.defineProperty(exports, "Timestamp", { enumerable: true, get: function () { return timestamp_1.Timestamp; } }); diff --git a/node_modules/tuf-js/dist/store.d.ts b/node_modules/tuf-js/dist/store.d.ts index a6e20ae559c8b..aed13b300d468 100644 --- a/node_modules/tuf-js/dist/store.d.ts +++ b/node_modules/tuf-js/dist/store.d.ts @@ -1,5 +1,5 @@ /// -import { Metadata, Root, Snapshot, Targets, Timestamp } from './models'; +import { Metadata, Root, Snapshot, Targets, Timestamp } from '@tufjs/models'; export declare class TrustedMetadataStore { private trustedSet; private referenceTime; diff --git a/node_modules/tuf-js/dist/store.js b/node_modules/tuf-js/dist/store.js index 351a1961730bc..8567336108709 100644 --- a/node_modules/tuf-js/dist/store.js +++ b/node_modules/tuf-js/dist/store.js @@ -1,9 +1,8 @@ "use strict"; Object.defineProperty(exports, "__esModule", { value: true }); exports.TrustedMetadataStore = void 0; +const models_1 = require("@tufjs/models"); const error_1 = require("./error"); -const models_1 = require("./models"); -const types_1 = require("./utils/types"); class TrustedMetadataStore { constructor(rootData) { this.trustedSet = {}; @@ -32,18 +31,18 @@ class TrustedMetadataStore { } updateRoot(bytesBuffer) { const data = JSON.parse(bytesBuffer.toString('utf8')); - const newRoot = models_1.Metadata.fromJSON(types_1.MetadataKind.Root, data); - if (newRoot.signed.type != types_1.MetadataKind.Root) { + const newRoot = models_1.Metadata.fromJSON(models_1.MetadataKind.Root, data); + if (newRoot.signed.type != models_1.MetadataKind.Root) { throw new error_1.RepositoryError(`Expected 'root', got ${newRoot.signed.type}`); } // Client workflow 5.4: check for arbitrary software attack - this.root.verifyDelegate(types_1.MetadataKind.Root, newRoot); + this.root.verifyDelegate(models_1.MetadataKind.Root, newRoot); // Client workflow 5.5: check for rollback attack if (newRoot.signed.version != this.root.signed.version + 1) { throw new error_1.BadVersionError(`Expected version ${this.root.signed.version + 1}, got ${newRoot.signed.version}`); } // Check that new root is signed by self - newRoot.verifyDelegate(types_1.MetadataKind.Root, newRoot); + newRoot.verifyDelegate(models_1.MetadataKind.Root, newRoot); // Client workflow 5.7: set new root as trusted root this.trustedSet.root = newRoot; return newRoot; @@ -56,12 +55,12 @@ class TrustedMetadataStore { throw new error_1.ExpiredMetadataError('Final root.json is expired'); } const data = JSON.parse(bytesBuffer.toString('utf8')); - const newTimestamp = models_1.Metadata.fromJSON(types_1.MetadataKind.Timestamp, data); - if (newTimestamp.signed.type != types_1.MetadataKind.Timestamp) { + const newTimestamp = models_1.Metadata.fromJSON(models_1.MetadataKind.Timestamp, data); + if (newTimestamp.signed.type != models_1.MetadataKind.Timestamp) { throw new error_1.RepositoryError(`Expected 'timestamp', got ${newTimestamp.signed.type}`); } // Client workflow 5.4.2: check for arbitrary software attack - this.root.verifyDelegate(types_1.MetadataKind.Timestamp, newTimestamp); + this.root.verifyDelegate(models_1.MetadataKind.Timestamp, newTimestamp); if (this.timestamp) { // Prevent rolling back timestamp version // Client 
workflow 5.4.3.1: check for rollback attack @@ -104,12 +103,12 @@ class TrustedMetadataStore { snapshotMeta.verify(bytesBuffer); } const data = JSON.parse(bytesBuffer.toString('utf8')); - const newSnapshot = models_1.Metadata.fromJSON(types_1.MetadataKind.Snapshot, data); - if (newSnapshot.signed.type != types_1.MetadataKind.Snapshot) { + const newSnapshot = models_1.Metadata.fromJSON(models_1.MetadataKind.Snapshot, data); + if (newSnapshot.signed.type != models_1.MetadataKind.Snapshot) { throw new error_1.RepositoryError(`Expected 'snapshot', got ${newSnapshot.signed.type}`); } // Client workflow 5.5.3: check for arbitrary software attack - this.root.verifyDelegate(types_1.MetadataKind.Snapshot, newSnapshot); + this.root.verifyDelegate(models_1.MetadataKind.Snapshot, newSnapshot); // version check against meta version (5.5.4) is deferred to allow old // snapshot to be used in rollback protection // Client workflow 5.5.5: check for rollback attack @@ -149,8 +148,8 @@ class TrustedMetadataStore { // Client workflow 5.6.2: check against snapshot role's targets hash meta.verify(bytesBuffer); const data = JSON.parse(bytesBuffer.toString('utf8')); - const newDelegate = models_1.Metadata.fromJSON(types_1.MetadataKind.Targets, data); - if (newDelegate.signed.type != types_1.MetadataKind.Targets) { + const newDelegate = models_1.Metadata.fromJSON(models_1.MetadataKind.Targets, data); + if (newDelegate.signed.type != models_1.MetadataKind.Targets) { throw new error_1.RepositoryError(`Expected 'targets', got ${newDelegate.signed.type}`); } // Client workflow 5.6.3: check for arbitrary software attack @@ -170,11 +169,11 @@ class TrustedMetadataStore { // Note that an expired initial root is still considered valid. loadTrustedRoot(bytesBuffer) { const data = JSON.parse(bytesBuffer.toString('utf8')); - const root = models_1.Metadata.fromJSON(types_1.MetadataKind.Root, data); - if (root.signed.type != types_1.MetadataKind.Root) { + const root = models_1.Metadata.fromJSON(models_1.MetadataKind.Root, data); + if (root.signed.type != models_1.MetadataKind.Root) { throw new error_1.RepositoryError(`Expected 'root', got ${root.signed.type}`); } - root.verifyDelegate(types_1.MetadataKind.Root, root); + root.verifyDelegate(models_1.MetadataKind.Root, root); this.trustedSet['root'] = root; } checkFinalTimestamp() { diff --git a/node_modules/tuf-js/dist/updater.d.ts b/node_modules/tuf-js/dist/updater.d.ts index e49dca22a43d3..9da17d74714cf 100644 --- a/node_modules/tuf-js/dist/updater.d.ts +++ b/node_modules/tuf-js/dist/updater.d.ts @@ -1,12 +1,12 @@ -import { BaseFetcher } from './fetcher'; -import { TargetFile } from './models/file'; -import { Config } from './utils/config'; +import { TargetFile } from '@tufjs/models'; +import { Config } from './config'; +import { Fetcher } from './fetcher'; export interface UpdaterOptions { metadataDir: string; metadataBaseUrl: string; targetDir?: string; targetBaseUrl?: string; - fetcher?: BaseFetcher; + fetcher?: Fetcher; config?: Partial; } export declare class Updater { diff --git a/node_modules/tuf-js/dist/updater.js b/node_modules/tuf-js/dist/updater.js index 9f33c667ce9b2..7f8b6bedeedd3 100644 --- a/node_modules/tuf-js/dist/updater.js +++ b/node_modules/tuf-js/dist/updater.js @@ -24,13 +24,13 @@ var __importStar = (this && this.__importStar) || function (mod) { }; Object.defineProperty(exports, "__esModule", { value: true }); exports.Updater = void 0; +const models_1 = require("@tufjs/models"); const fs = __importStar(require("fs")); const path = 
__importStar(require("path")); +const config_1 = require("./config"); const error_1 = require("./error"); const fetcher_1 = require("./fetcher"); const store_1 = require("./store"); -const config_1 = require("./utils/config"); -const types_1 = require("./utils/types"); class Updater { constructor(options) { const { metadataDir, metadataBaseUrl, targetDir, targetBaseUrl, fetcher, config, } = options; @@ -38,12 +38,12 @@ class Updater { this.metadataBaseUrl = metadataBaseUrl; this.targetDir = targetDir; this.targetBaseUrl = targetBaseUrl; - const data = this.loadLocalMetadata(types_1.MetadataKind.Root); + const data = this.loadLocalMetadata(models_1.MetadataKind.Root); this.trustedSet = new store_1.TrustedMetadataStore(data); this.config = { ...config_1.defaultConfig, ...config }; this.fetcher = fetcher || - new fetcher_1.Fetcher({ + new fetcher_1.DefaultFetcher({ timeout: this.config.fetchTimeout, retries: this.config.fetchRetries, }); @@ -52,7 +52,7 @@ class Updater { await this.loadRoot(); await this.loadTimestamp(); await this.loadSnapshot(); - await this.loadTargets(types_1.MetadataKind.Targets, types_1.MetadataKind.Root); + await this.loadTargets(models_1.MetadataKind.Targets, models_1.MetadataKind.Root); } // Returns the TargetFile instance with information for the given target path. // @@ -123,7 +123,7 @@ class Updater { // Client workflow 5.3.4 - 5.4.7 this.trustedSet.updateRoot(bytesData); // Client workflow 5.3.8: persist root metadata file - this.persistMetadata(types_1.MetadataKind.Root, bytesData); + this.persistMetadata(models_1.MetadataKind.Root, bytesData); } catch (error) { break; @@ -135,7 +135,7 @@ class Updater { async loadTimestamp() { // Load local and remote timestamp metadata try { - const data = this.loadLocalMetadata(types_1.MetadataKind.Timestamp); + const data = this.loadLocalMetadata(models_1.MetadataKind.Timestamp); this.trustedSet.updateTimestamp(data); } catch (error) { @@ -159,14 +159,14 @@ class Updater { throw error; } // Client workflow 5.4.5: persist timestamp metadata - this.persistMetadata(types_1.MetadataKind.Timestamp, bytesData); + this.persistMetadata(models_1.MetadataKind.Timestamp, bytesData); } // Load local and remote snapshot metadata. // Client workflow 5.5: update snapshot role async loadSnapshot() { //Load local (and if needed remote) snapshot metadata try { - const data = this.loadLocalMetadata(types_1.MetadataKind.Snapshot); + const data = this.loadLocalMetadata(models_1.MetadataKind.Snapshot); this.trustedSet.updateSnapshot(data, true); } catch (error) { @@ -185,7 +185,7 @@ class Updater { // Client workflow 5.5.2 - 5.5.6 this.trustedSet.updateSnapshot(bytesData); // Client workflow 5.5.7: persist snapshot metadata file - this.persistMetadata(types_1.MetadataKind.Snapshot, bytesData); + this.persistMetadata(models_1.MetadataKind.Snapshot, bytesData); } catch (error) { throw new error_1.RuntimeError(`Unable to load snapshot metadata error ${error}`); @@ -236,8 +236,8 @@ class Updater { // is needed to load and verify the delegated targets metadata. 
        const delegationsToVisit = [
            {
-                roleName: types_1.MetadataKind.Targets,
-                parentRoleName: types_1.MetadataKind.Root,
+                roleName: models_1.MetadataKind.Targets,
+                parentRoleName: models_1.MetadataKind.Root,
            },
        ];
        const visitedRoleNames = new Set();
diff --git a/node_modules/tuf-js/dist/utils/index.d.ts b/node_modules/tuf-js/dist/utils/index.d.ts
deleted file mode 100644
index e2232bc5cceab..0000000000000
--- a/node_modules/tuf-js/dist/utils/index.d.ts
+++ /dev/null
@@ -1,5 +0,0 @@
-export * as config from './config';
-export * as guard from './guard';
-export * as json from './json';
-export * as signer from './signer';
-export * as types from './types';
diff --git a/node_modules/tuf-js/dist/utils/types.js b/node_modules/tuf-js/dist/utils/types.js
deleted file mode 100644
index 469f580743f65..0000000000000
--- a/node_modules/tuf-js/dist/utils/types.js
+++ /dev/null
@@ -1,10 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.MetadataKind = void 0;
-var MetadataKind;
-(function (MetadataKind) {
-    MetadataKind["Root"] = "root";
-    MetadataKind["Timestamp"] = "timestamp";
-    MetadataKind["Snapshot"] = "snapshot";
-    MetadataKind["Targets"] = "targets";
-})(MetadataKind = exports.MetadataKind || (exports.MetadataKind = {}));
diff --git a/node_modules/tuf-js/package.json b/node_modules/tuf-js/package.json
index 758e71223e40f..29436c760ff20 100644
--- a/node_modules/tuf-js/package.json
+++ b/node_modules/tuf-js/package.json
@@ -1,56 +1,42 @@
 {
   "name": "tuf-js",
-  "version": "1.0.0",
+  "version": "1.1.1",
   "description": "JavaScript implementation of The Update Framework (TUF)",
   "main": "dist/index.js",
   "types": "dist/index.d.ts",
   "scripts": {
-    "build": "tsc",
-    "test": "jest",
-    "test:watch": "jest --watch",
-    "test:ci": "jest --maxWorkers=2 --coverage",
-    "lint": "eslint --fix --ext .ts src/**",
-    "lint:check": "eslint --max-warnings 0 --ext .ts src/**",
-    "format": "prettier --write \"src/**/*\""
+    "build": "tsc --build",
+    "clean": "rm -rf dist",
+    "test": "jest"
   },
   "repository": {
     "type": "git",
-    "url": "git+https://github.com/github/tuf-js.git"
+    "url": "git+https://github.com/theupdateframework/tuf-js.git"
   },
   "files": [
     "dist"
   ],
   "keywords": [
-    "tuf"
+    "tuf",
+    "security",
+    "update"
   ],
   "author": "bdehamer@github.com",
   "license": "MIT",
   "bugs": {
-    "url": "https://github.com/github/tuf-js/issues"
+    "url": "https://github.com/theupdateframework/tuf-js/issues"
   },
-  "homepage": "https://github.com/github/tuf-js#readme",
+  "homepage": "https://github.com/theupdateframework/tuf-js/packages/client#readme",
   "devDependencies": {
-    "@tsconfig/node14": "^1.0.3",
-    "@types/jest": "^28.1.8",
-    "@types/lodash.isequal": "^4.5.6",
+    "@tufjs/repo-mock": "1.0.0",
     "@types/make-fetch-happen": "^10.0.1",
-    "@types/minimatch": "^5.1.2",
-    "@types/node": "^18.11.10",
-    "@typescript-eslint/eslint-plugin": "^5.45.0",
-    "@typescript-eslint/parser": "^5.45.0",
-    "eslint": "^8.28.0",
-    "eslint-config-prettier": "^8.5.0",
-    "eslint-plugin-prettier": "^4.2.1",
-    "http-server": "^14.1.1",
-    "jest": "^28.1.3",
+    "@types/node": "^18.14.5",
     "nock": "^13.2.9",
-    "prettier": "^2.8.0",
-    "ts-jest": "^28.0.8",
-    "typescript": "^4.9.3"
+    "typescript": "^4.9.5"
   },
   "dependencies": {
     "make-fetch-happen": "^11.0.1",
-    "minimatch": "^6.1.0"
+    "@tufjs/models": "1.0.0"
   },
   "engines": {
     "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
diff --git a/package-lock.json b/package-lock.json
index 4eedbcd7ffd06..fb50001ed2377 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -228,9 +228,9 @@
       }
     },
"node_modules/@actions/http-client": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-2.0.1.tgz", - "integrity": "sha512-PIXiMVtz6VvyaRsGY268qvj57hXQEpsYogYOu2nrQhlf+XCGmZstmuZBbAybUl1nQGnvS1k1eEsQ69ZoD7xlSw==", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-2.1.0.tgz", + "integrity": "sha512-BonhODnXr3amchh4qkmjPMUO8mFi/zLaaCeCAJZqch8iQqyDnVIkySjB38VHAC8IJ+bnlgfOqlhpyCUZHlQsqw==", "dev": true, "dependencies": { "tunnel": "^0.0.6" @@ -428,9 +428,9 @@ } }, "node_modules/@babel/helper-module-transforms": { - "version": "7.21.0", - "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.21.0.tgz", - "integrity": "sha512-eD/JQ21IG2i1FraJnTMbUarAUkA7G988ofehG5MDCRXaUU91rEBJuCeSoou2Sk1y4RbLYXzqEg1QLwEmRU4qcQ==", + "version": "7.21.2", + "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.21.2.tgz", + "integrity": "sha512-79yj2AR4U/Oqq/WOV7Lx6hUjau1Zfo4cI+JLAVYeMV5XIlbOhmjEk5ulbTc9fMpmlojzZHkUUxAiK+UKn+hNQQ==", "dev": true, "dependencies": { "@babel/helper-environment-visitor": "^7.18.9", @@ -439,8 +439,8 @@ "@babel/helper-split-export-declaration": "^7.18.6", "@babel/helper-validator-identifier": "^7.19.1", "@babel/template": "^7.20.7", - "@babel/traverse": "^7.21.0", - "@babel/types": "^7.21.0" + "@babel/traverse": "^7.21.2", + "@babel/types": "^7.21.2" }, "engines": { "node": ">=6.9.0" @@ -597,9 +597,9 @@ } }, "node_modules/@babel/parser": { - "version": "7.21.1", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.21.1.tgz", - "integrity": "sha512-JzhBFpkuhBNYUY7qs+wTzNmyCWUHEaAFpQQD2YfU1rPL38/L43Wvid0fFkiOCnHvsGncRZgEPyGnltABLcVDTg==", + "version": "7.21.2", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.21.2.tgz", + "integrity": "sha512-URpaIJQwEkEC2T9Kn+Ai6Xe/02iNaVCuT/PtoRz3GPVJVDpPd7mLo+VddTbhCRU9TXqW5mSrQfXZyi8kDKOVpQ==", "dev": true, "bin": { "parser": "bin/babel-parser.js" @@ -623,19 +623,19 @@ } }, "node_modules/@babel/traverse": { - "version": "7.21.0", - "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.21.0.tgz", - "integrity": "sha512-Xdt2P1H4LKTO8ApPfnO1KmzYMFpp7D/EinoXzLYN/cHcBNrVCAkAtGUcXnHXrl/VGktureU6fkQrHSBE2URfoA==", + "version": "7.21.2", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.21.2.tgz", + "integrity": "sha512-ts5FFU/dSUPS13tv8XiEObDu9K+iagEKME9kAbaP7r0Y9KtZJZ+NGndDvWoRAYNpeWafbpFeki3q9QoMD6gxyw==", "dev": true, "dependencies": { "@babel/code-frame": "^7.18.6", - "@babel/generator": "^7.21.0", + "@babel/generator": "^7.21.1", "@babel/helper-environment-visitor": "^7.18.9", "@babel/helper-function-name": "^7.21.0", "@babel/helper-hoist-variables": "^7.18.6", "@babel/helper-split-export-declaration": "^7.18.6", - "@babel/parser": "^7.21.0", - "@babel/types": "^7.21.0", + "@babel/parser": "^7.21.2", + "@babel/types": "^7.21.2", "debug": "^4.1.0", "globals": "^11.1.0" }, @@ -653,9 +653,9 @@ } }, "node_modules/@babel/types": { - "version": "7.21.0", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.21.0.tgz", - "integrity": "sha512-uR7NWq2VNFnDi7EYqiRz2Jv/VQIu38tu64Zy8TX2nQFQ6etJ9V/Rr2msW8BS132mum2rL645qpDrLtAJtVpuow==", + "version": "7.21.2", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.21.2.tgz", + "integrity": "sha512-3wRZSs7jiFaB8AjxiiD+VqN5DTG2iRvJGQ+qYFrs/654lg6kGTQWIOFjlBo5RaXuAZjBmP3+OQH4dmhqiiyYxw==", "dev": true, "dependencies": { 
"@babel/helper-string-parser": "^7.19.4", @@ -961,9 +961,9 @@ } }, "node_modules/@eslint/eslintrc": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-1.4.1.tgz", - "integrity": "sha512-XXrH9Uarn0stsyldqDYq8r++mROmWRI1xKMXa640Bb//SY1+ECYX6VzT6Lcx5frD0V30XieqJ0oX9I2Xj5aoMA==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-2.0.0.tgz", + "integrity": "sha512-fluIaaV+GyV24CCu/ggiHdV+j4RNh85yQnAYS/G2mZODZgGmmlrgCydjUcV3YvxCm9x8nMAfThsqTni4KiXT4A==", "dev": true, "peer": true, "dependencies": { @@ -1032,6 +1032,16 @@ "node": "*" } }, + "node_modules/@eslint/js": { + "version": "8.35.0", + "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.35.0.tgz", + "integrity": "sha512-JXdzbRiWclLVoD8sNUjR443VVlYqiYmDVT6rGUEIEHU5YJW0gaVZwV2xgM7D4arkvASqD0IlLUVjHiFuxaftRw==", + "dev": true, + "peer": true, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + } + }, "node_modules/@gar/promisify": { "version": "1.1.3", "resolved": "https://registry.npmjs.org/@gar/promisify/-/promisify-1.1.3.tgz", @@ -1413,9 +1423,9 @@ } }, "node_modules/@lerna/collect-updates/node_modules/readable-stream": { - "version": "2.3.7", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", - "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", + "version": "2.3.8", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.8.tgz", + "integrity": "sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==", "dev": true, "dependencies": { "core-util-is": "~1.0.0", @@ -1532,9 +1542,9 @@ } }, "node_modules/@lerna/describe-ref/node_modules/readable-stream": { - "version": "2.3.7", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", - "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", + "version": "2.3.8", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.8.tgz", + "integrity": "sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==", "dev": true, "dependencies": { "core-util-is": "~1.0.0", @@ -1712,9 +1722,9 @@ } }, "node_modules/@lerna/package-graph/node_modules/readable-stream": { - "version": "2.3.7", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", - "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", + "version": "2.3.8", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.8.tgz", + "integrity": "sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==", "dev": true, "dependencies": { "core-util-is": "~1.0.0", @@ -1929,9 +1939,9 @@ } }, "node_modules/@lerna/validation-error/node_modules/readable-stream": { - "version": "2.3.7", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", - "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", + "version": "2.3.8", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.8.tgz", + "integrity": "sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==", "dev": true, "dependencies": { "core-util-is": "~1.0.0", @@ -2452,6 +2462,18 @@ "integrity": 
"sha512-yOlFc+7UtL/89t2ZhjPvvB/DeAr3r+Dq58IgzsFkOAvVC6NMJXmCGjbptdXdR9qsX7pKcTL+s87FtYREi2dEEQ==", "dev": true }, + "node_modules/@tufjs/models": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/@tufjs/models/-/models-1.0.0.tgz", + "integrity": "sha512-RRMu4uMxWnZlxaIBxahSb2IssFZiu188sndesZflWOe1cA/qUqtemSIoBWbuVKPvvdktapImWNnKpBcc+VrCQw==", + "inBundle": true, + "dependencies": { + "minimatch": "^6.1.0" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, "node_modules/@types/debug": { "version": "4.1.7", "resolved": "https://registry.npmjs.org/@types/debug/-/debug-4.1.7.tgz", @@ -2499,9 +2521,9 @@ "dev": true }, "node_modules/@types/node": { - "version": "18.14.0", - "resolved": "https://registry.npmjs.org/@types/node/-/node-18.14.0.tgz", - "integrity": "sha512-5EWrvLmglK+imbCJY0+INViFWUHg1AHel1sq4ZVSfdcNqGy9Edv3UB9IIzzg+xPaUcAgZYcfVs2fBcwDeZzU0A==", + "version": "18.14.6", + "resolved": "https://registry.npmjs.org/@types/node/-/node-18.14.6.tgz", + "integrity": "sha512-93+VvleD3mXwlLI/xASjw0FzKcwzl3OdTCzm1LaRfqgS21gfFtK3zDXM5Op9TeeMsJVOaJ2VRDpT9q4Y3d0AvA==", "dev": true }, "node_modules/@types/normalize-package-data": { @@ -2633,13 +2655,13 @@ } }, "node_modules/agentkeepalive": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/agentkeepalive/-/agentkeepalive-4.2.1.tgz", - "integrity": "sha512-Zn4cw2NEqd+9fiSVWMscnjyQ1a8Yfoc5oBajLeo5w+YBHgDUcEBY2hS4YpTz6iN5f/2zQiktcuM6tS8x1p9dpA==", + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/agentkeepalive/-/agentkeepalive-4.3.0.tgz", + "integrity": "sha512-7Epl1Blf4Sy37j4v9f9FjICCh4+KAQOyXgHEwlyBiAQLbhKdq/i2QQU3amQalS/wPhdPzDXPL5DMR5bkn+YeWg==", "inBundle": true, "dependencies": { "debug": "^4.1.0", - "depd": "^1.1.2", + "depd": "^2.0.0", "humanize-ms": "^1.2.1" }, "engines": { @@ -3190,9 +3212,9 @@ } }, "node_modules/caniuse-lite": { - "version": "1.0.30001457", - "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001457.tgz", - "integrity": "sha512-SDIV6bgE1aVbK6XyxdURbUE89zY7+k1BBBaOwYwkNCglXlel/E7mELiHC64HQ+W0xSKlqWhV9Wh7iHxUjMs4fA==", + "version": "1.0.30001462", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001462.tgz", + "integrity": "sha512-PDd20WuOBPiasZ7KbFnmQRyuLE7cFXW2PVd7dmALzbkUXEP46upAuCDm9eY9vho8fgNMGmbAX92QBZHzcnWIqw==", "dev": true, "funding": [ { @@ -3411,9 +3433,9 @@ } }, "node_modules/code-suggester": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/code-suggester/-/code-suggester-4.3.0.tgz", - "integrity": "sha512-mdZAin2jDyurAwMdDsE6DIDRogxpyDKRXBwGWTaY1XNmf0nw3hm8Kg1fNsa5YzqUNDNDeqzWIyeYjIUoX2N9bQ==", + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/code-suggester/-/code-suggester-4.3.1.tgz", + "integrity": "sha512-QVW9uSIMl/KLF9yrjm2wm1h/o3j2WgGnQTH+o2+okVFlhopcJGW2Jv+CwvGGgfoSTxkcG3hocQee4ZZhdMhcew==", "dev": true, "dependencies": { "@octokit/rest": "^19.0.5", @@ -3421,7 +3443,7 @@ "async-retry": "^1.3.1", "diff": "^5.0.0", "glob": "^7.1.6", - "parse-diff": "^0.10.0", + "parse-diff": "^0.11.0", "yargs": "^16.0.0" }, "bin": { @@ -3700,9 +3722,9 @@ } }, "node_modules/cosmiconfig": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/cosmiconfig/-/cosmiconfig-8.0.0.tgz", - "integrity": "sha512-da1EafcpH6b/TD8vDRaWV7xFINlHlF6zKsGwS1TsuVJTZRkquaS5HTMq7uq6h31619QjbsYl21gVDOm32KM1vQ==", + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/cosmiconfig/-/cosmiconfig-8.1.0.tgz", + "integrity": 
"sha512-0tLZ9URlPGU7JsKq0DQOQ3FoRsYX8xDZ7xMiATQfaiGMz7EHowNkbU9u1coAOmnh9p/1ySpm0RB3JNWRXM5GCg==", "dev": true, "dependencies": { "import-fresh": "^3.2.1", @@ -3712,6 +3734,9 @@ }, "engines": { "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/d-fischer" } }, "node_modules/cosmiconfig-typescript-loader": { @@ -4036,12 +4061,12 @@ "inBundle": true }, "node_modules/depd": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/depd/-/depd-1.1.2.tgz", - "integrity": "sha512-7emPTl6Dpo6JRXOXjLRxck+FlLRX5847cLKEn00PLAgc3g2hTZZgr+e4c2v6QpSmLeFP3n5yUo7ft6avBK/5jQ==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", + "integrity": "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==", "inBundle": true, "engines": { - "node": ">= 0.6" + "node": ">= 0.8" } }, "node_modules/deprecation": { @@ -4193,9 +4218,9 @@ "dev": true }, "node_modules/electron-to-chromium": { - "version": "1.4.305", - "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.305.tgz", - "integrity": "sha512-WETy6tG0CT5gm1O+xCbyapWNsCcmIvrn4NHViIGYo2AT8FV2qUCXdaB+WqYxSv/vS5mFqhBYnfZAAkVArjBmUg==", + "version": "1.4.324", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.324.tgz", + "integrity": "sha512-m+eBs/kh3TXnCuqDF6aHLLRwLK2U471JAbZ1KYigf0TM96fZglxv0/ZFBvyIxnLKsIWUoDiVnHTA2mhYz1fqdA==", "dev": true }, "node_modules/emoji-regex": { @@ -4462,13 +4487,14 @@ } }, "node_modules/eslint": { - "version": "8.34.0", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.34.0.tgz", - "integrity": "sha512-1Z8iFsucw+7kSqXNZVslXS8Ioa4u2KM7GPwuKtkTFAqZ/cHMcEaR+1+Br0wLlot49cNxIiZk5wp8EAbPcYZxTg==", + "version": "8.35.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.35.0.tgz", + "integrity": "sha512-BxAf1fVL7w+JLRQhWl2pzGeSiGqbWumV4WNvc9Rhp6tiCtm4oHnyPBSEtMGZwrQgudFQ+otqzWoPB7x+hxoWsw==", "dev": true, "peer": true, "dependencies": { - "@eslint/eslintrc": "^1.4.1", + "@eslint/eslintrc": "^2.0.0", + "@eslint/js": "8.35.0", "@humanwhocodes/config-array": "^0.11.8", "@humanwhocodes/module-importer": "^1.0.1", "@nodelib/fs.walk": "^1.2.8", @@ -4482,7 +4508,7 @@ "eslint-utils": "^3.0.0", "eslint-visitor-keys": "^3.3.0", "espree": "^9.4.0", - "esquery": "^1.4.0", + "esquery": "^1.4.2", "esutils": "^2.0.2", "fast-deep-equal": "^3.1.3", "file-entry-cache": "^6.0.1", @@ -4928,9 +4954,9 @@ } }, "node_modules/esquery": { - "version": "1.4.2", - "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.4.2.tgz", - "integrity": "sha512-JVSoLdTlTDkmjFmab7H/9SL9qGSyjElT3myyKp7krqjVFQCDLmj1QFaCLRFBszBKI0XVZaiiXvuPIX3ZwHe1Ng==", + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.5.0.tgz", + "integrity": "sha512-YQLXUplAwJgCydQ78IMJywZCceoqk1oH01OERdSAJc/7U2AylwjhSCLDEtqwg811idIS/9fIU5GjG73IgjKMVg==", "dev": true, "peer": true, "dependencies": { @@ -6239,14 +6265,14 @@ } }, "node_modules/is-array-buffer": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/is-array-buffer/-/is-array-buffer-3.0.1.tgz", - "integrity": "sha512-ASfLknmY8Xa2XtB4wmbz13Wu202baeA18cJBCeCy0wXUHZF0IPyVEXqKEcd+t2fNSLLL1vC6k7lxZEojNbISXQ==", + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/is-array-buffer/-/is-array-buffer-3.0.2.tgz", + "integrity": "sha512-y+FyyR/w8vfIRq4eQcM1EYgSTnmHXPqaF+IgzgraytCFq5Xh8lllDVmAZolPJiZttZLeFSINPYMaEJ7/vWUa1w==", "dev": true, "peer": true, "dependencies": { "call-bind": "^1.0.2", - 
"get-intrinsic": "^1.1.3", + "get-intrinsic": "^1.2.0", "is-typed-array": "^1.1.10" }, "funding": { @@ -7388,9 +7414,9 @@ } }, "node_modules/lru-cache": { - "version": "7.16.2", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.16.2.tgz", - "integrity": "sha512-t6D6OM05Y3f+61zNnXh/+0D69kAgJKVEWLuWL1r38CIHRPTWpNjwpR7S+nmiQlG5GmUB1BDiiMjU1Ihs4YBLlg==", + "version": "7.18.3", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.18.3.tgz", + "integrity": "sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA==", "inBundle": true, "engines": { "node": ">=12" @@ -9101,9 +9127,9 @@ } }, "node_modules/node-gyp/node_modules/readable-stream": { - "version": "3.6.0", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", - "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", + "version": "3.6.1", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.1.tgz", + "integrity": "sha512-+rQmrWMYGA90yenhTYsLWAsLsqVC8osOw6PKE1HDYiO0gdPeKe/xDHNzIAIn4C91YQ6oenEhfYqqc1883qHbjQ==", "inBundle": true, "dependencies": { "inherits": "^2.0.3", @@ -9929,9 +9955,9 @@ } }, "node_modules/parse-diff": { - "version": "0.10.0", - "resolved": "https://registry.npmjs.org/parse-diff/-/parse-diff-0.10.0.tgz", - "integrity": "sha512-7ZnB7HXPl2WdTtSGGzMFuCIzpmaxrDsLwKZp2AQRQVlmL9MUnCHESAmrbKKD5g+ItFJiCN2YPfPyMjWeJV/JoA==", + "version": "0.11.0", + "resolved": "https://registry.npmjs.org/parse-diff/-/parse-diff-0.11.0.tgz", + "integrity": "sha512-9P+oyFJJU9jLd9/EEXf2nlyz6GNBxt2senI22/3ss1AVqf+ntxY9eNZJW9tqEyH8w8Ywfzhgixo061x70OCkzQ==", "dev": true }, "node_modules/parse-github-repo-url": { @@ -11298,9 +11324,9 @@ } }, "node_modules/spdx-correct": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/spdx-correct/-/spdx-correct-3.1.1.tgz", - "integrity": "sha512-cOYcUWwhCuHCXi49RhFRCyJEK3iPj1Ziz9DpViV3tbZOwXD49QzIN3MpOLJNxh2qwq2lJJZaKMVw9qNi4jTC0w==", + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/spdx-correct/-/spdx-correct-3.2.0.tgz", + "integrity": "sha512-kN9dJbvnySHULIluDHy32WHRUu3Og7B9sbY7tsFLctQkIqnMh3hErYgdMjTYuqmcXX+lK5T1lnUt3G7zNswmZA==", "inBundle": true, "dependencies": { "spdx-expression-parse": "^3.0.0", @@ -11382,9 +11408,9 @@ } }, "node_modules/split2/node_modules/readable-stream": { - "version": "3.6.0", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", - "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", + "version": "3.6.1", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.1.tgz", + "integrity": "sha512-+rQmrWMYGA90yenhTYsLWAsLsqVC8osOw6PKE1HDYiO0gdPeKe/xDHNzIAIn4C91YQ6oenEhfYqqc1883qHbjQ==", "dev": true, "dependencies": { "inherits": "^2.0.3", @@ -13863,9 +13889,9 @@ } }, "node_modules/through2/node_modules/readable-stream": { - "version": "3.6.0", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", - "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", + "version": "3.6.1", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.1.tgz", + "integrity": "sha512-+rQmrWMYGA90yenhTYsLWAsLsqVC8osOw6PKE1HDYiO0gdPeKe/xDHNzIAIn4C91YQ6oenEhfYqqc1883qHbjQ==", "dev": true, "dependencies": { "inherits": "^2.0.3", @@ -14030,14 +14056,14 @@ } }, 
"node_modules/tsconfig-paths": { - "version": "3.14.1", - "resolved": "https://registry.npmjs.org/tsconfig-paths/-/tsconfig-paths-3.14.1.tgz", - "integrity": "sha512-fxDhWnFSLt3VuTwtvJt5fpwxBHg5AdKWMsgcPOOIilyjymcYVZoCQF8fvFRezCNfblEXmi+PcM1eYHeOAgXCOQ==", + "version": "3.14.2", + "resolved": "https://registry.npmjs.org/tsconfig-paths/-/tsconfig-paths-3.14.2.tgz", + "integrity": "sha512-o/9iXgCYc5L/JxCHPe3Hvh8Q/2xm5Z+p18PESBU6Ff33695QnCHBEjcytY2q19ua7Mbl/DavtBOLq+oG0RCL+g==", "dev": true, "peer": true, "dependencies": { "@types/json5": "^0.0.29", - "json5": "^1.0.1", + "json5": "^1.0.2", "minimist": "^1.2.6", "strip-bom": "^3.0.0" } @@ -14066,13 +14092,13 @@ } }, "node_modules/tuf-js": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/tuf-js/-/tuf-js-1.0.0.tgz", - "integrity": "sha512-1dxsQwESDzACJjTdYHQ4wJ1f/of7jALWKfJEHSBWUQB/5UTJUx9SW6GHXp4mZ1KvdBRJCpGjssoPFGi4hvw8/A==", + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/tuf-js/-/tuf-js-1.1.1.tgz", + "integrity": "sha512-WTp382/PR96k0dI4GD5RdiRhgOU0rAC7+lnoih/5pZg3cyb3aNMqDozleEEWwyfT3+FOg7Qz9JU3n6A44tLSHw==", "inBundle": true, "dependencies": { - "make-fetch-happen": "^11.0.1", - "minimatch": "^6.1.0" + "@tufjs/models": "1.0.0", + "make-fetch-happen": "^11.0.1" }, "engines": { "node": "^14.17.0 || ^16.13.0 || >=18.0.0" @@ -14260,10 +14286,13 @@ } }, "node_modules/unist-util-is": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-5.2.0.tgz", - "integrity": "sha512-Glt17jWwZeyqrFqOK0pF1Ded5U3yzJnFr8CG1GMjCWTp9zDo2p+cmD6pWbZU8AgM5WU3IzRv6+rBwhzsGh6hBQ==", + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-5.2.1.tgz", + "integrity": "sha512-u9njyyfEh43npf1M+yGKDGVPbY/JWEemg5nH05ncKPfi+kBbKBJoTdsogMu33uhytuLlv9y0O7GH7fEdwLdLQw==", "dev": true, + "dependencies": { + "@types/unist": "^2.0.0" + }, "funding": { "type": "opencollective", "url": "https://opencollective.com/unified"