From 3c17b6965f0c5fffd5ac908388568a307466a73f Mon Sep 17 00:00:00 2001 From: Gar Date: Wed, 2 Mar 2022 10:52:09 -0800 Subject: [PATCH] deps: make-fetch-happen@10.0.4 --- node_modules/agentkeepalive/History.md | 6 + node_modules/agentkeepalive/index.d.ts | 10 +- node_modules/agentkeepalive/package.json | 2 +- .../node_modules/lru-cache/bundle/main.js | 1 + .../node_modules/lru-cache/bundle/main.mjs | 1 + .../node_modules/lru-cache/index.js | 38 +- .../node_modules/lru-cache/package.json | 26 +- .../node_modules/minipass-fetch/LICENSE | 28 ++ .../minipass-fetch/lib/abort-error.js | 17 + .../node_modules/minipass-fetch/lib/blob.js | 97 +++++ .../node_modules/minipass-fetch/lib/body.js | 346 +++++++++++++++++ .../minipass-fetch/lib/fetch-error.js | 32 ++ .../minipass-fetch/lib/headers.js | 267 +++++++++++++ .../node_modules/minipass-fetch/lib/index.js | 356 ++++++++++++++++++ .../minipass-fetch/lib/request.js | 281 ++++++++++++++ .../minipass-fetch/lib/response.js | 90 +++++ .../node_modules/minipass-fetch/package.json | 63 ++++ node_modules/make-fetch-happen/package.json | 14 +- package-lock.json | 78 ++-- package.json | 2 +- 20 files changed, 1696 insertions(+), 59 deletions(-) create mode 100644 node_modules/make-fetch-happen/node_modules/lru-cache/bundle/main.js create mode 100644 node_modules/make-fetch-happen/node_modules/lru-cache/bundle/main.mjs create mode 100644 node_modules/make-fetch-happen/node_modules/minipass-fetch/LICENSE create mode 100644 node_modules/make-fetch-happen/node_modules/minipass-fetch/lib/abort-error.js create mode 100644 node_modules/make-fetch-happen/node_modules/minipass-fetch/lib/blob.js create mode 100644 node_modules/make-fetch-happen/node_modules/minipass-fetch/lib/body.js create mode 100644 node_modules/make-fetch-happen/node_modules/minipass-fetch/lib/fetch-error.js create mode 100644 node_modules/make-fetch-happen/node_modules/minipass-fetch/lib/headers.js create mode 100644 node_modules/make-fetch-happen/node_modules/minipass-fetch/lib/index.js create mode 100644 node_modules/make-fetch-happen/node_modules/minipass-fetch/lib/request.js create mode 100644 node_modules/make-fetch-happen/node_modules/minipass-fetch/lib/response.js create mode 100644 node_modules/make-fetch-happen/node_modules/minipass-fetch/package.json diff --git a/node_modules/agentkeepalive/History.md b/node_modules/agentkeepalive/History.md index 00cb65f5db3c6..9db6e5576d42b 100644 --- a/node_modules/agentkeepalive/History.md +++ b/node_modules/agentkeepalive/History.md @@ -1,4 +1,10 @@ +4.2.1 / 2022-02-21 +================== + +**fixes** + * [[`8b13b5c`](http://github.com/node-modules/agentkeepalive/commit/8b13b5ca797f4779a0a8d393ad8ecb622cd27987)] - fix: explicitly set `| undefined` in type definitions (#99) (Benoit Lemoine <>) + 4.2.0 / 2021-12-31 ================== diff --git a/node_modules/agentkeepalive/index.d.ts b/node_modules/agentkeepalive/index.d.ts index 7840f9af423e1..c2ce7d207a64b 100644 --- a/node_modules/agentkeepalive/index.d.ts +++ b/node_modules/agentkeepalive/index.d.ts @@ -39,11 +39,11 @@ declare namespace AgentKeepAlive { } interface CommonHttpOption { - keepAlive?: boolean; - freeSocketTimeout?: number; - freeSocketKeepAliveTimeout?: number; - timeout?: number; - socketActiveTTL?: number; + keepAlive?: boolean | undefined; + freeSocketTimeout?: number | undefined; + freeSocketKeepAliveTimeout?: number | undefined; + timeout?: number | undefined; + socketActiveTTL?: number | undefined; } export interface HttpOptions extends http.AgentOptions, CommonHttpOption { } 
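Note on the typings change above (illustrative, not part of the patch): agentkeepalive 4.2.1 adds an explicit `| undefined` to each optional property, which presumably matters for consumers compiling with TypeScript's `exactOptionalPropertyTypes` flag — there, `timeout?: number` forbids assigning `undefined` explicitly, while `timeout?: number | undefined` allows it. A minimal sketch under that assumption, using made-up interface names that mirror the shape of `CommonHttpOption`:

// tsconfig assumption: { "compilerOptions": { "exactOptionalPropertyTypes": true } }

// Hypothetical interfaces, not from the patch, showing the two declaration styles.
interface StrictOptions {
  timeout?: number             // property may be omitted; explicit undefined is rejected
}

interface LooseOptions {
  timeout?: number | undefined // property may be omitted OR set to undefined explicitly
}

const omitted: StrictOptions = {}                          // ok with either declaration style
// const explicit: StrictOptions = { timeout: undefined }  // error under exactOptionalPropertyTypes
const allowed: LooseOptions = { timeout: undefined }       // ok only because of `| undefined`

export { omitted, allowed }

Without `exactOptionalPropertyTypes` the two declarations behave identically, which is why the change is type-only and needs no runtime code.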
diff --git a/node_modules/agentkeepalive/package.json b/node_modules/agentkeepalive/package.json index aba00ea14847a..efa561d2c6d6c 100644 --- a/node_modules/agentkeepalive/package.json +++ b/node_modules/agentkeepalive/package.json @@ -1,6 +1,6 @@ { "name": "agentkeepalive", - "version": "4.2.0", + "version": "4.2.1", "description": "Missing keepalive http.Agent", "main": "index.js", "browser": "browser.js", diff --git a/node_modules/make-fetch-happen/node_modules/lru-cache/bundle/main.js b/node_modules/make-fetch-happen/node_modules/lru-cache/bundle/main.js new file mode 100644 index 0000000000000..7eef327e92527 --- /dev/null +++ b/node_modules/make-fetch-happen/node_modules/lru-cache/bundle/main.js @@ -0,0 +1 @@ +(()=>{var t={10:t=>{const i="object"==typeof performance&&performance&&"function"==typeof performance.now?performance:Date,s=new Set,e=(t,i)=>{const s=`LRU_CACHE_OPTION_${t}`;l(s)&&o(s,`${t} option`,`options.${i}`,d)},h=(t,i)=>{const s=`LRU_CACHE_METHOD_${t}`;if(l(s)){const{prototype:e}=d,{get:h}=Object.getOwnPropertyDescriptor(e,t);o(s,`${t} method`,`cache.${i}()`,h)}},l=t=>!(process.noDeprecation||s.has(t)),o=(t,i,e,h)=>{s.add(t),process.emitWarning(`The ${i} is deprecated. Please use ${e} instead.`,"DeprecationWarning",t,h)},n=t=>t&&t===Math.floor(t)&&t>0&&isFinite(t),a=t=>n(t)?t<=Math.pow(2,8)?Uint8Array:t<=Math.pow(2,16)?Uint16Array:t<=Math.pow(2,32)?Uint32Array:t<=Number.MAX_SAFE_INTEGER?r:null:null;class r extends Array{constructor(t){super(t),this.fill(0)}}class p{constructor(t){const i=a(t);this.heap=new i(t),this.length=0}push(t){this.heap[this.length++]=t}pop(){return this.heap[--this.length]}}class d{constructor(t={}){const{max:i,ttl:s,ttlResolution:h=1,ttlAutopurge:l,updateAgeOnGet:o,allowStale:r,dispose:c,disposeAfter:f,noDisposeOnSet:u,noUpdateTTL:z,maxSize:v,sizeCalculation:y}=t,{length:g,maxAge:m,stale:S}=t instanceof d?{}:t;if(!n(i))throw new TypeError("max option must be an integer");const x=a(i);if(!x)throw new Error("invalid max value: "+i);if(this.max=i,this.maxSize=v||0,this.sizeCalculation=y||g,this.sizeCalculation){if(!this.maxSize)throw new TypeError("cannot set sizeCalculation without setting maxSize");if("function"!=typeof this.sizeCalculation)throw new TypeError("sizeCalculating set to non-function")}if(this.keyMap=new Map,this.keyList=new Array(i).fill(null),this.valList=new Array(i).fill(null),this.next=new x(i),this.prev=new x(i),this.head=0,this.tail=0,this.free=new p(i),this.initialFill=1,this.size=0,"function"==typeof c&&(this.dispose=c),"function"==typeof f?(this.disposeAfter=f,this.disposed=[]):(this.disposeAfter=null,this.disposed=null),this.noDisposeOnSet=!!u,this.noUpdateTTL=!!z,this.maxSize){if(!n(this.maxSize))throw new TypeError("maxSize must be a positive integer if specified");this.initializeSizeTracking()}if(this.allowStale=!!r||!!S,this.updateAgeOnGet=!!o,this.ttlResolution=n(h)||0===h?h:1,this.ttlAutopurge=!!l,this.ttl=s||m||0,this.ttl){if(!n(this.ttl))throw new TypeError("ttl must be a positive integer if specified");this.initializeTTLTracking()}S&&e("stale","allowStale"),m&&e("maxAge","ttl"),g&&e("length","sizeCalculation")}initializeTTLTracking(){this.ttls=new r(this.max),this.starts=new r(this.max),this.setItemTTL=(t,s)=>{if(this.starts[t]=0!==s?i.now():0,this.ttls[t]=s,0!==s&&this.ttlAutopurge){const i=setTimeout((()=>{this.isStale(t)&&this.delete(this.keyList[t])}),s+1);i.unref&&i.unref()}},this.updateItemAge=t=>{this.starts[t]=0!==this.ttls[t]?i.now():0};let t=0;const s=()=>{const 
s=i.now();if(this.ttlResolution>0){t=s;const i=setTimeout((()=>t=0),this.ttlResolution);i.unref&&i.unref()}return s};this.isStale=i=>0!==this.ttls[i]&&0!==this.starts[i]&&(t||s())-this.starts[i]>this.ttls[i]}updateItemAge(t){}setItemTTL(t,i){}isStale(t){return!1}initializeSizeTracking(){this.calculatedSize=0,this.sizes=new r(this.max),this.removeItemSize=t=>this.calculatedSize-=this.sizes[t],this.addItemSize=(t,i,s,e,h)=>{const l=e||(h?h(i,s):0);this.sizes[t]=n(l)?l:0;const o=this.maxSize-this.sizes[t];for(;this.calculatedSize>o;)this.evict();this.calculatedSize+=this.sizes[t]},this.delete=t=>{if(0!==this.size){const i=this.keyMap.get(t);void 0!==i&&(this.calculatedSize-=this.sizes[i])}return d.prototype.delete.call(this,t)}}removeItemSize(t){}addItemSize(t,i,s,e,h){}*indexes(){if(this.size)for(let t=this.tail;this.isStale(t)||(yield t),t!==this.head;t=this.prev[t]);}*rindexes(){if(this.size)for(let t=this.head;this.isStale(t)||(yield t),t!==this.tail;t=this.next[t]);}*entries(){for(const t of this.indexes())yield[this.keyList[t],this.valList[t]]}*keys(){for(const t of this.indexes())yield this.keyList[t]}*values(){for(const t of this.indexes())yield this.valList[t]}[Symbol.iterator](){return this.entries()}find(t,i={}){for(const s of this.indexes())if(t(this.valList[s],this.keyList[s],this))return this.get(this.keyList[s],i)}forEach(t,i=this){for(const s of this.indexes())t.call(i,this.valList[s],this.keyList[s],this)}rforEach(t,i=this){for(const s of this.rindexes())t.call(i,this.valList[s],this.keyList[s],this)}get prune(){return h("prune","purgeStale"),this.purgeStale}purgeStale(){let t=!1;if(this.size)for(let i=this.head;;i=this.next[i]){const s=i===this.tail;if(this.isStale(i)&&(this.delete(this.keyList[i]),t=!0),s)break}return t}dump(){const t=[];for(const i of this.indexes()){const s=this.keyList[i],e={value:this.valList[i]};this.ttls&&(e.ttl=this.ttls[i]),this.sizes&&(e.size=this.sizes[i]),t.unshift([s,e])}return t}load(t){this.clear();for(const[i,s]of t)this.set(i,s.value,s)}dispose(t,i,s){}set(t,i,{ttl:s=this.ttl,noDisposeOnSet:e=this.noDisposeOnSet,size:h=0,sizeCalculation:l=this.sizeCalculation,noUpdateTTL:o=this.noUpdateTTL}={}){let n=0===this.size?void 0:this.keyMap.get(t);if(void 0===n)n=this.newIndex(),this.keyList[n]=t,this.valList[n]=i,this.keyMap.set(t,n),this.next[this.tail]=n,this.prev[n]=this.tail,this.tail=n,this.size++,this.addItemSize(n,i,t,h,l),o=!1;else{const s=this.valList[n];i!==s&&(e||(this.dispose(s,t,"set"),this.disposeAfter&&this.disposed.push([s,t,"set"])),this.removeItemSize(n),this.valList[n]=i,this.addItemSize(n,i,t,h,l)),this.moveToTail(n)}if(0===s||0!==this.ttl||this.ttls||this.initializeTTLTracking(),o||this.setItemTTL(n,s),this.disposeAfter)for(;this.disposed.length;)this.disposeAfter(...this.disposed.shift());return this}newIndex(){return 0===this.size?this.tail:this.size===this.max?this.evict():0!==this.free.length?this.free.pop():this.initialFill++}pop(){if(this.size){const t=this.valList[this.head];return this.evict(),t}}evict(){const t=this.head,i=this.keyList[t],s=this.valList[t];return this.dispose(s,i,"evict"),this.disposeAfter&&this.disposed.push([s,i,"evict"]),this.removeItemSize(t),this.head=this.next[t],this.keyMap.delete(i),this.size--,t}has(t){return this.keyMap.has(t)&&!this.isStale(this.keyMap.get(t))}peek(t,{allowStale:i=this.allowStale}={}){const s=this.keyMap.get(t);if(void 0!==s&&(i||!this.isStale(s)))return this.valList[s]}get(t,{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet}={}){const 
e=this.keyMap.get(t);if(void 0!==e){if(this.isStale(e)){const s=i?this.valList[e]:void 0;return this.delete(t),s}return this.moveToTail(e),s&&this.updateItemAge(e),this.valList[e]}}connect(t,i){this.prev[i]=t,this.next[t]=i}moveToTail(t){t!==this.tail&&(t===this.head?this.head=this.next[t]:this.connect(this.prev[t],this.next[t]),this.connect(this.tail,t),this.tail=t)}get del(){return h("del","delete"),this.delete}delete(t){let i=!1;if(0!==this.size){const s=this.keyMap.get(t);void 0!==s&&(i=!0,1===this.size?this.clear():(this.removeItemSize(s),this.dispose(this.valList[s],t,"delete"),this.disposeAfter&&this.disposed.push([this.valList[s],t,"delete"]),this.keyMap.delete(t),this.keyList[s]=null,this.valList[s]=null,s===this.tail?this.tail=this.prev[s]:s===this.head?this.head=this.next[s]:(this.next[this.prev[s]]=this.next[s],this.prev[this.next[s]]=this.prev[s]),this.size--,this.free.push(s)))}if(this.disposed)for(;this.disposed.length;)this.disposeAfter(...this.disposed.shift());return i}clear(){if(this.dispose!==d.prototype.dispose)for(const t of this.rindexes())this.dispose(this.valList[t],this.keyList[t],"delete");if(this.disposeAfter)for(const t of this.rindexes())this.disposed.push([this.valList[t],this.keyList[t],"delete"]);if(this.keyMap.clear(),this.valList.fill(null),this.keyList.fill(null),this.ttls&&(this.ttls.fill(0),this.starts.fill(0)),this.sizes&&this.sizes.fill(0),this.head=0,this.tail=0,this.initialFill=1,this.free.length=0,this.calculatedSize=0,this.size=0,this.disposed)for(;this.disposed.length;)this.disposeAfter(...this.disposed.shift())}get reset(){return h("reset","clear"),this.clear}get length(){return((t,i)=>{const s="LRU_CACHE_PROPERTY_length";if(l(s)){const{prototype:i}=d,{get:e}=Object.getOwnPropertyDescriptor(i,t);o(s,"length property","cache.size",e)}})("length"),this.size}}t.exports=d}},i={},s=function s(e){var h=i[e];if(void 0!==h)return h.exports;var l=i[e]={exports:{}};return t[e](l,l.exports,s),l.exports}(10);module.exports=s})(); \ No newline at end of file diff --git a/node_modules/make-fetch-happen/node_modules/lru-cache/bundle/main.mjs b/node_modules/make-fetch-happen/node_modules/lru-cache/bundle/main.mjs new file mode 100644 index 0000000000000..3a4d674c07a41 --- /dev/null +++ b/node_modules/make-fetch-happen/node_modules/lru-cache/bundle/main.mjs @@ -0,0 +1 @@ +var t={10:t=>{const i="object"==typeof performance&&performance&&"function"==typeof performance.now?performance:Date,s=new Set,e=(t,i)=>{const s=`LRU_CACHE_OPTION_${t}`;l(s)&&o(s,`${t} option`,`options.${i}`,d)},h=(t,i)=>{const s=`LRU_CACHE_METHOD_${t}`;if(l(s)){const{prototype:e}=d,{get:h}=Object.getOwnPropertyDescriptor(e,t);o(s,`${t} method`,`cache.${i}()`,h)}},l=t=>!(process.noDeprecation||s.has(t)),o=(t,i,e,h)=>{s.add(t),process.emitWarning(`The ${i} is deprecated. 
Please use ${e} instead.`,"DeprecationWarning",t,h)},n=t=>t&&t===Math.floor(t)&&t>0&&isFinite(t),a=t=>n(t)?t<=Math.pow(2,8)?Uint8Array:t<=Math.pow(2,16)?Uint16Array:t<=Math.pow(2,32)?Uint32Array:t<=Number.MAX_SAFE_INTEGER?r:null:null;class r extends Array{constructor(t){super(t),this.fill(0)}}class p{constructor(t){const i=a(t);this.heap=new i(t),this.length=0}push(t){this.heap[this.length++]=t}pop(){return this.heap[--this.length]}}class d{constructor(t={}){const{max:i,ttl:s,ttlResolution:h=1,ttlAutopurge:l,updateAgeOnGet:o,allowStale:r,dispose:c,disposeAfter:f,noDisposeOnSet:u,noUpdateTTL:z,maxSize:v,sizeCalculation:y}=t,{length:g,maxAge:m,stale:S}=t instanceof d?{}:t;if(!n(i))throw new TypeError("max option must be an integer");const L=a(i);if(!L)throw new Error("invalid max value: "+i);if(this.max=i,this.maxSize=v||0,this.sizeCalculation=y||g,this.sizeCalculation){if(!this.maxSize)throw new TypeError("cannot set sizeCalculation without setting maxSize");if("function"!=typeof this.sizeCalculation)throw new TypeError("sizeCalculating set to non-function")}if(this.keyMap=new Map,this.keyList=new Array(i).fill(null),this.valList=new Array(i).fill(null),this.next=new L(i),this.prev=new L(i),this.head=0,this.tail=0,this.free=new p(i),this.initialFill=1,this.size=0,"function"==typeof c&&(this.dispose=c),"function"==typeof f?(this.disposeAfter=f,this.disposed=[]):(this.disposeAfter=null,this.disposed=null),this.noDisposeOnSet=!!u,this.noUpdateTTL=!!z,this.maxSize){if(!n(this.maxSize))throw new TypeError("maxSize must be a positive integer if specified");this.initializeSizeTracking()}if(this.allowStale=!!r||!!S,this.updateAgeOnGet=!!o,this.ttlResolution=n(h)||0===h?h:1,this.ttlAutopurge=!!l,this.ttl=s||m||0,this.ttl){if(!n(this.ttl))throw new TypeError("ttl must be a positive integer if specified");this.initializeTTLTracking()}S&&e("stale","allowStale"),m&&e("maxAge","ttl"),g&&e("length","sizeCalculation")}initializeTTLTracking(){this.ttls=new r(this.max),this.starts=new r(this.max),this.setItemTTL=(t,s)=>{if(this.starts[t]=0!==s?i.now():0,this.ttls[t]=s,0!==s&&this.ttlAutopurge){const i=setTimeout((()=>{this.isStale(t)&&this.delete(this.keyList[t])}),s+1);i.unref&&i.unref()}},this.updateItemAge=t=>{this.starts[t]=0!==this.ttls[t]?i.now():0};let t=0;const s=()=>{const s=i.now();if(this.ttlResolution>0){t=s;const i=setTimeout((()=>t=0),this.ttlResolution);i.unref&&i.unref()}return s};this.isStale=i=>0!==this.ttls[i]&&0!==this.starts[i]&&(t||s())-this.starts[i]>this.ttls[i]}updateItemAge(t){}setItemTTL(t,i){}isStale(t){return!1}initializeSizeTracking(){this.calculatedSize=0,this.sizes=new r(this.max),this.removeItemSize=t=>this.calculatedSize-=this.sizes[t],this.addItemSize=(t,i,s,e,h)=>{const l=e||(h?h(i,s):0);this.sizes[t]=n(l)?l:0;const o=this.maxSize-this.sizes[t];for(;this.calculatedSize>o;)this.evict();this.calculatedSize+=this.sizes[t]},this.delete=t=>{if(0!==this.size){const i=this.keyMap.get(t);void 0!==i&&(this.calculatedSize-=this.sizes[i])}return d.prototype.delete.call(this,t)}}removeItemSize(t){}addItemSize(t,i,s,e,h){}*indexes(){if(this.size)for(let t=this.tail;this.isStale(t)||(yield t),t!==this.head;t=this.prev[t]);}*rindexes(){if(this.size)for(let t=this.head;this.isStale(t)||(yield t),t!==this.tail;t=this.next[t]);}*entries(){for(const t of this.indexes())yield[this.keyList[t],this.valList[t]]}*keys(){for(const t of this.indexes())yield this.keyList[t]}*values(){for(const t of this.indexes())yield this.valList[t]}[Symbol.iterator](){return this.entries()}find(t,i={}){for(const 
s of this.indexes())if(t(this.valList[s],this.keyList[s],this))return this.get(this.keyList[s],i)}forEach(t,i=this){for(const s of this.indexes())t.call(i,this.valList[s],this.keyList[s],this)}rforEach(t,i=this){for(const s of this.rindexes())t.call(i,this.valList[s],this.keyList[s],this)}get prune(){return h("prune","purgeStale"),this.purgeStale}purgeStale(){let t=!1;if(this.size)for(let i=this.head;;i=this.next[i]){const s=i===this.tail;if(this.isStale(i)&&(this.delete(this.keyList[i]),t=!0),s)break}return t}dump(){const t=[];for(const i of this.indexes()){const s=this.keyList[i],e={value:this.valList[i]};this.ttls&&(e.ttl=this.ttls[i]),this.sizes&&(e.size=this.sizes[i]),t.unshift([s,e])}return t}load(t){this.clear();for(const[i,s]of t)this.set(i,s.value,s)}dispose(t,i,s){}set(t,i,{ttl:s=this.ttl,noDisposeOnSet:e=this.noDisposeOnSet,size:h=0,sizeCalculation:l=this.sizeCalculation,noUpdateTTL:o=this.noUpdateTTL}={}){let n=0===this.size?void 0:this.keyMap.get(t);if(void 0===n)n=this.newIndex(),this.keyList[n]=t,this.valList[n]=i,this.keyMap.set(t,n),this.next[this.tail]=n,this.prev[n]=this.tail,this.tail=n,this.size++,this.addItemSize(n,i,t,h,l),o=!1;else{const s=this.valList[n];i!==s&&(e||(this.dispose(s,t,"set"),this.disposeAfter&&this.disposed.push([s,t,"set"])),this.removeItemSize(n),this.valList[n]=i,this.addItemSize(n,i,t,h,l)),this.moveToTail(n)}if(0===s||0!==this.ttl||this.ttls||this.initializeTTLTracking(),o||this.setItemTTL(n,s),this.disposeAfter)for(;this.disposed.length;)this.disposeAfter(...this.disposed.shift());return this}newIndex(){return 0===this.size?this.tail:this.size===this.max?this.evict():0!==this.free.length?this.free.pop():this.initialFill++}pop(){if(this.size){const t=this.valList[this.head];return this.evict(),t}}evict(){const t=this.head,i=this.keyList[t],s=this.valList[t];return this.dispose(s,i,"evict"),this.disposeAfter&&this.disposed.push([s,i,"evict"]),this.removeItemSize(t),this.head=this.next[t],this.keyMap.delete(i),this.size--,t}has(t){return this.keyMap.has(t)&&!this.isStale(this.keyMap.get(t))}peek(t,{allowStale:i=this.allowStale}={}){const s=this.keyMap.get(t);if(void 0!==s&&(i||!this.isStale(s)))return this.valList[s]}get(t,{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet}={}){const e=this.keyMap.get(t);if(void 0!==e){if(this.isStale(e)){const s=i?this.valList[e]:void 0;return this.delete(t),s}return this.moveToTail(e),s&&this.updateItemAge(e),this.valList[e]}}connect(t,i){this.prev[i]=t,this.next[t]=i}moveToTail(t){t!==this.tail&&(t===this.head?this.head=this.next[t]:this.connect(this.prev[t],this.next[t]),this.connect(this.tail,t),this.tail=t)}get del(){return h("del","delete"),this.delete}delete(t){let i=!1;if(0!==this.size){const s=this.keyMap.get(t);void 0!==s&&(i=!0,1===this.size?this.clear():(this.removeItemSize(s),this.dispose(this.valList[s],t,"delete"),this.disposeAfter&&this.disposed.push([this.valList[s],t,"delete"]),this.keyMap.delete(t),this.keyList[s]=null,this.valList[s]=null,s===this.tail?this.tail=this.prev[s]:s===this.head?this.head=this.next[s]:(this.next[this.prev[s]]=this.next[s],this.prev[this.next[s]]=this.prev[s]),this.size--,this.free.push(s)))}if(this.disposed)for(;this.disposed.length;)this.disposeAfter(...this.disposed.shift());return i}clear(){if(this.dispose!==d.prototype.dispose)for(const t of this.rindexes())this.dispose(this.valList[t],this.keyList[t],"delete");if(this.disposeAfter)for(const t of 
this.rindexes())this.disposed.push([this.valList[t],this.keyList[t],"delete"]);if(this.keyMap.clear(),this.valList.fill(null),this.keyList.fill(null),this.ttls&&(this.ttls.fill(0),this.starts.fill(0)),this.sizes&&this.sizes.fill(0),this.head=0,this.tail=0,this.initialFill=1,this.free.length=0,this.calculatedSize=0,this.size=0,this.disposed)for(;this.disposed.length;)this.disposeAfter(...this.disposed.shift())}get reset(){return h("reset","clear"),this.clear}get length(){return((t,i)=>{const s="LRU_CACHE_PROPERTY_length";if(l(s)){const{prototype:i}=d,{get:e}=Object.getOwnPropertyDescriptor(i,t);o(s,"length property","cache.size",e)}})("length"),this.size}}t.exports=d}},i={};!function s(e){var h=i[e];if(void 0!==h)return h.exports;var l=i[e]={exports:{}};return t[e](l,l.exports,s),l.exports}(10); \ No newline at end of file diff --git a/node_modules/make-fetch-happen/node_modules/lru-cache/index.js b/node_modules/make-fetch-happen/node_modules/lru-cache/index.js index ede2f30cc4b23..e9b2f37013e72 100644 --- a/node_modules/make-fetch-happen/node_modules/lru-cache/index.js +++ b/node_modules/make-fetch-happen/node_modules/lru-cache/index.js @@ -2,32 +2,32 @@ const perf = typeof performance === 'object' && performance && typeof performance.now === 'function' ? performance : Date const warned = new Set() -const deprecatedOption = (opt, msg) => { +const deprecatedOption = (opt, instead) => { const code = `LRU_CACHE_OPTION_${opt}` if (shouldWarn(code)) { - warn(code, `The ${opt} option is deprecated. ${msg}`, LRUCache) + warn(code, `${opt} option`, `options.${instead}`, LRUCache) } } -const deprecatedMethod = (method, msg) => { +const deprecatedMethod = (method, instead) => { const code = `LRU_CACHE_METHOD_${method}` if (shouldWarn(code)) { const { prototype } = LRUCache const { get } = Object.getOwnPropertyDescriptor(prototype, method) - warn(code, `The ${method} method is deprecated. ${msg}`, get) + warn(code, `${method} method`, `cache.${instead}()`, get) } } -const deprecatedProperty = (field, msg) => { +const deprecatedProperty = (field, instead) => { const code = `LRU_CACHE_PROPERTY_${field}` if (shouldWarn(code)) { const { prototype } = LRUCache const { get } = Object.getOwnPropertyDescriptor(prototype, field) - warn(code, `The ${field} property is deprecated. ${msg}`, get) + warn(code, `${field} property`, `cache.${instead}`, get) } } const shouldWarn = (code) => !(process.noDeprecation || warned.has(code)) -const warn = (code, msg, fn) => { +const warn = (code, what, instead, fn) => { warned.add(code) - process.emitWarning(msg, 'DeprecationWarning', code, fn) + process.emitWarning(`The ${what} is deprecated. 
Please use ${instead} instead.`, 'DeprecationWarning', code, fn) } const isPosInt = n => n && n === Math.floor(n) && n > 0 && isFinite(n) @@ -80,6 +80,7 @@ class LRUCache { dispose, disposeAfter, noDisposeOnSet, + noUpdateTTL, maxSize, sizeCalculation, } = options @@ -134,6 +135,7 @@ class LRUCache { this.disposed = null } this.noDisposeOnSet = !!noDisposeOnSet + this.noUpdateTTL = !!noUpdateTTL if (this.maxSize) { if (!isPosInt(this.maxSize)) { @@ -156,13 +158,13 @@ class LRUCache { } if (stale) { - deprecatedOption('stale', 'please use options.allowStale instead') + deprecatedOption('stale', 'allowStale') } if (maxAge) { - deprecatedOption('maxAge', 'please use options.ttl instead') + deprecatedOption('maxAge', 'ttl') } if (length) { - deprecatedOption('length', 'please use options.sizeCalculation instead') + deprecatedOption('length', 'sizeCalculation') } } @@ -305,7 +307,7 @@ class LRUCache { } get prune () { - deprecatedMethod('prune', 'Please use cache.purgeStale() instead.') + deprecatedMethod('prune', 'purgeStale') return this.purgeStale } @@ -357,6 +359,7 @@ class LRUCache { noDisposeOnSet = this.noDisposeOnSet, size = 0, sizeCalculation = this.sizeCalculation, + noUpdateTTL = this.noUpdateTTL, } = {}) { let index = this.size === 0 ? undefined : this.keyMap.get(k) if (index === undefined) { @@ -370,6 +373,7 @@ class LRUCache { this.tail = index this.size ++ this.addItemSize(index, v, k, size, sizeCalculation) + noUpdateTTL = false } else { // update const oldVal = this.valList[index] @@ -389,7 +393,9 @@ class LRUCache { if (ttl !== 0 && this.ttl === 0 && !this.ttls) { this.initializeTTLTracking() } - this.setItemTTL(index, ttl) + if (!noUpdateTTL) { + this.setItemTTL(index, ttl) + } if (this.disposeAfter) { while (this.disposed.length) { this.disposeAfter(...this.disposed.shift()) @@ -493,7 +499,7 @@ class LRUCache { } get del () { - deprecatedMethod('del', 'Please use cache.delete() instead.') + deprecatedMethod('del', 'delete') return this.delete } delete (k) { @@ -568,12 +574,12 @@ class LRUCache { } } get reset () { - deprecatedMethod('reset', 'Please use cache.clear() instead.') + deprecatedMethod('reset', 'clear') return this.clear } get length () { - deprecatedProperty('length', 'Please use cache.size instead.') + deprecatedProperty('length', 'size') return this.size } } diff --git a/node_modules/make-fetch-happen/node_modules/lru-cache/package.json b/node_modules/make-fetch-happen/node_modules/lru-cache/package.json index 66dbbd9c11503..ae92116975dc9 100644 --- a/node_modules/make-fetch-happen/node_modules/lru-cache/package.json +++ b/node_modules/make-fetch-happen/node_modules/lru-cache/package.json @@ -1,7 +1,7 @@ { "name": "lru-cache", "description": "A cache object that deletes the least-recently-used items.", - "version": "7.3.1", + "version": "7.4.0", "author": "Isaac Z. 
Schlueter ", "keywords": [ "mru", @@ -9,26 +9,44 @@ "cache" ], "scripts": { + "prepare": "webpack-cli -o bundle ./index.js --node-env production", + "build": "npm run prepare", + "presize": "npm run prepare", "test": "tap", "snap": "tap", + "size": "size-limit", "preversion": "npm test", "postversion": "npm publish", "prepublishOnly": "git push origin --follow-tags" }, "main": "index.js", + "browser": "./bundle/main.js", + "exports": { + ".": "./index.js", + "./browser": "./bundle/main.js" + }, "repository": "git://github.com/isaacs/node-lru-cache.git", "devDependencies": { + "@size-limit/preset-small-lib": "^7.0.8", "benchmark": "^2.1.4", - "tap": "^15.1.6" + "size-limit": "^7.0.8", + "tap": "^15.1.6", + "webpack-cli": "^4.9.2" }, "license": "ISC", "files": [ - "index.js" + "index.js", + "bundle" ], "engines": { "node": ">=12" }, "tap": { "coverage-map": "map.js" - } + }, + "size-limit": [ + { + "path": "./bundle/main.js" + } + ] } diff --git a/node_modules/make-fetch-happen/node_modules/minipass-fetch/LICENSE b/node_modules/make-fetch-happen/node_modules/minipass-fetch/LICENSE new file mode 100644 index 0000000000000..3c3410cdc12ee --- /dev/null +++ b/node_modules/make-fetch-happen/node_modules/minipass-fetch/LICENSE @@ -0,0 +1,28 @@ +The MIT License (MIT) + +Copyright (c) Isaac Z. Schlueter and Contributors +Copyright (c) 2016 David Frank + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +--- + +Note: This is a derivative work based on "node-fetch" by David Frank, +modified and distributed under the terms of the MIT license above. 
+https://github.com/bitinn/node-fetch diff --git a/node_modules/make-fetch-happen/node_modules/minipass-fetch/lib/abort-error.js b/node_modules/make-fetch-happen/node_modules/minipass-fetch/lib/abort-error.js new file mode 100644 index 0000000000000..b18f643269e37 --- /dev/null +++ b/node_modules/make-fetch-happen/node_modules/minipass-fetch/lib/abort-error.js @@ -0,0 +1,17 @@ +'use strict' +class AbortError extends Error { + constructor (message) { + super(message) + this.code = 'FETCH_ABORTED' + this.type = 'aborted' + Error.captureStackTrace(this, this.constructor) + } + + get name () { + return 'AbortError' + } + + // don't allow name to be overridden, but don't throw either + set name (s) {} +} +module.exports = AbortError diff --git a/node_modules/make-fetch-happen/node_modules/minipass-fetch/lib/blob.js b/node_modules/make-fetch-happen/node_modules/minipass-fetch/lib/blob.js new file mode 100644 index 0000000000000..efe69a34458af --- /dev/null +++ b/node_modules/make-fetch-happen/node_modules/minipass-fetch/lib/blob.js @@ -0,0 +1,97 @@ +'use strict' +const Minipass = require('minipass') +const TYPE = Symbol('type') +const BUFFER = Symbol('buffer') + +class Blob { + constructor (blobParts, options) { + this[TYPE] = '' + + const buffers = [] + let size = 0 + + if (blobParts) { + const a = blobParts + const length = Number(a.length) + for (let i = 0; i < length; i++) { + const element = a[i] + const buffer = element instanceof Buffer ? element + : ArrayBuffer.isView(element) + ? Buffer.from(element.buffer, element.byteOffset, element.byteLength) + : element instanceof ArrayBuffer ? Buffer.from(element) + : element instanceof Blob ? element[BUFFER] + : typeof element === 'string' ? Buffer.from(element) + : Buffer.from(String(element)) + size += buffer.length + buffers.push(buffer) + } + } + + this[BUFFER] = Buffer.concat(buffers, size) + + const type = options && options.type !== undefined + && String(options.type).toLowerCase() + if (type && !/[^\u0020-\u007E]/.test(type)) { + this[TYPE] = type + } + } + + get size () { + return this[BUFFER].length + } + + get type () { + return this[TYPE] + } + + text () { + return Promise.resolve(this[BUFFER].toString()) + } + + arrayBuffer () { + const buf = this[BUFFER] + const off = buf.byteOffset + const len = buf.byteLength + const ab = buf.buffer.slice(off, off + len) + return Promise.resolve(ab) + } + + stream () { + return new Minipass().end(this[BUFFER]) + } + + slice (start, end, type) { + const size = this.size + const relativeStart = start === undefined ? 0 + : start < 0 ? Math.max(size + start, 0) + : Math.min(start, size) + const relativeEnd = end === undefined ? size + : end < 0 ? 
Math.max(size + end, 0) + : Math.min(end, size) + const span = Math.max(relativeEnd - relativeStart, 0) + + const buffer = this[BUFFER] + const slicedBuffer = buffer.slice( + relativeStart, + relativeStart + span + ) + const blob = new Blob([], { type }) + blob[BUFFER] = slicedBuffer + return blob + } + + get [Symbol.toStringTag] () { + return 'Blob' + } + + static get BUFFER () { + return BUFFER + } +} + +Object.defineProperties(Blob.prototype, { + size: { enumerable: true }, + type: { enumerable: true }, +}) + +module.exports = Blob diff --git a/node_modules/make-fetch-happen/node_modules/minipass-fetch/lib/body.js b/node_modules/make-fetch-happen/node_modules/minipass-fetch/lib/body.js new file mode 100644 index 0000000000000..2b8e3bd2f9e83 --- /dev/null +++ b/node_modules/make-fetch-happen/node_modules/minipass-fetch/lib/body.js @@ -0,0 +1,346 @@ +'use strict' +const Minipass = require('minipass') +const MinipassSized = require('minipass-sized') + +const Blob = require('./blob.js') +const { BUFFER } = Blob +const FetchError = require('./fetch-error.js') + +// optional dependency on 'encoding' +let convert +try { + convert = require('encoding').convert +} catch (e) {} + +const INTERNALS = Symbol('Body internals') +const CONSUME_BODY = Symbol('consumeBody') + +class Body { + constructor (bodyArg, options = {}) { + const { size = 0, timeout = 0 } = options + const body = bodyArg === undefined || bodyArg === null ? null + : isURLSearchParams(bodyArg) ? Buffer.from(bodyArg.toString()) + : isBlob(bodyArg) ? bodyArg + : Buffer.isBuffer(bodyArg) ? bodyArg + : Object.prototype.toString.call(bodyArg) === '[object ArrayBuffer]' + ? Buffer.from(bodyArg) + : ArrayBuffer.isView(bodyArg) + ? Buffer.from(bodyArg.buffer, bodyArg.byteOffset, bodyArg.byteLength) + : Minipass.isStream(bodyArg) ? bodyArg + : Buffer.from(String(bodyArg)) + + this[INTERNALS] = { + body, + disturbed: false, + error: null, + } + + this.size = size + this.timeout = timeout + + if (Minipass.isStream(body)) { + body.on('error', er => { + const error = er.name === 'AbortError' ? 
er + : new FetchError(`Invalid response while trying to fetch ${ + this.url}: ${er.message}`, 'system', er) + this[INTERNALS].error = error + }) + } + } + + get body () { + return this[INTERNALS].body + } + + get bodyUsed () { + return this[INTERNALS].disturbed + } + + arrayBuffer () { + return this[CONSUME_BODY]().then(buf => + buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength)) + } + + blob () { + const ct = this.headers && this.headers.get('content-type') || '' + return this[CONSUME_BODY]().then(buf => Object.assign( + new Blob([], { type: ct.toLowerCase() }), + { [BUFFER]: buf } + )) + } + + json () { + return this[CONSUME_BODY]().then(buf => { + try { + return JSON.parse(buf.toString()) + } catch (er) { + return Promise.reject(new FetchError( + `invalid json response body at ${ + this.url} reason: ${er.message}`, 'invalid-json')) + } + }) + } + + text () { + return this[CONSUME_BODY]().then(buf => buf.toString()) + } + + buffer () { + return this[CONSUME_BODY]() + } + + textConverted () { + return this[CONSUME_BODY]().then(buf => convertBody(buf, this.headers)) + } + + [CONSUME_BODY] () { + if (this[INTERNALS].disturbed) { + return Promise.reject(new TypeError(`body used already for: ${ + this.url}`)) + } + + this[INTERNALS].disturbed = true + + if (this[INTERNALS].error) { + return Promise.reject(this[INTERNALS].error) + } + + // body is null + if (this.body === null) { + return Promise.resolve(Buffer.alloc(0)) + } + + if (Buffer.isBuffer(this.body)) { + return Promise.resolve(this.body) + } + + const upstream = isBlob(this.body) ? this.body.stream() : this.body + + /* istanbul ignore if: should never happen */ + if (!Minipass.isStream(upstream)) { + return Promise.resolve(Buffer.alloc(0)) + } + + const stream = this.size && upstream instanceof MinipassSized ? upstream + : !this.size && upstream instanceof Minipass && + !(upstream instanceof MinipassSized) ? upstream + : this.size ? new MinipassSized({ size: this.size }) + : new Minipass() + + // allow timeout on slow response body + const resTimeout = this.timeout ? setTimeout(() => { + stream.emit('error', new FetchError( + `Response timeout while trying to fetch ${ + this.url} (over ${this.timeout}ms)`, 'body-timeout')) + }, this.timeout) : null + + // do not keep the process open just for this timeout, even + // though we expect it'll get cleared eventually. + if (resTimeout && resTimeout.unref) { + resTimeout.unref() + } + + // do the pipe in the promise, because the pipe() can send too much + // data through right away and upset the MP Sized object + return new Promise((resolve, reject) => { + // if the stream is some other kind of stream, then pipe through a MP + // so we can collect it more easily. 
+ if (stream !== upstream) { + upstream.on('error', er => stream.emit('error', er)) + upstream.pipe(stream) + } + resolve() + }).then(() => stream.concat()).then(buf => { + clearTimeout(resTimeout) + return buf + }).catch(er => { + clearTimeout(resTimeout) + // request was aborted, reject with this Error + if (er.name === 'AbortError' || er.name === 'FetchError') { + throw er + } else if (er.name === 'RangeError') { + throw new FetchError(`Could not create Buffer from response body for ${ + this.url}: ${er.message}`, 'system', er) + } else { + // other errors, such as incorrect content-encoding or content-length + throw new FetchError(`Invalid response body while trying to fetch ${ + this.url}: ${er.message}`, 'system', er) + } + }) + } + + static clone (instance) { + if (instance.bodyUsed) { + throw new Error('cannot clone body after it is used') + } + + const body = instance.body + + // check that body is a stream and not form-data object + // NB: can't clone the form-data object without having it as a dependency + if (Minipass.isStream(body) && typeof body.getBoundary !== 'function') { + // create a dedicated tee stream so that we don't lose data + // potentially sitting in the body stream's buffer by writing it + // immediately to p1 and not having it for p2. + const tee = new Minipass() + const p1 = new Minipass() + const p2 = new Minipass() + tee.on('error', er => { + p1.emit('error', er) + p2.emit('error', er) + }) + body.on('error', er => tee.emit('error', er)) + tee.pipe(p1) + tee.pipe(p2) + body.pipe(tee) + // set instance body to one fork, return the other + instance[INTERNALS].body = p1 + return p2 + } else { + return instance.body + } + } + + static extractContentType (body) { + return body === null || body === undefined ? null + : typeof body === 'string' ? 'text/plain;charset=UTF-8' + : isURLSearchParams(body) + ? 'application/x-www-form-urlencoded;charset=UTF-8' + : isBlob(body) ? body.type || null + : Buffer.isBuffer(body) ? null + : Object.prototype.toString.call(body) === '[object ArrayBuffer]' ? null + : ArrayBuffer.isView(body) ? null + : typeof body.getBoundary === 'function' + ? `multipart/form-data;boundary=${body.getBoundary()}` + : Minipass.isStream(body) ? null + : 'text/plain;charset=UTF-8' + } + + static getTotalBytes (instance) { + const { body } = instance + return (body === null || body === undefined) ? 0 + : isBlob(body) ? body.size + : Buffer.isBuffer(body) ? body.length + : body && typeof body.getLengthSync === 'function' && ( + // detect form data input from form-data module + body._lengthRetrievers && + /* istanbul ignore next */ body._lengthRetrievers.length === 0 || // 1.x + body.hasKnownLength && body.hasKnownLength()) // 2.x + ? body.getLengthSync() + : null + } + + static writeToStream (dest, instance) { + const { body } = instance + + if (body === null || body === undefined) { + dest.end() + } else if (Buffer.isBuffer(body) || typeof body === 'string') { + dest.end(body) + } else { + // body is stream or blob + const stream = isBlob(body) ? body.stream() : body + stream.on('error', er => dest.emit('error', er)).pipe(dest) + } + + return dest + } +} + +Object.defineProperties(Body.prototype, { + body: { enumerable: true }, + bodyUsed: { enumerable: true }, + arrayBuffer: { enumerable: true }, + blob: { enumerable: true }, + json: { enumerable: true }, + text: { enumerable: true }, +}) + +const isURLSearchParams = obj => + // Duck-typing as a necessary condition. 
+ (typeof obj !== 'object' || + typeof obj.append !== 'function' || + typeof obj.delete !== 'function' || + typeof obj.get !== 'function' || + typeof obj.getAll !== 'function' || + typeof obj.has !== 'function' || + typeof obj.set !== 'function') ? false + // Brand-checking and more duck-typing as optional condition. + : obj.constructor.name === 'URLSearchParams' || + Object.prototype.toString.call(obj) === '[object URLSearchParams]' || + typeof obj.sort === 'function' + +const isBlob = obj => + typeof obj === 'object' && + typeof obj.arrayBuffer === 'function' && + typeof obj.type === 'string' && + typeof obj.stream === 'function' && + typeof obj.constructor === 'function' && + typeof obj.constructor.name === 'string' && + /^(Blob|File)$/.test(obj.constructor.name) && + /^(Blob|File)$/.test(obj[Symbol.toStringTag]) + +const convertBody = (buffer, headers) => { + /* istanbul ignore if */ + if (typeof convert !== 'function') { + throw new Error('The package `encoding` must be installed to use the textConverted() function') + } + + const ct = headers && headers.get('content-type') + let charset = 'utf-8' + let res + + // header + if (ct) { + res = /charset=([^;]*)/i.exec(ct) + } + + // no charset in content type, peek at response body for at most 1024 bytes + const str = buffer.slice(0, 1024).toString() + + // html5 + if (!res && str) { + res = / this.expect + ? 'max-size' : type + this.message = message + Error.captureStackTrace(this, this.constructor) + } + + get name () { + return 'FetchError' + } + + // don't allow name to be overwritten + set name (n) {} + + get [Symbol.toStringTag] () { + return 'FetchError' + } +} +module.exports = FetchError diff --git a/node_modules/make-fetch-happen/node_modules/minipass-fetch/lib/headers.js b/node_modules/make-fetch-happen/node_modules/minipass-fetch/lib/headers.js new file mode 100644 index 0000000000000..dd6e854d5ba39 --- /dev/null +++ b/node_modules/make-fetch-happen/node_modules/minipass-fetch/lib/headers.js @@ -0,0 +1,267 @@ +'use strict' +const invalidTokenRegex = /[^^_`a-zA-Z\-0-9!#$%&'*+.|~]/ +const invalidHeaderCharRegex = /[^\t\x20-\x7e\x80-\xff]/ + +const validateName = name => { + name = `${name}` + if (invalidTokenRegex.test(name) || name === '') { + throw new TypeError(`${name} is not a legal HTTP header name`) + } +} + +const validateValue = value => { + value = `${value}` + if (invalidHeaderCharRegex.test(value)) { + throw new TypeError(`${value} is not a legal HTTP header value`) + } +} + +const find = (map, name) => { + name = name.toLowerCase() + for (const key in map) { + if (key.toLowerCase() === name) { + return key + } + } + return undefined +} + +const MAP = Symbol('map') +class Headers { + constructor (init = undefined) { + this[MAP] = Object.create(null) + if (init instanceof Headers) { + const rawHeaders = init.raw() + const headerNames = Object.keys(rawHeaders) + for (const headerName of headerNames) { + for (const value of rawHeaders[headerName]) { + this.append(headerName, value) + } + } + return + } + + // no-op + if (init === undefined || init === null) { + return + } + + if (typeof init === 'object') { + const method = init[Symbol.iterator] + if (method !== null && method !== undefined) { + if (typeof method !== 'function') { + throw new TypeError('Header pairs must be iterable') + } + + // sequence> + // Note: per spec we have to first exhaust the lists then process them + const pairs = [] + for (const pair of init) { + if (typeof pair !== 'object' || + typeof pair[Symbol.iterator] !== 'function') { + throw new 
TypeError('Each header pair must be iterable') + } + const arrPair = Array.from(pair) + if (arrPair.length !== 2) { + throw new TypeError('Each header pair must be a name/value tuple') + } + pairs.push(arrPair) + } + + for (const pair of pairs) { + this.append(pair[0], pair[1]) + } + } else { + // record + for (const key of Object.keys(init)) { + this.append(key, init[key]) + } + } + } else { + throw new TypeError('Provided initializer must be an object') + } + } + + get (name) { + name = `${name}` + validateName(name) + const key = find(this[MAP], name) + if (key === undefined) { + return null + } + + return this[MAP][key].join(', ') + } + + forEach (callback, thisArg = undefined) { + let pairs = getHeaders(this) + for (let i = 0; i < pairs.length; i++) { + const [name, value] = pairs[i] + callback.call(thisArg, value, name, this) + // refresh in case the callback added more headers + pairs = getHeaders(this) + } + } + + set (name, value) { + name = `${name}` + value = `${value}` + validateName(name) + validateValue(value) + const key = find(this[MAP], name) + this[MAP][key !== undefined ? key : name] = [value] + } + + append (name, value) { + name = `${name}` + value = `${value}` + validateName(name) + validateValue(value) + const key = find(this[MAP], name) + if (key !== undefined) { + this[MAP][key].push(value) + } else { + this[MAP][name] = [value] + } + } + + has (name) { + name = `${name}` + validateName(name) + return find(this[MAP], name) !== undefined + } + + delete (name) { + name = `${name}` + validateName(name) + const key = find(this[MAP], name) + if (key !== undefined) { + delete this[MAP][key] + } + } + + raw () { + return this[MAP] + } + + keys () { + return new HeadersIterator(this, 'key') + } + + values () { + return new HeadersIterator(this, 'value') + } + + [Symbol.iterator] () { + return new HeadersIterator(this, 'key+value') + } + + entries () { + return new HeadersIterator(this, 'key+value') + } + + get [Symbol.toStringTag] () { + return 'Headers' + } + + static exportNodeCompatibleHeaders (headers) { + const obj = Object.assign(Object.create(null), headers[MAP]) + + // http.request() only supports string as Host header. This hack makes + // specifying custom Host header possible. + const hostHeaderKey = find(headers[MAP], 'Host') + if (hostHeaderKey !== undefined) { + obj[hostHeaderKey] = obj[hostHeaderKey][0] + } + + return obj + } + + static createHeadersLenient (obj) { + const headers = new Headers() + for (const name of Object.keys(obj)) { + if (invalidTokenRegex.test(name)) { + continue + } + + if (Array.isArray(obj[name])) { + for (const val of obj[name]) { + if (invalidHeaderCharRegex.test(val)) { + continue + } + + if (headers[MAP][name] === undefined) { + headers[MAP][name] = [val] + } else { + headers[MAP][name].push(val) + } + } + } else if (!invalidHeaderCharRegex.test(obj[name])) { + headers[MAP][name] = [obj[name]] + } + } + return headers + } +} + +Object.defineProperties(Headers.prototype, { + get: { enumerable: true }, + forEach: { enumerable: true }, + set: { enumerable: true }, + append: { enumerable: true }, + has: { enumerable: true }, + delete: { enumerable: true }, + keys: { enumerable: true }, + values: { enumerable: true }, + entries: { enumerable: true }, +}) + +const getHeaders = (headers, kind = 'key+value') => + Object.keys(headers[MAP]).sort().map( + kind === 'key' ? k => k.toLowerCase() + : kind === 'value' ? 
k => headers[MAP][k].join(', ') + : k => [k.toLowerCase(), headers[MAP][k].join(', ')] + ) + +const INTERNAL = Symbol('internal') + +class HeadersIterator { + constructor (target, kind) { + this[INTERNAL] = { + target, + kind, + index: 0, + } + } + + get [Symbol.toStringTag] () { + return 'HeadersIterator' + } + + next () { + /* istanbul ignore if: should be impossible */ + if (!this || Object.getPrototypeOf(this) !== HeadersIterator.prototype) { + throw new TypeError('Value of `this` is not a HeadersIterator') + } + + const { target, kind, index } = this[INTERNAL] + const values = getHeaders(target, kind) + const len = values.length + if (index >= len) { + return { + value: undefined, + done: true, + } + } + + this[INTERNAL].index++ + + return { value: values[index], done: false } + } +} + +// manually extend because 'extends' requires a ctor +Object.setPrototypeOf(HeadersIterator.prototype, + Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]()))) + +module.exports = Headers diff --git a/node_modules/make-fetch-happen/node_modules/minipass-fetch/lib/index.js b/node_modules/make-fetch-happen/node_modules/minipass-fetch/lib/index.js new file mode 100644 index 0000000000000..473630e1a5857 --- /dev/null +++ b/node_modules/make-fetch-happen/node_modules/minipass-fetch/lib/index.js @@ -0,0 +1,356 @@ +'use strict' +const { URL } = require('url') +const http = require('http') +const https = require('https') +const zlib = require('minizlib') +const Minipass = require('minipass') + +const Body = require('./body.js') +const { writeToStream, getTotalBytes } = Body +const Response = require('./response.js') +const Headers = require('./headers.js') +const { createHeadersLenient } = Headers +const Request = require('./request.js') +const { getNodeRequestOptions } = Request +const FetchError = require('./fetch-error.js') +const AbortError = require('./abort-error.js') + +// XXX this should really be split up and unit-ized for easier testing +// and better DRY implementation of data/http request aborting +const fetch = async (url, opts) => { + if (/^data:/.test(url)) { + const request = new Request(url, opts) + // delay 1 promise tick so that the consumer can abort right away + return Promise.resolve().then(() => new Promise((resolve, reject) => { + let type, data + try { + const { pathname, search } = new URL(url) + const split = pathname.split(',') + if (split.length < 2) { + throw new Error('invalid data: URI') + } + const mime = split.shift() + const base64 = /;base64$/.test(mime) + type = base64 ? mime.slice(0, -1 * ';base64'.length) : mime + const rawData = decodeURIComponent(split.join(',') + search) + data = base64 ? Buffer.from(rawData, 'base64') : Buffer.from(rawData) + } catch (er) { + return reject(new FetchError(`[${request.method}] ${ + request.url} invalid URL, ${er.message}`, 'system', er)) + } + + const { signal } = request + if (signal && signal.aborted) { + return reject(new AbortError('The user aborted a request.')) + } + + const headers = { 'Content-Length': data.length } + if (type) { + headers['Content-Type'] = type + } + return resolve(new Response(data, { headers })) + })) + } + + return new Promise((resolve, reject) => { + // build request object + const request = new Request(url, opts) + let options + try { + options = getNodeRequestOptions(request) + } catch (er) { + return reject(er) + } + + const send = (options.protocol === 'https:' ? 
https : http).request + const { signal } = request + let response = null + const abort = () => { + const error = new AbortError('The user aborted a request.') + reject(error) + if (Minipass.isStream(request.body) && + typeof request.body.destroy === 'function') { + request.body.destroy(error) + } + if (response && response.body) { + response.body.emit('error', error) + } + } + + if (signal && signal.aborted) { + return abort() + } + + const abortAndFinalize = () => { + abort() + finalize() + } + + const finalize = () => { + req.abort() + if (signal) { + signal.removeEventListener('abort', abortAndFinalize) + } + clearTimeout(reqTimeout) + } + + // send request + const req = send(options) + + if (signal) { + signal.addEventListener('abort', abortAndFinalize) + } + + let reqTimeout = null + if (request.timeout) { + req.once('socket', socket => { + reqTimeout = setTimeout(() => { + reject(new FetchError(`network timeout at: ${ + request.url}`, 'request-timeout')) + finalize() + }, request.timeout) + }) + } + + req.on('error', er => { + // if a 'response' event is emitted before the 'error' event, then by the + // time this handler is run it's too late to reject the Promise for the + // response. instead, we forward the error event to the response stream + // so that the error will surface to the user when they try to consume + // the body. this is done as a side effect of aborting the request except + // for in windows, where we must forward the event manually, otherwise + // there is no longer a ref'd socket attached to the request and the + // stream never ends so the event loop runs out of work and the process + // exits without warning. + // coverage skipped here due to the difficulty in testing + // istanbul ignore next + if (req.res) { + req.res.emit('error', er) + } + reject(new FetchError(`request to ${request.url} failed, reason: ${ + er.message}`, 'system', er)) + finalize() + }) + + req.on('response', res => { + clearTimeout(reqTimeout) + + const headers = createHeadersLenient(res.headers) + + // HTTP fetch step 5 + if (fetch.isRedirect(res.statusCode)) { + // HTTP fetch step 5.2 + const location = headers.get('Location') + + // HTTP fetch step 5.3 + const locationURL = location === null ? null + : (new URL(location, request.url)).toString() + + // HTTP fetch step 5.5 + if (request.redirect === 'error') { + reject(new FetchError('uri requested responds with a redirect, ' + + `redirect mode is set to error: ${request.url}`, 'no-redirect')) + finalize() + return + } else if (request.redirect === 'manual') { + // node-fetch-specific step: make manual redirect a bit easier to + // use by setting the Location header value to the resolved URL. 
+ if (locationURL !== null) { + // handle corrupted header + try { + headers.set('Location', locationURL) + } catch (err) { + /* istanbul ignore next: nodejs server prevent invalid + response headers, we can't test this through normal + request */ + reject(err) + } + } + } else if (request.redirect === 'follow' && locationURL !== null) { + // HTTP-redirect fetch step 5 + if (request.counter >= request.follow) { + reject(new FetchError(`maximum redirect reached at: ${ + request.url}`, 'max-redirect')) + finalize() + return + } + + // HTTP-redirect fetch step 9 + if (res.statusCode !== 303 && + request.body && + getTotalBytes(request) === null) { + reject(new FetchError( + 'Cannot follow redirect with body being a readable stream', + 'unsupported-redirect' + )) + finalize() + return + } + + // Update host due to redirection + request.headers.set('host', (new URL(locationURL)).host) + + // HTTP-redirect fetch step 6 (counter increment) + // Create a new Request object. + const requestOpts = { + headers: new Headers(request.headers), + follow: request.follow, + counter: request.counter + 1, + agent: request.agent, + compress: request.compress, + method: request.method, + body: request.body, + signal: request.signal, + timeout: request.timeout, + } + + // HTTP-redirect fetch step 11 + if (res.statusCode === 303 || ( + (res.statusCode === 301 || res.statusCode === 302) && + request.method === 'POST' + )) { + requestOpts.method = 'GET' + requestOpts.body = undefined + requestOpts.headers.delete('content-length') + } + + // HTTP-redirect fetch step 15 + resolve(fetch(new Request(locationURL, requestOpts))) + finalize() + return + } + } // end if(isRedirect) + + // prepare response + res.once('end', () => + signal && signal.removeEventListener('abort', abortAndFinalize)) + + const body = new Minipass() + // if an error occurs, either on the response stream itself, on one of the + // decoder streams, or a response length timeout from the Body class, we + // forward the error through to our internal body stream. If we see an + // error event on that, we call finalize to abort the request and ensure + // we don't leave a socket believing a request is in flight. + // this is difficult to test, so lacks specific coverage. + body.on('error', finalize) + // exceedingly rare that the stream would have an error, + // but just in case we proxy it to the stream in use. + res.on('error', /* istanbul ignore next */ er => body.emit('error', er)) + res.on('data', (chunk) => body.write(chunk)) + res.on('end', () => body.end()) + + const responseOptions = { + url: request.url, + status: res.statusCode, + statusText: res.statusMessage, + headers: headers, + size: request.size, + timeout: request.timeout, + counter: request.counter, + trailer: new Promise(resolve => + res.on('end', () => resolve(createHeadersLenient(res.trailers)))), + } + + // HTTP-network fetch step 12.1.1.3 + const codings = headers.get('Content-Encoding') + + // HTTP-network fetch step 12.1.1.4: handle content codings + + // in following scenarios we ignore compression support + // 1. compression support is disabled + // 2. HEAD request + // 3. no Content-Encoding header + // 4. no content response (204) + // 5. 
content not modified response (304) + if (!request.compress || + request.method === 'HEAD' || + codings === null || + res.statusCode === 204 || + res.statusCode === 304) { + response = new Response(body, responseOptions) + resolve(response) + return + } + + // Be less strict when decoding compressed responses, since sometimes + // servers send slightly invalid responses that are still accepted + // by common browsers. + // Always using Z_SYNC_FLUSH is what cURL does. + const zlibOptions = { + flush: zlib.constants.Z_SYNC_FLUSH, + finishFlush: zlib.constants.Z_SYNC_FLUSH, + } + + // for gzip + if (codings === 'gzip' || codings === 'x-gzip') { + const unzip = new zlib.Gunzip(zlibOptions) + response = new Response( + // exceedingly rare that the stream would have an error, + // but just in case we proxy it to the stream in use. + body.on('error', /* istanbul ignore next */ er => unzip.emit('error', er)).pipe(unzip), + responseOptions + ) + resolve(response) + return + } + + // for deflate + if (codings === 'deflate' || codings === 'x-deflate') { + // handle the infamous raw deflate response from old servers + // a hack for old IIS and Apache servers + const raw = res.pipe(new Minipass()) + raw.once('data', chunk => { + // see http://stackoverflow.com/questions/37519828 + const decoder = (chunk[0] & 0x0F) === 0x08 + ? new zlib.Inflate() + : new zlib.InflateRaw() + // exceedingly rare that the stream would have an error, + // but just in case we proxy it to the stream in use. + body.on('error', /* istanbul ignore next */ er => decoder.emit('error', er)).pipe(decoder) + response = new Response(decoder, responseOptions) + resolve(response) + }) + return + } + + // for br + if (codings === 'br') { + // ignoring coverage so tests don't have to fake support (or lack of) for brotli + // istanbul ignore next + try { + var decoder = new zlib.BrotliDecompress() + } catch (err) { + reject(err) + finalize() + return + } + // exceedingly rare that the stream would have an error, + // but just in case we proxy it to the stream in use. 
+ body.on('error', /* istanbul ignore next */ er => decoder.emit('error', er)).pipe(decoder) + response = new Response(decoder, responseOptions) + resolve(response) + return + } + + // otherwise, use response as-is + response = new Response(body, responseOptions) + resolve(response) + }) + + writeToStream(req, request) + }) +} + +module.exports = fetch + +fetch.isRedirect = code => + code === 301 || + code === 302 || + code === 303 || + code === 307 || + code === 308 + +fetch.Headers = Headers +fetch.Request = Request +fetch.Response = Response +fetch.FetchError = FetchError diff --git a/node_modules/make-fetch-happen/node_modules/minipass-fetch/lib/request.js b/node_modules/make-fetch-happen/node_modules/minipass-fetch/lib/request.js new file mode 100644 index 0000000000000..e620df6de2c01 --- /dev/null +++ b/node_modules/make-fetch-happen/node_modules/minipass-fetch/lib/request.js @@ -0,0 +1,281 @@ +'use strict' +const { URL } = require('url') +const Minipass = require('minipass') +const Headers = require('./headers.js') +const { exportNodeCompatibleHeaders } = Headers +const Body = require('./body.js') +const { clone, extractContentType, getTotalBytes } = Body + +const version = require('../package.json').version +const defaultUserAgent = + `minipass-fetch/${version} (+https://github.com/isaacs/minipass-fetch)` + +const INTERNALS = Symbol('Request internals') + +const isRequest = input => + typeof input === 'object' && typeof input[INTERNALS] === 'object' + +const isAbortSignal = signal => { + const proto = ( + signal + && typeof signal === 'object' + && Object.getPrototypeOf(signal) + ) + return !!(proto && proto.constructor.name === 'AbortSignal') +} + +class Request extends Body { + constructor (input, init = {}) { + const parsedURL = isRequest(input) ? new URL(input.url) + : input && input.href ? new URL(input.href) + : new URL(`${input}`) + + if (isRequest(input)) { + init = { ...input[INTERNALS], ...init } + } else if (!input || typeof input === 'string') { + input = {} + } + + const method = (init.method || input.method || 'GET').toUpperCase() + const isGETHEAD = method === 'GET' || method === 'HEAD' + + if ((init.body !== null && init.body !== undefined || + isRequest(input) && input.body !== null) && isGETHEAD) { + throw new TypeError('Request with GET/HEAD method cannot have body') + } + + const inputBody = init.body !== null && init.body !== undefined ? init.body + : isRequest(input) && input.body !== null ? clone(input) + : null + + super(inputBody, { + timeout: init.timeout || input.timeout || 0, + size: init.size || input.size || 0, + }) + + const headers = new Headers(init.headers || input.headers || {}) + + if (inputBody !== null && inputBody !== undefined && + !headers.has('Content-Type')) { + const contentType = extractContentType(inputBody) + if (contentType) { + headers.append('Content-Type', contentType) + } + } + + const signal = 'signal' in init ? 
init.signal + : null + + if (signal !== null && signal !== undefined && !isAbortSignal(signal)) { + throw new TypeError('Expected signal must be an instanceof AbortSignal') + } + + // TLS specific options that are handled by node + const { + ca, + cert, + ciphers, + clientCertEngine, + crl, + dhparam, + ecdhCurve, + family, + honorCipherOrder, + key, + passphrase, + pfx, + rejectUnauthorized = process.env.NODE_TLS_REJECT_UNAUTHORIZED !== '0', + secureOptions, + secureProtocol, + servername, + sessionIdContext, + } = init + + this[INTERNALS] = { + method, + redirect: init.redirect || input.redirect || 'follow', + headers, + parsedURL, + signal, + ca, + cert, + ciphers, + clientCertEngine, + crl, + dhparam, + ecdhCurve, + family, + honorCipherOrder, + key, + passphrase, + pfx, + rejectUnauthorized, + secureOptions, + secureProtocol, + servername, + sessionIdContext, + } + + // node-fetch-only options + this.follow = init.follow !== undefined ? init.follow + : input.follow !== undefined ? input.follow + : 20 + this.compress = init.compress !== undefined ? init.compress + : input.compress !== undefined ? input.compress + : true + this.counter = init.counter || input.counter || 0 + this.agent = init.agent || input.agent + } + + get method () { + return this[INTERNALS].method + } + + get url () { + return this[INTERNALS].parsedURL.toString() + } + + get headers () { + return this[INTERNALS].headers + } + + get redirect () { + return this[INTERNALS].redirect + } + + get signal () { + return this[INTERNALS].signal + } + + clone () { + return new Request(this) + } + + get [Symbol.toStringTag] () { + return 'Request' + } + + static getNodeRequestOptions (request) { + const parsedURL = request[INTERNALS].parsedURL + const headers = new Headers(request[INTERNALS].headers) + + // fetch step 1.3 + if (!headers.has('Accept')) { + headers.set('Accept', '*/*') + } + + // Basic fetch + if (!/^https?:$/.test(parsedURL.protocol)) { + throw new TypeError('Only HTTP(S) protocols are supported') + } + + if (request.signal && + Minipass.isStream(request.body) && + typeof request.body.destroy !== 'function') { + throw new Error( + 'Cancellation of streamed requests with AbortSignal is not supported') + } + + // HTTP-network-or-cache fetch steps 2.4-2.7 + const contentLengthValue = + (request.body === null || request.body === undefined) && + /^(POST|PUT)$/i.test(request.method) ? '0' + : request.body !== null && request.body !== undefined + ? getTotalBytes(request) + : null + + if (contentLengthValue) { + headers.set('Content-Length', contentLengthValue + '') + } + + // HTTP-network-or-cache fetch step 2.11 + if (!headers.has('User-Agent')) { + headers.set('User-Agent', defaultUserAgent) + } + + // HTTP-network-or-cache fetch step 2.15 + if (request.compress && !headers.has('Accept-Encoding')) { + headers.set('Accept-Encoding', 'gzip,deflate') + } + + const agent = typeof request.agent === 'function' + ? 
request.agent(parsedURL) + : request.agent + + if (!headers.has('Connection') && !agent) { + headers.set('Connection', 'close') + } + + // TLS specific options that are handled by node + const { + ca, + cert, + ciphers, + clientCertEngine, + crl, + dhparam, + ecdhCurve, + family, + honorCipherOrder, + key, + passphrase, + pfx, + rejectUnauthorized, + secureOptions, + secureProtocol, + servername, + sessionIdContext, + } = request[INTERNALS] + + // HTTP-network fetch step 4.2 + // chunked encoding is handled by Node.js + + // we cannot spread parsedURL directly, so we have to read each property one-by-one + // and map them to the equivalent https?.request() method options + const urlProps = { + auth: parsedURL.username || parsedURL.password + ? `${parsedURL.username}:${parsedURL.password}` + : '', + host: parsedURL.host, + hostname: parsedURL.hostname, + path: `${parsedURL.pathname}${parsedURL.search}`, + port: parsedURL.port, + protocol: parsedURL.protocol, + } + + return { + ...urlProps, + method: request.method, + headers: exportNodeCompatibleHeaders(headers), + agent, + ca, + cert, + ciphers, + clientCertEngine, + crl, + dhparam, + ecdhCurve, + family, + honorCipherOrder, + key, + passphrase, + pfx, + rejectUnauthorized, + secureOptions, + secureProtocol, + servername, + sessionIdContext, + } + } +} + +module.exports = Request + +Object.defineProperties(Request.prototype, { + method: { enumerable: true }, + url: { enumerable: true }, + headers: { enumerable: true }, + redirect: { enumerable: true }, + clone: { enumerable: true }, + signal: { enumerable: true }, +}) diff --git a/node_modules/make-fetch-happen/node_modules/minipass-fetch/lib/response.js b/node_modules/make-fetch-happen/node_modules/minipass-fetch/lib/response.js new file mode 100644 index 0000000000000..54cb52db3594a --- /dev/null +++ b/node_modules/make-fetch-happen/node_modules/minipass-fetch/lib/response.js @@ -0,0 +1,90 @@ +'use strict' +const http = require('http') +const { STATUS_CODES } = http + +const Headers = require('./headers.js') +const Body = require('./body.js') +const { clone, extractContentType } = Body + +const INTERNALS = Symbol('Response internals') + +class Response extends Body { + constructor (body = null, opts = {}) { + super(body, opts) + + const status = opts.status || 200 + const headers = new Headers(opts.headers) + + if (body !== null && body !== undefined && !headers.has('Content-Type')) { + const contentType = extractContentType(body) + if (contentType) { + headers.append('Content-Type', contentType) + } + } + + this[INTERNALS] = { + url: opts.url, + status, + statusText: opts.statusText || STATUS_CODES[status], + headers, + counter: opts.counter, + trailer: Promise.resolve(opts.trailer || new Headers()), + } + } + + get trailer () { + return this[INTERNALS].trailer + } + + get url () { + return this[INTERNALS].url || '' + } + + get status () { + return this[INTERNALS].status + } + + get ok () { + return this[INTERNALS].status >= 200 && this[INTERNALS].status < 300 + } + + get redirected () { + return this[INTERNALS].counter > 0 + } + + get statusText () { + return this[INTERNALS].statusText + } + + get headers () { + return this[INTERNALS].headers + } + + clone () { + return new Response(clone(this), { + url: this.url, + status: this.status, + statusText: this.statusText, + headers: this.headers, + ok: this.ok, + redirected: this.redirected, + trailer: this.trailer, + }) + } + + get [Symbol.toStringTag] () { + return 'Response' + } +} + +module.exports = Response + 
+Object.defineProperties(Response.prototype, { + url: { enumerable: true }, + status: { enumerable: true }, + ok: { enumerable: true }, + redirected: { enumerable: true }, + statusText: { enumerable: true }, + headers: { enumerable: true }, + clone: { enumerable: true }, +}) diff --git a/node_modules/make-fetch-happen/node_modules/minipass-fetch/package.json b/node_modules/make-fetch-happen/node_modules/minipass-fetch/package.json new file mode 100644 index 0000000000000..47e32dad6df8b --- /dev/null +++ b/node_modules/make-fetch-happen/node_modules/minipass-fetch/package.json @@ -0,0 +1,63 @@ +{ + "name": "minipass-fetch", + "version": "2.0.2", + "description": "An implementation of window.fetch in Node.js using Minipass streams", + "license": "MIT", + "main": "lib/index.js", + "scripts": { + "test": "tap", + "snap": "tap", + "preversion": "npm test", + "postversion": "npm publish", + "postpublish": "git push origin --follow-tags", + "lint": "eslint '**/*.js'", + "postlint": "npm-template-check", + "template-copy": "npm-template-copy --force", + "lintfix": "npm run lint -- --fix", + "prepublishOnly": "git push origin --follow-tags", + "posttest": "npm run lint" + }, + "tap": { + "coverage-map": "map.js", + "check-coverage": true + }, + "devDependencies": { + "@npmcli/template-oss": "^2.8.1", + "@ungap/url-search-params": "^0.2.2", + "abort-controller": "^3.0.0", + "abortcontroller-polyfill": "~1.7.3", + "form-data": "^4.0.0", + "parted": "^0.1.1", + "string-to-arraybuffer": "^1.0.2", + "tap": "^15.1.6" + }, + "dependencies": { + "minipass": "^3.1.6", + "minipass-sized": "^1.0.3", + "minizlib": "^2.1.2" + }, + "optionalDependencies": { + "encoding": "^0.1.13" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/npm/minipass-fetch.git" + }, + "keywords": [ + "fetch", + "minipass", + "node-fetch", + "window.fetch" + ], + "files": [ + "bin", + "lib" + ], + "engines": { + "node": "^12.13.0 || ^14.15.0 || >=16" + }, + "author": "GitHub Inc.", + "templateOSS": { + "version": "2.8.1" + } +} diff --git a/node_modules/make-fetch-happen/package.json b/node_modules/make-fetch-happen/package.json index c01a09ac5d7aa..b871b18e09766 100644 --- a/node_modules/make-fetch-happen/package.json +++ b/node_modules/make-fetch-happen/package.json @@ -1,6 +1,6 @@ { "name": "make-fetch-happen", - "version": "10.0.3", + "version": "10.0.4", "description": "Opinionated, caching, retrying fetch client", "main": "lib/index.js", "files": [ @@ -33,16 +33,16 @@ "author": "GitHub Inc.", "license": "ISC", "dependencies": { - "agentkeepalive": "^4.2.0", + "agentkeepalive": "^4.2.1", "cacache": "^15.3.0", "http-cache-semantics": "^4.1.0", "http-proxy-agent": "^5.0.0", "https-proxy-agent": "^5.0.0", "is-lambda": "^1.0.1", - "lru-cache": "^7.3.1", + "lru-cache": "^7.4.0", "minipass": "^3.1.6", "minipass-collect": "^1.0.2", - "minipass-fetch": "^1.4.1", + "minipass-fetch": "^2.0.1", "minipass-flush": "^1.0.5", "minipass-pipeline": "^1.2.4", "negotiator": "^0.6.3", @@ -51,8 +51,8 @@ "ssri": "^8.0.1" }, "devDependencies": { - "@npmcli/template-oss": "^2.7.1", - "eslint": "^8.9.0", + "@npmcli/template-oss": "^2.8.1", + "eslint": "^8.10.0", "mkdirp": "^1.0.4", "nock": "^13.2.4", "rimraf": "^3.0.2", @@ -69,6 +69,6 @@ "check-coverage": true }, "templateOSS": { - "version": "2.7.1" + "version": "2.8.1" } } diff --git a/package-lock.json b/package-lock.json index bb48f97294838..56320a4938b19 100644 --- a/package-lock.json +++ b/package-lock.json @@ -122,7 +122,7 @@ "libnpmsearch": "^5.0.0", "libnpmteam": "^4.0.0", 
"libnpmversion": "^3.0.0", - "make-fetch-happen": "^10.0.3", + "make-fetch-happen": "^10.0.4", "minipass": "^3.1.6", "minipass-pipeline": "^1.2.4", "mkdirp": "^1.0.4", @@ -1157,9 +1157,9 @@ } }, "node_modules/agentkeepalive": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/agentkeepalive/-/agentkeepalive-4.2.0.tgz", - "integrity": "sha512-0PhAp58jZNw13UJv7NVdTGb0ZcghHUb3DrZ046JiiJY/BOaTTpbwdHq2VObPCBV8M2GPh7sgrJ3AQ8Ey468LJw==", + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/agentkeepalive/-/agentkeepalive-4.2.1.tgz", + "integrity": "sha512-Zn4cw2NEqd+9fiSVWMscnjyQ1a8Yfoc5oBajLeo5w+YBHgDUcEBY2hS4YpTz6iN5f/2zQiktcuM6tS8x1p9dpA==", "inBundle": true, "dependencies": { "debug": "^4.1.0", @@ -4972,21 +4972,21 @@ "peer": true }, "node_modules/make-fetch-happen": { - "version": "10.0.3", - "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-10.0.3.tgz", - "integrity": "sha512-CzarPHynPpHjhF5in/YapnO44rSZeYX5VCMfdXa99+gLwpbfFLh20CWa6dP/taV9Net9PWJwXNKtp/4ZTCQnag==", + "version": "10.0.4", + "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-10.0.4.tgz", + "integrity": "sha512-CiReW6usy3UXby5N46XjWfLPFPq1glugCszh18I0NYJCwr129ZAx9j3Dlv+cRsK0q3VjlVysEzhdtdw2+NhdYA==", "inBundle": true, "dependencies": { - "agentkeepalive": "^4.2.0", + "agentkeepalive": "^4.2.1", "cacache": "^15.3.0", "http-cache-semantics": "^4.1.0", "http-proxy-agent": "^5.0.0", "https-proxy-agent": "^5.0.0", "is-lambda": "^1.0.1", - "lru-cache": "^7.3.1", + "lru-cache": "^7.4.0", "minipass": "^3.1.6", "minipass-collect": "^1.0.2", - "minipass-fetch": "^1.4.1", + "minipass-fetch": "^2.0.1", "minipass-flush": "^1.0.5", "minipass-pipeline": "^1.2.4", "negotiator": "^0.6.3", @@ -4999,14 +4999,31 @@ } }, "node_modules/make-fetch-happen/node_modules/lru-cache": { - "version": "7.3.1", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.3.1.tgz", - "integrity": "sha512-nX1x4qUrKqwbIAhv4s9et4FIUVzNOpeY07bsjGUy8gwJrXH/wScImSQqXErmo/b2jZY2r0mohbLA9zVj7u1cNw==", + "version": "7.4.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.4.0.tgz", + "integrity": "sha512-YOfuyWa/Ee+PXbDm40j9WXyJrzQUynVbgn4Km643UYcWNcrSfRkKL0WaiUcxcIbkXcVTgNpDqSnPXntWXT75cw==", "inBundle": true, "engines": { "node": ">=12" } }, + "node_modules/make-fetch-happen/node_modules/minipass-fetch": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/minipass-fetch/-/minipass-fetch-2.0.2.tgz", + "integrity": "sha512-M63u5yWX0yxY1C3DcLVY1xWai0pNM3qa1xCMXFgdejY5F/NTmyzNVHGcBxKerX51lssqxwWWTjpg/ZPuD39gOQ==", + "inBundle": true, + "dependencies": { + "minipass": "^3.1.6", + "minipass-sized": "^1.0.3", + "minizlib": "^2.1.2" + }, + "engines": { + "node": "^12.13.0 || ^14.15.0 || >=16" + }, + "optionalDependencies": { + "encoding": "^0.1.13" + } + }, "node_modules/markdown-escapes": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/markdown-escapes/-/markdown-escapes-1.0.4.tgz", @@ -11625,9 +11642,9 @@ } }, "agentkeepalive": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/agentkeepalive/-/agentkeepalive-4.2.0.tgz", - "integrity": "sha512-0PhAp58jZNw13UJv7NVdTGb0ZcghHUb3DrZ046JiiJY/BOaTTpbwdHq2VObPCBV8M2GPh7sgrJ3AQ8Ey468LJw==", + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/agentkeepalive/-/agentkeepalive-4.2.1.tgz", + "integrity": "sha512-Zn4cw2NEqd+9fiSVWMscnjyQ1a8Yfoc5oBajLeo5w+YBHgDUcEBY2hS4YpTz6iN5f/2zQiktcuM6tS8x1p9dpA==", "requires": { "debug": "^4.1.0", "depd": "^1.1.2", @@ -14655,20 +14672,20 @@ 
"peer": true }, "make-fetch-happen": { - "version": "10.0.3", - "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-10.0.3.tgz", - "integrity": "sha512-CzarPHynPpHjhF5in/YapnO44rSZeYX5VCMfdXa99+gLwpbfFLh20CWa6dP/taV9Net9PWJwXNKtp/4ZTCQnag==", + "version": "10.0.4", + "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-10.0.4.tgz", + "integrity": "sha512-CiReW6usy3UXby5N46XjWfLPFPq1glugCszh18I0NYJCwr129ZAx9j3Dlv+cRsK0q3VjlVysEzhdtdw2+NhdYA==", "requires": { - "agentkeepalive": "^4.2.0", + "agentkeepalive": "^4.2.1", "cacache": "^15.3.0", "http-cache-semantics": "^4.1.0", "http-proxy-agent": "^5.0.0", "https-proxy-agent": "^5.0.0", "is-lambda": "^1.0.1", - "lru-cache": "^7.3.1", + "lru-cache": "^7.4.0", "minipass": "^3.1.6", "minipass-collect": "^1.0.2", - "minipass-fetch": "^1.4.1", + "minipass-fetch": "^2.0.1", "minipass-flush": "^1.0.5", "minipass-pipeline": "^1.2.4", "negotiator": "^0.6.3", @@ -14678,9 +14695,20 @@ }, "dependencies": { "lru-cache": { - "version": "7.3.1", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.3.1.tgz", - "integrity": "sha512-nX1x4qUrKqwbIAhv4s9et4FIUVzNOpeY07bsjGUy8gwJrXH/wScImSQqXErmo/b2jZY2r0mohbLA9zVj7u1cNw==" + "version": "7.4.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.4.0.tgz", + "integrity": "sha512-YOfuyWa/Ee+PXbDm40j9WXyJrzQUynVbgn4Km643UYcWNcrSfRkKL0WaiUcxcIbkXcVTgNpDqSnPXntWXT75cw==" + }, + "minipass-fetch": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/minipass-fetch/-/minipass-fetch-2.0.2.tgz", + "integrity": "sha512-M63u5yWX0yxY1C3DcLVY1xWai0pNM3qa1xCMXFgdejY5F/NTmyzNVHGcBxKerX51lssqxwWWTjpg/ZPuD39gOQ==", + "requires": { + "encoding": "^0.1.13", + "minipass": "^3.1.6", + "minipass-sized": "^1.0.3", + "minizlib": "^2.1.2" + } } } }, diff --git a/package.json b/package.json index 7bae1d5976591..2420c71317df1 100644 --- a/package.json +++ b/package.json @@ -90,7 +90,7 @@ "libnpmsearch": "^5.0.0", "libnpmteam": "^4.0.0", "libnpmversion": "^3.0.0", - "make-fetch-happen": "^10.0.3", + "make-fetch-happen": "^10.0.4", "minipass": "^3.1.6", "minipass-pipeline": "^1.2.4", "mkdirp": "^1.0.4",