diff --git a/packages/jsii-calc-lib/lib/index.ts b/packages/jsii-calc-lib/lib/index.ts
index e7d0a1a801..fc05371a8f 100644
--- a/packages/jsii-calc-lib/lib/index.ts
+++ b/packages/jsii-calc-lib/lib/index.ts
@@ -7,20 +7,27 @@ export abstract class Value extends base.Base {
   /**
    * The value.
    */
-  abstract readonly value: number
+  public abstract readonly value: number;

   /**
    * String representation of the value.
    */
-  toString() {
+  public toString() {
     return this.value.toString();
   }
 }

+/**
+ * The general contract for a concrete number.
+ */
+export interface IDoublable {
+  readonly doubleValue: number;
+}
+
 /**
  * Represents a concrete number.
  */
-export class Number extends Value {
+export class Number extends Value implements IDoublable {
   /**
    * Creates a Number object.
    * @param value The number.
@@ -41,7 +48,7 @@ export class Number extends Value {
  * Represents an operation on values.
  */
 export abstract class Operation extends Value {
-  abstract toString(): string
+  public abstract toString(): string;
 }

 /**
diff --git a/packages/jsii-calc-lib/test/assembly.jsii b/packages/jsii-calc-lib/test/assembly.jsii
index e351e0ce08..dc6ac23805 100644
--- a/packages/jsii-calc-lib/test/assembly.jsii
+++ b/packages/jsii-calc-lib/test/assembly.jsii
@@ -92,6 +92,25 @@
       ],
       "name": "EnumFromScopedModule"
     },
+    "@scope/jsii-calc-lib.IDoublable": {
+      "assembly": "@scope/jsii-calc-lib",
+      "docs": {
+        "comment": "The general contract for a concrete number."
+      },
+      "fqn": "@scope/jsii-calc-lib.IDoublable",
+      "kind": "interface",
+      "name": "IDoublable",
+      "properties": [
+        {
+          "abstract": true,
+          "immutable": true,
+          "name": "doubleValue",
+          "type": {
+            "primitive": "number"
+          }
+        }
+      ]
+    },
     "@scope/jsii-calc-lib.IFriendly": {
       "assembly": "@scope/jsii-calc-lib",
       "docs": {
@@ -184,6 +203,11 @@
         }
       ]
     },
+    "interfaces": [
+      {
+        "fqn": "@scope/jsii-calc-lib.IDoublable"
+      }
+    ],
     "kind": "class",
     "name": "Number",
     "properties": [
@@ -193,6 +217,9 @@
       },
       "immutable": true,
       "name": "doubleValue",
+      "overrides": {
+        "fqn": "@scope/jsii-calc-lib.IDoublable"
+      },
       "type": {
         "primitive": "number"
       }
@@ -324,5 +351,5 @@
     }
   },
   "version": "0.7.8",
-  "fingerprint": "16sTfW7oHGAWfPOj50gWvXsI1REjbNbpk7VUpG1JVVI="
+  "fingerprint": "HzcyHys0b9gFmP4dogeIJmGE6GVtrSo/P0S54Vd/X8U="
 }
diff --git a/packages/jsii-calc/lib/compliance.ts b/packages/jsii-calc/lib/compliance.ts
index c8ab84a415..783a5a94cf 100644
--- a/packages/jsii-calc/lib/compliance.ts
+++ b/packages/jsii-calc/lib/compliance.ts
@@ -1,5 +1,5 @@
 // tslint:disable
-import { Value, Number, IFriendly, MyFirstStruct, StructWithOnlyOptionals, EnumFromScopedModule } from '@scope/jsii-calc-lib';
+import { Value, Number, IFriendly, IDoublable, MyFirstStruct, StructWithOnlyOptionals, EnumFromScopedModule } from '@scope/jsii-calc-lib';
 import * as fs from 'fs';
 import * as path from 'path';
 import * as os from 'os';
@@ -574,7 +574,7 @@ export class AllowedMethodNames {
 }

 export interface IReturnsNumber {
-    obtainNumber(): Number;
+    obtainNumber(): IDoublable;

     readonly numberProp: Number;
 }
@@ -938,4 +938,4 @@ export interface IInterfaceWithMethods {
  */
 export interface IInterfaceThatShouldNotBeADataType extends IInterfaceWithMethods {
     readonly otherValue: string;
-}
\ No newline at end of file
+}
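For orientation, and not part of this change: the jsii-calc class that the new .NET test further down drives (OverrideReturnsObject) is not shown in this diff. A minimal TypeScript sketch of such a consumer of the updated IReturnsNumber contract, assuming Number.doubleValue is twice the wrapped value, could look like the following; the class name and the restated interface are illustrative only.

// Sketch only. Assumes Number.doubleValue === 2 * value, which is what makes
// the .NET test's `4 * n` assertion below come out.
import { IDoublable, Number } from '@scope/jsii-calc-lib';

// Restated here from compliance.ts so the sketch is self-contained.
export interface IReturnsNumber {
  obtainNumber(): IDoublable;
  readonly numberProp: Number;
}

export class OverrideReturnsObjectSketch {
  // For a Number built from n: 2n (obtainNumber) + 2n (numberProp) = 4n.
  public test(obj: IReturnsNumber): number {
    return obj.obtainNumber().doubleValue + obj.numberProp.doubleValue;
  }
}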
"fqn": "@scope/jsii-calc-lib.Number" }, "kind": "map" } @@ -486,6 +486,9 @@ }, { "primitive": "number" + }, + { + "fqn": "@scope/jsii-calc-lib.Number" } ] } @@ -507,6 +510,9 @@ }, { "fqn": "jsii-calc.Multiply" + }, + { + "fqn": "@scope/jsii-calc-lib.Number" } ] } @@ -1559,7 +1565,7 @@ "abstract": true, "name": "obtainNumber", "returns": { - "primitive": "number" + "fqn": "@scope/jsii-calc-lib.IDoublable" } } ], @@ -1570,7 +1576,7 @@ "immutable": true, "name": "numberProp", "type": { - "primitive": "number" + "fqn": "@scope/jsii-calc-lib.Number" } } ] @@ -3401,5 +3407,5 @@ } }, "version": "0.7.8", - "fingerprint": "fhzPkiQLwsWAnEdA5+YEotaWom2Av1au0q2FzpexXaQ=" + "fingerprint": "jHSXTzCSZbwYMvLKpeZB6SE8hNgYgt9/2JF1ihM41SI=" } diff --git a/packages/jsii-dotnet-runtime-test/test/Amazon.JSII.Runtime.IntegrationTests/ComplianceTests.cs b/packages/jsii-dotnet-runtime-test/test/Amazon.JSII.Runtime.IntegrationTests/ComplianceTests.cs index 9b287e9483..ff6747a7be 100644 --- a/packages/jsii-dotnet-runtime-test/test/Amazon.JSII.Runtime.IntegrationTests/ComplianceTests.cs +++ b/packages/jsii-dotnet-runtime-test/test/Amazon.JSII.Runtime.IntegrationTests/ComplianceTests.cs @@ -83,10 +83,10 @@ public void CollectionTypes() Assert.Equal("World", types.ArrayProperty[1]); // map - IDictionary map = new Dictionary(); - map["Foo"] = 123; + IDictionary map = new Dictionary(); + map["Foo"] = new Number(123); types.MapProperty = map; - Assert.Equal((double) 123, types.MapProperty["Foo"]); + Assert.Equal((double) 123, types.MapProperty["Foo"].Value); } [Fact(DisplayName = Prefix + nameof(DynamicTypes))] @@ -818,6 +818,43 @@ public void TestClassWithPrivateConstructorAndAutomaticProperties() Assert.Equal("Hello", obj.ReadOnlyString); } + [Fact(DisplayName = Prefix + nameof(TestReturnInterfaceFromOverride))] + public void TestReturnInterfaceFromOverride() + { + var n = 1337; + var obj = new OverrideReturnsObject(); + var arg = new NumberReturner(n); + Assert.Equal(4 * n, obj.Test(arg)); + } + + class NumberReturner : DeputyBase, IIReturnsNumber + { + public NumberReturner(double number) + { + NumberProp = new Number(number); + } + + [JsiiProperty("numberProp", "{\"fqn\":\"@scope/jsii-calc-lib.Number\"}", true)] + public Number NumberProp { get; } + + [JsiiMethod("obtainNumber", "{\"fqn\":\"@scope/jsii-calc-lib.IDoublable\"}", "[]",true)] + public IIDoublable ObtainNumber() + { + return new Doublable(this.NumberProp); + } + + class Doublable : DeputyBase, IIDoublable + { + public Doublable(Number number) + { + this.DoubleValue = number.DoubleValue; + } + + [JsiiProperty("doubleValue","{\"primitive\":\"number\"}",true)] + public Double DoubleValue { get; } + } + } + class MulTen : Multiply { public MulTen(int value) diff --git a/packages/jsii-dotnet-runtime/src/Amazon.JSII.Runtime/CallbackExtensions.cs b/packages/jsii-dotnet-runtime/src/Amazon.JSII.Runtime/CallbackExtensions.cs index ca1c259831..28b93c68b1 100644 --- a/packages/jsii-dotnet-runtime/src/Amazon.JSII.Runtime/CallbackExtensions.cs +++ b/packages/jsii-dotnet-runtime/src/Amazon.JSII.Runtime/CallbackExtensions.cs @@ -15,10 +15,11 @@ public static object InvokeCallback(this Callback callback, IReferenceMap refere { try { - object frameworkResult = callback.InvokeCallbackCore(referenceMap); + TypeReference returnType; + object frameworkResult = callback.InvokeCallbackCore(referenceMap, out returnType); converter.TryConvert( - new TypeReference(primitive: PrimitiveType.Any), + returnType ?? 
diff --git a/packages/jsii-dotnet-runtime/src/Amazon.JSII.Runtime/CallbackExtensions.cs b/packages/jsii-dotnet-runtime/src/Amazon.JSII.Runtime/CallbackExtensions.cs
index ca1c259831..28b93c68b1 100644
--- a/packages/jsii-dotnet-runtime/src/Amazon.JSII.Runtime/CallbackExtensions.cs
+++ b/packages/jsii-dotnet-runtime/src/Amazon.JSII.Runtime/CallbackExtensions.cs
@@ -15,10 +15,11 @@ public static object InvokeCallback(this Callback callback, IReferenceMap refere
         {
             try
             {
-                object frameworkResult = callback.InvokeCallbackCore(referenceMap);
+                TypeReference returnType;
+                object frameworkResult = callback.InvokeCallbackCore(referenceMap, out returnType);

                 converter.TryConvert(
-                    new TypeReference(primitive: PrimitiveType.Any),
+                    returnType ?? new TypeReference(primitive: PrimitiveType.Any),
                     referenceMap,
                     frameworkResult,
                     out object result
@@ -41,28 +42,29 @@ out object result
             }
         }

-        static object InvokeCallbackCore(this Callback callback, IReferenceMap referenceMap)
+        static object InvokeCallbackCore(this Callback callback, IReferenceMap referenceMap, out TypeReference returnType)
         {
             if (callback.Invoke != null)
             {
-                return InvokeMethod(callback.Invoke, referenceMap);
+                return InvokeMethod(callback.Invoke, referenceMap, out returnType);
             }

             if (callback.Get != null)
             {
-                return InvokeGetter(callback.Get, referenceMap);
+                return InvokeGetter(callback.Get, referenceMap, out returnType);
             }

             if (callback.Set != null)
             {
                 InvokeSetter(callback.Set, referenceMap);
+                returnType = null;
                 return null;
             }

             throw new ArgumentException("Callback does not specificy a method, getter, or setter to invoke");
         }

-        static object InvokeMethod(InvokeRequest request, IReferenceMap referenceMap)
+        static object InvokeMethod(InvokeRequest request, IReferenceMap referenceMap, out TypeReference returnType)
         {
             request = request ?? throw new ArgumentNullException(nameof(request));
             DeputyBase deputy = referenceMap.GetOrCreateNativeReference(request.ObjectReference);
@@ -74,10 +76,13 @@ static object InvokeMethod(InvokeRequest request, IReferenceMap referenceMap)
                 throw new InvalidOperationException($"Received callback for {deputy.GetType().Name}.{request.Method} getter, but this method does not exist");
             }

+            JsiiMethodAttribute attribute = methodInfo.GetCustomAttribute<JsiiMethodAttribute>();
+            returnType = attribute?.Returns;
+
             return methodInfo.Invoke(deputy, request.Arguments);
         }

-        static object InvokeGetter(GetRequest request, IReferenceMap referenceMap)
+        static object InvokeGetter(GetRequest request, IReferenceMap referenceMap, out TypeReference returnType)
         {
             request = request ?? throw new ArgumentNullException(nameof(request));
             DeputyBase deputy = referenceMap.GetOrCreateNativeReference(request.ObjectReference);
@@ -88,6 +93,9 @@ static object InvokeGetter(GetRequest request, IReferenceMap referenceMap)
                 throw new InvalidOperationException($"Received callback for {deputy.GetType().Name}.{request.Property} getter, but this property does not exist");
             }

+            JsiiPropertyAttribute attribute = propertyInfo.GetCustomAttribute<JsiiPropertyAttribute>();
+            returnType = attribute?.Type;
+
             MethodInfo methodInfo = propertyInfo.GetGetMethod();
             if (methodInfo == null)
             {
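The new `out TypeReference returnType` parameter carries the jsii type reference declared on the overridden member, taken from the JsiiMethod/JsiiProperty attribute. A TypeScript sketch of the shapes involved, reconstructed from the attribute strings and assembly fragments above; the union arm comes from the jsii spec and is not visible in this diff, so treat the exact union as an approximation.

// Sketch of the type-reference shapes that returnType can carry.
type JsiiTypeReference =
  | { primitive: 'any' | 'boolean' | 'date' | 'json' | 'number' | 'string' }
  | { fqn: string }  // e.g. '@scope/jsii-calc-lib.IDoublable'
  | { collection: { elementtype: JsiiTypeReference, kind: 'array' | 'map' } }
  | { union: { types: JsiiTypeReference[] } };

// Before this change every callback result was converted as { primitive: 'any' };
// now the declared type wins whenever the attribute provides one.
declare const declaredReturnType: JsiiTypeReference | undefined;
export const effectiveType: JsiiTypeReference = declaredReturnType ?? { primitive: 'any' };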
diff --git a/packages/jsii-dotnet-runtime/src/Amazon.JSII.Runtime/Services/Converters/FrameworkToJsiiConverter.cs b/packages/jsii-dotnet-runtime/src/Amazon.JSII.Runtime/Services/Converters/FrameworkToJsiiConverter.cs
index 9dabadcb95..f4cdbc674e 100644
--- a/packages/jsii-dotnet-runtime/src/Amazon.JSII.Runtime/Services/Converters/FrameworkToJsiiConverter.cs
+++ b/packages/jsii-dotnet-runtime/src/Amazon.JSII.Runtime/Services/Converters/FrameworkToJsiiConverter.cs
@@ -251,6 +251,10 @@ protected override bool TryConvertMap(IReferenceMap referenceMap, TypeReference
                     return false;
                 }

+                if (convertedElement != null && !(convertedElement is String) && !convertedElement.GetType().IsPrimitive)
+                {
+                    convertedElement = JObject.FromObject(convertedElement);
+                }
                 resultObject.Add(new JProperty(key, convertedElement));
             }
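The TryConvertMap change matters because map values can now be converted object references rather than JSON primitives; wrapping the converted element in a JObject keeps the reference token intact when the map is serialized. A rough TypeScript sketch of the wire-level difference for the updated CollectionTypes test; '$jsii.byref' is the jsii token for an object reference, and the reference id shown is invented.

// Sketch only: approximate wire shapes for MapProperty before and after this change.
export const mapOfPrimitivesBefore: Record<string, number> = { Foo: 123 };
export const mapOfObjectRefsAfter: Record<string, { '$jsii.byref': string }> = {
  Foo: { '$jsii.byref': '@scope/jsii-calc-lib.Number@10001' },
};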
diff --git a/packages/jsii-dotnet-runtime/src/Amazon.JSII.Runtime/jsii-runtime/jsii-runtime.js b/packages/jsii-dotnet-runtime/src/Amazon.JSII.Runtime/jsii-runtime/jsii-runtime.js
new file mode 100644
index 0000000000..6ac1995bb6
--- /dev/null
+++ b/packages/jsii-dotnet-runtime/src/Amazon.JSII.Runtime/jsii-runtime/jsii-runtime.js
@@ -0,0 +1,2 @@
[new file content elided: a 2-line minified webpack bundle of the jsii runtime, truncated in the source text]
a.WriteStreamSync(e.file,{fd:n,start:i});t.pipe(s),p(t,r)},d=(e,t,i)=>{t=Array.from(t);const r=new s(e),c=new Promise((i,s)=>{r.on("error",s);let c="r+";const h=(u,f)=>u&&"ENOENT"===u.code&&"r+"===c?(c="w+",o.open(e.file,c,h)):u?s(u):void o.fstat(f,(c,h)=>{if(c)return s(c);((t,i,r)=>{const s=(e,i)=>{e?o.close(t,t=>r(e)):r(null,i)};let a=0;if(0===i)return s(null,0);let c=0;const h=n.alloc(512),u=(n,r)=>{if(n)return s(n);if((c+=r)<512&&r)return o.read(t,h,c,h.length-c,a+c,u);if(0===a&&31===h[0]&&139===h[1])return s(new Error("cannot append to compressed archives"));if(c<512)return s(null,a);const f=new l(h);if(!f.cksumValid)return s(null,a);const d=512*Math.ceil(f.size/512);return a+d+512>i?s(null,a):(a+=d+512)>=i?s(null,a):(e.mtimeCache&&e.mtimeCache.set(f.path,f.mtime),c=0,void o.read(t,h,0,512,a,u))};o.read(t,h,0,512,a,u)})(f,h.size,(n,o)=>{if(n)return s(n);const c=new a.WriteStream(e.file,{fd:f,start:o});r.pipe(c),c.on("error",s),c.on("close",i),m(r,t)})});o.open(e.file,c,h)});return i?c.then(i,i):c},p=(e,t)=>{t.forEach(t=>{"@"===t.charAt(0)?c({file:h.resolve(e.cwd,t.substr(1)),sync:!0,noResume:!0,onentry:t=>e.add(t)}):e.add(t)}),e.end()},m=(e,t)=>{for(;t.length;){const i=t.shift();if("@"===i.charAt(0))return c({file:h.resolve(e.cwd,i.substr(1)),noResume:!0,onentry:t=>e.add(t)}).then(i=>m(e,t));e.add(i)}e.end()}},function(e,t,i){"use strict";const n=i(12),r=(i(14).EventEmitter,i(18)),s=i(2),o=i(11),a=i(0),c=i(93),h=(c.sync,i(37)),l=Symbol("onEntry"),u=Symbol("checkFs"),f=Symbol("isReusable"),d=Symbol("makeFs"),p=Symbol("file"),m=Symbol("directory"),y=Symbol("link"),b=Symbol("symlink"),w=Symbol("hardlink"),g=Symbol("unsupported"),v=(Symbol("unknown"),Symbol("checkPath")),S=Symbol("mkdir"),E=Symbol("onError"),k=Symbol("pending"),_=Symbol("pend"),x=Symbol("unpend"),T=Symbol("ended"),O=Symbol("maybeClose"),R=Symbol("skip"),j=Symbol("doChown"),P=Symbol("uid"),F=Symbol("gid"),N=i(96),$=(e,t)=>{if("win32"!==process.platform)return s.unlink(e,t);const i=e+".DELETE."+N.randomBytes(16).toString("hex");s.rename(e,i,e=>{if(e)return t(e);s.unlink(i,t)})},I=e=>{if("win32"!==process.platform)return s.unlinkSync(e);const t=e+".DELETE."+N.randomBytes(16).toString("hex");s.renameSync(e,t),s.unlinkSync(t)},A=(e,t,i)=>e===e>>>0?e:t===t>>>0?t:i;class M extends r{constructor(e){if(e||(e={}),e.ondone=(e=>{this[T]=!0,this[O]()}),super(e),this.transform="function"==typeof e.transform?e.transform:null,this.writable=!0,this.readable=!1,this[k]=0,this[T]=!1,this.dirCache=e.dirCache||new Map,"number"==typeof e.uid||"number"==typeof e.gid){if("number"!=typeof e.uid||"number"!=typeof e.gid)throw new TypeError("cannot set owner without number uid and gid");if(e.preserveOwner)throw new TypeError("cannot preserve owner in archive and also set owner explicitly");this.uid=e.uid,this.gid=e.gid,this.setOwner=!0}else this.uid=null,this.gid=null,this.setOwner=!1;void 0===e.preserveOwner&&"number"!=typeof e.uid?this.preserveOwner=process.getuid&&0===process.getuid():this.preserveOwner=!!e.preserveOwner,this.processUid=(this.preserveOwner||this.setOwner)&&process.getuid?process.getuid():null,this.processGid=(this.preserveOwner||this.setOwner)&&process.getgid?process.getgid():null,this.forceChown=!0===e.forceChown,this.win32=!!e.win32||"win32"===process.platform,this.newer=!!e.newer,this.keep=!!e.keep,this.noMtime=!!e.noMtime,this.preservePaths=!!e.preservePaths,this.unlink=!!e.unlink,this.cwd=a.resolve(e.cwd||process.cwd()),this.strip=+e.strip||0,this.processUmask=process.umask(),this.umask="number"==typeof 
e.umask?e.umask:this.processUmask,this.dmode=e.dmode||511&~this.umask,this.fmode=e.fmode||438&~this.umask,this.on("entry",e=>this[l](e))}[O](){this[T]&&0===this[k]&&(this.emit("prefinish"),this.emit("finish"),this.emit("end"),this.emit("close"))}[v](e){if(this.strip){const t=e.path.split(/\/|\\/);if(t.lengththis[E](t,e));let n=1;const r=t=>{if(t)return this[E](t,e);0==--n&&s.close(i.fd,e=>this[x]())};i.on("finish",t=>{const o=e.absolute,a=i.fd;if(e.mtime&&!this.noMtime){n++;const t=e.atime||new Date,i=e.mtime;s.futimes(a,t,i,e=>e?s.utimes(o,t,i,t=>r(t&&e)):r())}if(this[j](e)){n++;const t=this[P](e),i=this[F](e);s.fchown(a,t,i,e=>e?s.chown(o,t,i,t=>r(t&&e)):r())}r()});const a=this.transform&&this.transform(e)||e;a!==e&&(a.on("error",t=>this[E](t,e)),e.pipe(a)),a.pipe(i)}[m](e){const t=4095&e.mode||this.dmode;this[S](e.absolute,t,t=>{if(t)return this[E](t,e);let i=1;const n=t=>{0==--i&&(this[x](),e.resume())};e.mtime&&!this.noMtime&&(i++,s.utimes(e.absolute,e.atime||new Date,e.mtime,n)),this[j](e)&&(i++,s.chown(e.absolute,this[P](e),this[F](e),n)),n()})}[g](e){this.warn("unsupported entry type: "+e.type,e),e.resume()}[b](e){this[y](e,e.linkpath,"symlink")}[w](e){this[y](e,a.resolve(this.cwd,e.linkpath),"link")}[_](){this[k]++}[x](){this[k]--,this[O]()}[R](e){this[x](),e.resume()}[f](e,t){return"File"===e.type&&!this.unlink&&t.isFile()&&t.nlink<=1&&"win32"!==process.platform}[u](e){this[_](),this[S](a.dirname(e.absolute),this.dmode,t=>{if(t)return this[E](t,e);s.lstat(e.absolute,(t,i)=>{i&&(this.keep||this.newer&&i.mtime>e.mtime)?this[R](e):t||this[f](e,i)?this[d](null,e):i.isDirectory()?"Directory"===e.type?e.mode&&(4095&i.mode)!==e.mode?s.chmod(e.absolute,e.mode,t=>this[d](t,e)):this[d](null,e):s.rmdir(e.absolute,t=>this[d](t,e)):$(e.absolute,t=>this[d](t,e))})})}[d](e,t){if(e)return this[E](e,t);switch(t.type){case"File":case"OldFile":case"ContiguousFile":return this[p](t);case"Link":return this[w](t);case"SymbolicLink":return this[b](t);case"Directory":case"GNUDumpDir":return this[m](t)}}[y](e,t,i){s[i](t,e.absolute,t=>{if(t)return this[E](t,e);this[x](),e.resume()})}}M.Sync=class extends M{constructor(e){super(e)}[u](e){const t=this[S](a.dirname(e.absolute),this.dmode);if(t)return this[E](t,e);try{const i=s.lstatSync(e.absolute);if(this.keep||this.newer&&i.mtime>e.mtime)return this[R](e);if(this[f](e,i))return this[d](null,e);try{return i.isDirectory()?"Directory"===e.type?e.mode&&(4095&i.mode)!==e.mode&&s.chmodSync(e.absolute,e.mode):s.rmdirSync(e.absolute):I(e.absolute),this[d](null,e)}catch(t){return this[E](t,e)}}catch(t){return this[d](null,e)}}[p](e){const t=4095&e.mode||this.fmode,i=t=>{try{s.closeSync(n)}catch(e){}t&&this[E](t,e)};let n;try{n=s.openSync(e.absolute,"w",t)}catch(e){return i(e)}const r=this.transform&&this.transform(e)||e;r!==e&&(r.on("error",t=>this[E](t,e)),e.pipe(r)),r.on("data",e=>{try{s.writeSync(n,e,0,e.length)}catch(e){i(e)}}),r.on("end",t=>{let r=null;if(e.mtime&&!this.noMtime){const t=e.atime||new Date,i=e.mtime;try{s.futimesSync(n,t,i)}catch(n){try{s.utimesSync(e.absolute,t,i)}catch(e){r=n}}}if(this[j](e)){const t=this[P](e),i=this[F](e);try{s.fchownSync(n,t,i)}catch(n){try{s.chownSync(e.absolute,t,i)}catch(e){r=r||n}}}i(r)})}[m](e){const t=4095&e.mode||this.dmode,i=this[S](e.absolute,t);if(i)return this[E](i,e);if(e.mtime&&!this.noMtime)try{s.utimesSync(e.absolute,e.atime||new Date,e.mtime)}catch(i){}if(this[j](e))try{s.chownSync(e.absolute,this[P](e),this[F](e))}catch(i){}e.resume()}[S](e,t){try{return 
c.sync(e,{uid:this.uid,gid:this.gid,processUid:this.processUid,processGid:this.processGid,umask:this.processUmask,preserve:this.preservePaths,unlink:this.unlink,cache:this.dirCache,cwd:this.cwd,mode:t})}catch(e){return e}}[y](e,t,i){try{s[i+"Sync"](t,e.absolute),e.resume()}catch(t){return this[E](t,e)}}},e.exports=M},function(e,t,i){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),t.TOKEN_REF="$jsii.byref",t.TOKEN_DATE="$jsii.date",t.TOKEN_ENUM="$jsii.enum";t.ObjRef=class{}},function(e){e.exports={name:"jsii-runtime",version:"0.7.8",description:"jsii runtime kernel process",main:"lib/index.js",types:"lib/index.d.ts",bin:{"jsii-runtime":"bin/jsii-runtime"},scripts:{build:"tsc && chmod +x bin/jsii-runtime && /bin/bash ./bundle.sh",watch:"tsc -w",test:"/bin/bash test/playback-test.sh",package:"package-js"},devDependencies:{"@scope/jsii-calc-base":"^0.7.8","@scope/jsii-calc-lib":"^0.7.8","@types/node":"^9.6.18","jsii-build-tools":"^0.7.8","jsii-calc":"^0.7.8",nodeunit:"^0.11.3","source-map-loader":"^0.2.4",typescript:"^3.1.1","wasm-loader":"^1.3.0",webpack:"^4.12.0","webpack-command":"^0.2.1"},dependencies:{"jsii-kernel":"^0.7.8","jsii-spec":"^0.7.8"},author:{name:"Amazon Web Services",url:"https://aws.amazon.com"},license:"Apache-2.0",repository:{type:"git",url:"https://github.com/awslabs/jsii.git"}}},function(e,t,i){"use strict";Object.defineProperty(t,"__esModule",{value:!0});const n=i(43),r=i(99),s=i(41).name,o=i(41).version,a=!!process.env.JSII_NOSTACK,c=!!process.env.JSII_DEBUG,h=new r.InputOutput,l=new n.KernelHost(h,{debug:c,noStack:a});h.write({hello:`${s}@${o}`}),h.debug=c,l.run()},function(e,t,i){"use strict";Object.defineProperty(t,"__esModule",{value:!0});const n=i(44);t.KernelHost=class{constructor(e,t={}){this.inout=e,this.opts=t,this.kernel=new n.Kernel(e=>this.callbackHandler(e)),this.kernel.traceEnabled=!!t.debug}callbackHandler(e){this.inout.write({callback:e});const t=this;return function i(){const n=t.inout.read();if(!n)throw new Error("Interrupted before callback returned");const r=n;if("complete"in r&&r.complete.cbid===e.cbid){if(r.complete.err)throw new Error(r.complete.err);return r.complete.result}return t.processRequest(n,i,!0)}()}run(){const e=this.inout.read();e&&this.processRequest(e,()=>this.run())}processRequest(e,t,i=!1){if("callback"in e)throw new Error("Unexpected `callback` result. 
This request should have been processed by a callback handler");if(!("api"in e))throw new Error('Malformed request, "api" field is required');const n=e,r=this.findApi(n.api);try{const i=r.call(this.kernel,e);if("begin"===n.api||"complete"===n.api)return s(),this.debug("processing pending promises before responding"),void setImmediate(()=>{this.writeOkay(i),t()});if(this.isPromise(i))return s(),this.debug("waiting for promise to be fulfilled"),void i.then(e=>{this.debug("promise succeeded:",e),this.writeOkay(e),t()}).catch(e=>{this.debug("promise failed:",e),this.writeError(e),t()});this.writeOkay(i)}catch(e){this.writeError(e)}return t();function s(){if(i)throw new Error("Cannot handle async operations while waiting for a sync callback to return")}}writeOkay(e){const t={ok:e};this.inout.write(t)}writeError(e){const t={error:e.message,stack:void 0};this.opts.noStack||(t.stack=e.stack),this.inout.write(t)}isPromise(e){return e&&e.then&&"function"==typeof e.then}findApi(e){const t=this.kernel[e];if("function"!=typeof t)throw new Error("Invalid kernel api call: "+e);return t}debug(...e){this.opts.debug&&console.error(...e)}}},function(e,t,i){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),function(e){for(var i in e)t.hasOwnProperty(i)||(t[i]=e[i])}(i(45));const n=i(40);t.api=n},function(e,t,i){"use strict";(function(e){Object.defineProperty(t,"__esModule",{value:!0});const n=i(47),r=i(76),s=i(29),o=i(0),a=i(84),c=i(97),h=i(40),l="$__jsii__objid__$",u="$__jsii__fqn__$",f="$__jsii__proxies__$",d="$__jsii__proxy_referent__$",p="Object";t.Kernel=class{constructor(t){this.callbackHandler=t,this.traceEnabled=!1,this.assemblies={},this.objects={},this.cbs={},this.waiting={},this.promises={},this.nextid=1e4,this.sourceMaps={};const n=i(98).Module._load;this.sandbox=c.createContext({Buffer:Buffer,setImmediate:setImmediate,require:t=>n(t,e,!1)})}async load(e){if(this._debug("load",e),"assembly"in e)throw new Error('`assembly` field is deprecated for "load", use `name`, `version` and `tarball` instead');this.installDir||(this.installDir=await n.mkdtemp(o.join(s.tmpdir(),"jsii-kernel-")),await n.mkdirp(o.join(this.installDir,"node_modules")),this._debug("creating jsii-kernel modules workdir:",this.installDir),process.on("exit",()=>{this.installDir&&(this._debug("removing install dir",this.installDir),n.removeSync(this.installDir))}));const t=e.name,i=e.version,c=o.join(this.installDir,"node_modules",t);if(await n.pathExists(c)){const e=await n.readJson(o.join(c,"package.json"));if(e.version!==i)throw new Error(`Multiple versions ${i} and ${e.version} of the `+`package '${t}' cannot be loaded together since this is unsupported by `+"some runtime environments");this._debug("look up already-loaded assembly",t);const r=this.assemblies[t];return{assembly:r.metadata.name,types:Object.keys(r.metadata.types||{}).length}}{const t=await n.mkdtemp(o.join(s.tmpdir(),"jsii-kernel-install-staging-"));try{await a.extract({strict:!0,file:e.tarball,cwd:t});const i=o.join(t,"package",r.SPEC_FILE_NAME);if(!await n.pathExists(i))throw new Error(`Package tarball ${e.tarball} must have a file named ${r.SPEC_FILE_NAME} at the root`);const s=await n.readJson(i);await n.move(o.join(t,"package"),c);const h=this._execute(`require(String.raw\`${c}\`)`,c),l=new m(s,h);return this._addAssembly(l),{assembly:s.name,types:Object.keys(s.types||{}).length}}finally{this._debug("removing staging directory:",t),await n.remove(t)}}}create(e){return this._create(e)}del(e){const{objref:t}=e;this._debug("del",t);const 
i=this._findObject(t);return delete this.objects[t[h.TOKEN_REF]],i[d]&&delete i[d][f][i[u]],{}}sget(e){const{fqn:t,property:i}=e,n=`${t}.${i}`;this._debug("sget",n);const r=this._typeInfoForProperty(t,i);if(!r.static)throw new Error(`property ${n} is not static`);const s=this._findSymbol(t),o=this._ensureSync(`property ${i}`,()=>this._wrapSandboxCode(()=>s[i]));this._debug("value:",o);const a=this._fromSandbox(o,r.type);return this._debug("ret",a),{value:a}}sset(e){const{fqn:t,property:i,value:n}=e,r=`${t}.${i}`;this._debug("sset",r);const s=this._typeInfoForProperty(t,i);if(!s.static)throw new Error(`property ${r} is not static`);if(s.immutable)throw new Error(`static property ${r} is readonly`);const o=this._findSymbol(t);return this._ensureSync(`property ${i}`,()=>this._wrapSandboxCode(()=>o[i]=this._toSandbox(n))),{}}get(e){const{objref:t,property:i}=e;this._debug("get",t,i);const n=this._findObject(t),r=this._fqnForObject(n),s=this._typeInfoForProperty(r,i),o=this._findPropertyTarget(n,i),a=this._ensureSync(`property '${t[h.TOKEN_REF]}.${o}'`,()=>this._wrapSandboxCode(()=>n[o]));this._debug("value:",a);const c=this._fromSandbox(a,s.type);return this._debug("ret:",c),{value:c}}set(e){const{objref:t,property:i,value:n}=e;this._debug("set",t,i,n);const r=this._findObject(t),s=this._fqnForObject(r);if(this._typeInfoForProperty(s,e.property).immutable)throw new Error(`Cannot set value of immutable property ${e.property} to ${e.value}`);const o=this._findPropertyTarget(r,i);return this._ensureSync(`property '${t[h.TOKEN_REF]}.${o}'`,()=>this._wrapSandboxCode(()=>r[o]=this._toSandbox(n))),{}}invoke(e){const{objref:t,method:i}=e,n=e.args||[];this._debug("invoke",t,i,n);const{ti:r,obj:s,fn:o}=this._findInvokeTarget(t,i,n);if(r.returns&&r.returns.promise)throw new Error(`${i} is an async method, use "begin" instead`);const a=this._ensureSync(`method '${t[h.TOKEN_REF]}.${i}'`,()=>this._wrapSandboxCode(()=>o.apply(s,this._toSandboxValues(n))));return{result:this._fromSandbox(a,r.returns)}}sinvoke(e){const{fqn:t,method:i}=e,n=e.args||[];this._debug("sinvoke",t,i,n);const r=this._typeInfoForMethod(t,i);if(!r.static)throw new Error(`${t}.${i} is not a static method`);if(r.returns&&r.returns.promise)throw new Error(`${i} is an async method, use "begin" instead`);const s=this._findSymbol(t)[i],o=this._ensureSync(`method '${t}.${i}'`,()=>this._wrapSandboxCode(()=>s.apply(null,this._toSandboxValues(n))));return this._debug("method returned:",o),{result:this._fromSandbox(o,r.returns)}}begin(e){const{objref:t,method:i}=e,n=e.args||[];if(this._debug("begin",t,i,n),this.syncInProgress)throw new Error(`Cannot invoke async method '${e.objref[h.TOKEN_REF]}.${e.method}' while sync ${this.syncInProgress} is being processed`);const{ti:r,obj:s,fn:o}=this._findInvokeTarget(t,i,n);if(!r.returns||!r.returns.promise)throw new Error(`Method ${i} is expected to be an async method`);const a=this._wrapSandboxCode(()=>o.apply(s,this._toSandboxValues(n)));a.catch(e=>void 0);const c=this._makeprid();return this.promises[c]={promise:a,method:r},{promiseid:c}}async end(e){const{promiseid:t}=e;this._debug("end",t);const{promise:i,method:n}=this.promises[t];if(!i)throw new Error(`Cannot find promise with ID: ${t}`);let r;try{r=await i,this._debug("promise result:",r)}catch(e){throw this._debug("promise error:",e),y(e,this.sourceMaps)}return{result:this._fromSandbox(r,n.returns)}}callbacks(e){this._debug("callbacks");const t=Object.keys(this.cbs).map(e=>{const t=this.cbs[e];return 
this.waiting[e]=t,{cbid:e,cookie:t.override.cookie,invoke:{objref:t.objref,method:t.override.method,args:t.args}}});return this.cbs={},{callbacks:t}}complete(e){const{cbid:t,err:i,result:n}=e;if(this._debug("complete",t,i,n),!(t in this.waiting))throw new Error(`Callback ${t} not found`);const r=this.waiting[t];if(i)this._debug("completed with error:",i),r.fail(new Error(i));else{const e=this._toSandbox(n);this._debug("completed with result:",e),r.succeed(e)}return delete this.waiting[t],{cbid:t}}naming(e){const t=e.assembly;this._debug("naming",t);const i=this._assemblyFor(t).metadata.targets;if(!i)throw new Error(`Unexpected - "targets" for ${t} is missing!`);return{naming:i}}stats(e){return{objectCount:Object.keys(this.objects).length}}_addAssembly(e){this.assemblies[e.metadata.name]=e;for(const t of Object.keys(e.metadata.types||{}))switch(e.metadata.types[t].kind){case r.TypeKind.Interface:continue;case r.TypeKind.Class:case r.TypeKind.Enum:this._findSymbol(t).__jsii__={fqn:t}}}_findCtor(e,t){if(e===p)return Object;const i=this._typeInfoForFqn(e);switch(i.kind){case r.TypeKind.Class:const n=i;return this._validateMethodArguments(n.initializer,t),this._findSymbol(e);case r.TypeKind.Interface:return Object;default:throw new Error(`Unexpected FQN kind: ${e}`)}}_create(e){const{fqn:t,overrides:i}=e,n=e.args||[],r=this._findCtor(t,n),s=this._wrapSandboxCode(()=>new r(...this._toSandboxValues(n))),o=this._createObjref(s,t);if(i){this._debug("overrides",i);const e='Override can either be "method" or "property"',n=new Set,r=new Set;for(const a of i)if(a.method){if(a.property)throw new Error(e);if(n.has(a.method))throw new Error(`Duplicate override for method '${a.method}'`);let i;if(n.add(a.method),t!==p){if(this._tryTypeInfoForProperty(t,a.method))throw new Error(`Trying to override property '${a.method}' as a method`);i=this._tryTypeInfoForMethod(t,a.method)}this._applyMethodOverride(s,o,a,i)}else{if(!a.property)throw new Error(e);{if(a.method)throw new Error(e);if(r.has(a.property))throw Error(`Duplicate override for property '${a.property}'`);let i;if(r.add(a.property),t!==p){if(this._tryTypeInfoForMethod(t,a.property))throw new Error(`Trying to override method '${a.property}' as a property`);i=this._tryTypeInfoForProperty(t,a.property)}this._applyPropertyOverride(s,o,a,i)}}}return o}_getSuperPropertyName(e){return`$jsii$super$${e}$`}_applyPropertyOverride(e,t,i,n){const r=this,s=i.property;if(!n&&s in e)return void this._debug(`Skipping override of private property ${s}`);this._debug("apply override",s);const o=Object.getOwnPropertyDescriptor(e,s)||{value:void 0,writable:!0,enumerable:!0,configurable:!0},a=o.enumerable;o.enumerable=!1,Object.defineProperty(e,this._getSuperPropertyName(s),o),Object.defineProperty(e,s,{enumerable:a,configurable:o.configurable,get:()=>{const e=r.callbackHandler({cookie:i.cookie,cbid:r._makecbid(),get:{objref:t,property:s}});return this._debug("callback returned",e),this._toSandbox(e)},set:e=>{r._debug("virtual set",t,s,{cookie:i.cookie}),r.callbackHandler({cookie:i.cookie,cbid:r._makecbid(),set:{objref:t,property:s,value:r._fromSandbox(e)}})}})}_applyMethodOverride(e,t,i,n){const r=this,s=i.method;n||!e[s]?n&&n.returns&&n.returns.promise?Object.defineProperty(e,s,{enumerable:!1,configurable:!1,writable:!1,value:(...e)=>{r._debug("invoked async override",i);const n=r._toSandboxValues(e);return new Promise((e,s)=>{const o=r._makecbid();r._debug("adding callback to 
queue",o),r.cbs[o]={objref:t,override:i,args:n,succeed:e,fail:s}})}}):Object.defineProperty(e,s,{enumerable:!1,configurable:!1,writable:!1,value:(...e)=>{const n=r.callbackHandler({cookie:i.cookie,cbid:r._makecbid(),invoke:{objref:t,method:s,args:this._fromSandbox(e)}});return this._toSandbox(n)}}):this._debug(`Skipping override of private method ${s}`)}_findInvokeTarget(e,t,i){const n=this._findObject(e),r=this._fqnForObject(n),s=this._typeInfoForMethod(r,t);this._validateMethodArguments(s,i);let o=n.constructor.prototype[t];if(!o&&!(o=n[t]))throw new Error(`Cannot find ${t} on object`);return{ti:s,obj:n,fn:o}}_formatTypeRef(e){if(r.isCollectionTypeReference(e))return`${e.collection.kind}<${this._formatTypeRef(e.collection.elementtype)}>`;if(r.isNamedTypeReference(e))return e.fqn;if(r.isPrimitiveTypeReference(e))return e.primitive;if(r.isUnionTypeReference(e))return e.union.types.map(e=>this._formatTypeRef(e)).join(" | ");throw new Error(`Invalid type reference: ${JSON.stringify(e)}`)}_validateMethodArguments(e,t){const i=e&&e.parameters||[];if(t.length>i.length&&(!e||!e.variadic))throw new Error(`Too many arguments (method accepts ${i.length} parameters, got ${t.length} arguments)`);for(let e=0;ee.fqn)}}for(const e of n||[])if(e.name===t)return e;for(const e of s){const i=this._tryTypeInfoForProperty(e,t);if(i)return i}}_typeInfoForProperty(e,t){const i=this._tryTypeInfoForProperty(e,t);if(!i)throw new Error(`Type ${e} doesn't have a property '${t}'`);return i}_toSandbox(e){if(void 0!==e){if(null===e)return null;if("object"==typeof e&&h.TOKEN_REF in e)return this._findObject(e);if("object"==typeof e&&h.TOKEN_DATE in e)return this._debug("Found date:",e),new Date(e[h.TOKEN_DATE]);if("object"==typeof e&&h.TOKEN_ENUM in e){this._debug("Enum:",e);const t=e[h.TOKEN_ENUM],i=t.lastIndexOf("/");if(-1===i)throw new Error(`Malformed enum value: ${e[h.TOKEN_ENUM]}`);const n=t.substr(0,i),r=t.substr(i+1),s=this._findSymbol(n)[r];if(void 0===s)throw new Error(`No enum member named ${r} in ${n}`);return this._debug("resolved enum value:",s),s}if(Array.isArray(e))return e.map(e=>this._toSandbox(e));if("object"==typeof e){const t={};for(const i of Object.keys(e))t[i]=this._toSandbox(e[i]);return t}return e}}_fromSandbox(e,t){if(this._debug("fromSandbox",e,t),void 0===e)return;const i=e[l];if(i)return this._debug("objref exists",i),{[h.TOKEN_REF]:i};if("object"==typeof e&&e.constructor.__jsii__){this._debug("creating objref for",e);const t=this._fqnForObject(e);return this._createObjref(e,t)}if("object"==typeof e&&t&&r.isNamedTypeReference(t)){this._debug("coalescing to",t);const i=e[f]=e[f]||{};if(!i[t.fqn]){const n=new b(e),r=new Proxy(e,n);i[t.fqn]={objRef:this._createObjref(r,t.fqn),handler:n}}return i[t.fqn].objRef}if("object"==typeof e&&"[object Date]"===Object.prototype.toString.call(e))return this._debug("date",e),{[h.TOKEN_DATE]:e.toISOString()};if(Array.isArray(e))return this._debug("array",e),e.map(e=>this._fromSandbox(e));if(t&&r.isNamedTypeReference(t)){const i=this._typeInfoForFqn(t.fqn);if(i.kind===r.TypeKind.Enum){this._debug("enum",e);const t=i.fqn,n=this._findSymbol(t)[e];return{[h.TOKEN_ENUM]:`${i.fqn}/${n}`}}}if("object"==typeof e){this._debug("map",e);const t={};for(const i of Object.keys(e))t[i]=this._fromSandbox(e[i]);return t}return this._debug("primitive",e),e}_toSandboxValues(e){return e.map(e=>this._toSandbox(e))}_debug(...e){this.traceEnabled&&console.error.apply(console,["[jsii-kernel]",e[0],...e.slice(1)])}_ensureSync(e,t){this.syncInProgress=e;try{return t()}catch(e){throw 
e}finally{delete this.syncInProgress}}_findPropertyTarget(e,t){const i=this._getSuperPropertyName(t);return i in e?i:t}_fqnForObject(e){if(u in e)return e[u];if(!e.constructor.__jsii__)throw new Error("No jsii type info for object");return e.constructor.__jsii__.fqn}_mkobjid(e){return`${e}@${this.nextid++}`}_makecbid(){return`jsii::callback::${this.nextid++}`}_makeprid(){return`jsii::promise::${this.nextid++}`}_wrapSandboxCode(e){try{return e()}catch(e){throw y(e,this.sourceMaps)}}_execute(e,t){const i=new c.Script(e,{filename:t});try{return i.runInContext(this.sandbox,{displayErrors:!0})}catch(e){throw y(e,this.sourceMaps)}}};class m{constructor(e,t){this.metadata=e,this.closure=t}}function y(e,t){if(!e.stack)return e;const i=e.stack.split("\n"),n={stack:""},r=Error.stackTraceLimit;try{Error.stackTraceLimit=e.stack.split("\n").length,Error.captureStackTrace(n,y);const s=n.stack.split("\n").slice(1),o=s[0].substring(0,s[0].indexOf(" ("));return e.stack=[...i.slice(0,i.findIndex(e=>e.startsWith(o))).map(function(e){const i=/^(\s*at\s+.+)\(jsii\/(.+)\.js:(\d+):(\d+)\)$/.exec(e);if(!i)return e;const n=i[2];if(!(n in t))return e;const r=i[1],s=parseInt(i[3],10),o=parseInt(i[4],10),a=t[n].originalPositionFor({line:s,column:o});if(null!=a.source&&null!=a.line){const e=a.source.replace(/^webpack:\/\//,`${n}`);return`${r}(${e}:${a.line}:${a.column||0})`}return e}),...s].join("\n"),e}finally{Error.stackTraceLimit=r}}class b{constructor(e){this.referent=e,this.ownProperties={};for(const t of[u,l])Object.defineProperty(e,t,{configurable:!1,enumerable:!1,writable:!0,value:void 0})}defineProperty(e,t,i){switch(t){case u:case l:return Object.defineProperty(this.ownProperties,t,i);default:return Object.defineProperty(e,t,i)}}deleteProperty(e,t){switch(t){case u:case l:delete this.ownProperties[t];break;default:delete e[t]}return!0}getOwnPropertyDescriptor(e,t){switch(t){case u:case l:return Object.getOwnPropertyDescriptor(this.ownProperties,t);default:return Object.getOwnPropertyDescriptor(e,t)}}get(e,t){switch(t){case d:return this.referent;case u:case l:return this.ownProperties[t];default:return e[t]}}set(e,t,i){switch(t){case u:case l:this.ownProperties[t]=i;break;default:e[t]=i}return!0}has(e,t){switch(t){case u:case l:return t in this.ownProperties;default:return t in e}}ownKeys(e){return Reflect.ownKeys(e).concat(Reflect.ownKeys(this.ownProperties))}}}).call(this,i(46)(e))},function(e,t){e.exports=function(e){return e.webpackPolyfill||(e.deprecate=function(){},e.paths=[],e.children||(e.children=[]),Object.defineProperty(e,"loaded",{enumerable:!0,get:function(){return e.l}}),Object.defineProperty(e,"id",{enumerable:!0,get:function(){return e.i}}),e.webpackPolyfill=1),e}},function(e,t,i){"use strict";const n=i(48),r={};n(r,i(26)),n(r,i(54)),n(r,i(31)),n(r,i(4)),n(r,i(13)),n(r,i(62)),n(r,i(66)),n(r,i(67)),n(r,i(68)),n(r,i(69)),n(r,i(75)),n(r,i(5)),e.exports=r},function(e,t,i){"use strict";e.exports=function(){const e=[].slice.call(arguments).filter(e=>e),t=e.shift();return e.forEach(e=>{Object.keys(e).forEach(i=>{t[i]=e[i]})}),t}},function(e,t,i){var n=i(27),r=i(50),s=process.cwd,o=null,a=process.env.GRACEFUL_FS_PLATFORM||process.platform;process.cwd=function(){return o||(o=s.call(process)),o};try{process.cwd()}catch(e){}var c=process.chdir;function h(e){return e?function(t,i,r){return e.call(n,t,i,function(e){m(e)&&(e=null),r&&r.apply(this,arguments)})}:e}function l(e){return e?function(t,i){try{return e.call(n,t,i)}catch(e){if(!m(e))throw e}}:e}function u(e){return e?function(t,i,r,s){return 
e.call(n,t,i,r,function(e){m(e)&&(e=null),s&&s.apply(this,arguments)})}:e}function f(e){return e?function(t,i,r){try{return e.call(n,t,i,r)}catch(e){if(!m(e))throw e}}:e}function d(e){return e?function(t,i){return e.call(n,t,function(e,t){if(!t)return i.apply(this,arguments);t.uid<0&&(t.uid+=4294967296),t.gid<0&&(t.gid+=4294967296),i&&i.apply(this,arguments)})}:e}function p(e){return e?function(t){var i=e.call(n,t);return i.uid<0&&(i.uid+=4294967296),i.gid<0&&(i.gid+=4294967296),i}:e}function m(e){return!e||("ENOSYS"===e.code||!(process.getuid&&0===process.getuid()||"EINVAL"!==e.code&&"EPERM"!==e.code))}process.chdir=function(e){o=null,c.call(process,e)},e.exports=function(e){r.hasOwnProperty("O_SYMLINK")&&process.version.match(/^v0\.6\.[0-2]|^v0\.5\./)&&function(e){e.lchmod=function(t,i,n){e.open(t,r.O_WRONLY|r.O_SYMLINK,i,function(t,r){t?n&&n(t):e.fchmod(r,i,function(t){e.close(r,function(e){n&&n(t||e)})})})},e.lchmodSync=function(t,i){var n,s=e.openSync(t,r.O_WRONLY|r.O_SYMLINK,i),o=!0;try{n=e.fchmodSync(s,i),o=!1}finally{if(o)try{e.closeSync(s)}catch(e){}else e.closeSync(s)}return n}}(e);e.lutimes||function(e){r.hasOwnProperty("O_SYMLINK")?(e.lutimes=function(t,i,n,s){e.open(t,r.O_SYMLINK,function(t,r){t?s&&s(t):e.futimes(r,i,n,function(t){e.close(r,function(e){s&&s(t||e)})})})},e.lutimesSync=function(t,i,n){var s,o=e.openSync(t,r.O_SYMLINK),a=!0;try{s=e.futimesSync(o,i,n),a=!1}finally{if(a)try{e.closeSync(o)}catch(e){}else e.closeSync(o)}return s}):(e.lutimes=function(e,t,i,n){n&&process.nextTick(n)},e.lutimesSync=function(){})}(e);e.chown=u(e.chown),e.fchown=u(e.fchown),e.lchown=u(e.lchown),e.chmod=h(e.chmod),e.fchmod=h(e.fchmod),e.lchmod=h(e.lchmod),e.chownSync=f(e.chownSync),e.fchownSync=f(e.fchownSync),e.lchownSync=f(e.lchownSync),e.chmodSync=l(e.chmodSync),e.fchmodSync=l(e.fchmodSync),e.lchmodSync=l(e.lchmodSync),e.stat=d(e.stat),e.fstat=d(e.fstat),e.lstat=d(e.lstat),e.statSync=p(e.statSync),e.fstatSync=p(e.fstatSync),e.lstatSync=p(e.lstatSync),e.lchmod||(e.lchmod=function(e,t,i){i&&process.nextTick(i)},e.lchmodSync=function(){});e.lchown||(e.lchown=function(e,t,i,n){n&&process.nextTick(n)},e.lchownSync=function(){});"win32"===a&&(e.rename=function(t){return function(i,n,r){var s=Date.now(),o=0;t(i,n,function a(c){if(c&&("EACCES"===c.code||"EPERM"===c.code)&&Date.now()-s<6e4)return setTimeout(function(){e.stat(n,function(e,s){e&&"ENOENT"===e.code?t(i,n,a):r(c)})},o),void(o<100&&(o+=10));r&&r(c)})}}(e.rename));e.read=function(t){return function(i,n,r,s,o,a){var c;if(a&&"function"==typeof a){var h=0;c=function(l,u,f){if(l&&"EAGAIN"===l.code&&h<10)return h++,t.call(e,i,n,r,s,o,c);a.apply(this,arguments)}}return t.call(e,i,n,r,s,o,c)}}(e.read),e.readSync=function(t){return function(i,n,r,s,o){for(var a=0;;)try{return t.call(e,i,n,r,s,o)}catch(e){if("EAGAIN"===e.code&&a<10){a++;continue}throw e}}}(e.readSync)}},function(e,t){e.exports=require("constants")},function(e,t,i){var n=i(52).Stream;e.exports=function(e){return{ReadStream:function t(i,r){if(!(this instanceof t))return new t(i,r);n.call(this);var s=this;this.path=i;this.fd=null;this.readable=!0;this.paused=!1;this.flags="r";this.mode=438;this.bufferSize=65536;r=r||{};var o=Object.keys(r);for(var a=0,c=o.length;athis.end)throw new Error("start must be <= end");this.pos=this.start}if(null!==this.fd)return void process.nextTick(function(){s._read()});e.open(this.path,this.flags,this.mode,function(e,t){if(e)return s.emit("error",e),void(s.readable=!1);s.fd=t,s.emit("open",t),s._read()})},WriteStream:function t(i,r){if(!(this 
instanceof t))return new t(i,r);n.call(this);this.path=i;this.fd=null;this.writable=!0;this.flags="w";this.encoding="binary";this.mode=438;this.bytesWritten=0;r=r||{};var s=Object.keys(r);for(var o=0,a=s.length;o= zero");this.pos=this.start}this.busy=!1;this._queue=[];null===this.fd&&(this._open=e.open,this._queue.push([this._open,this.path,this.flags,this.mode,void 0]),this.flush())}}}},function(e,t){e.exports=require("stream")},function(e,t){e.exports=require("util")},function(e,t,i){const n=i(3).fromCallback;e.exports={copy:n(i(55))}},function(e,t,i){"use strict";const n=i(1),r=i(0),s=i(28),o=i(4),a=i(5).pathExists;e.exports=function(e,t,i,c){"function"!=typeof i||c?("function"==typeof i||i instanceof RegExp)&&(i={filter:i}):(c=i,i={}),c=c||function(){},(i=i||{}).preserveTimestamps&&"ia32"===process.arch&&console.warn("fs-extra: Using the preserveTimestamps option in 32-bit node is not recommended;\n\n see https://github.com/jprichardson/node-fs-extra/issues/269");const h=process.cwd();if(r.resolve(h,e)===r.resolve(h,t))return c(new Error("Source and destination must not be the same."));n.lstat(e,(n,h)=>{if(n)return c(n);let l=null;if(h.isDirectory()){const e=t.split(r.sep);e.pop(),l=e.join(r.sep)}else l=r.dirname(t);a(l,(n,r)=>n?c(n):r?s(e,t,i,c):void o.mkdirs(l,n=>{if(n)return c(n);s(e,t,i,c)}))})}},function(e,t,i){"use strict";const n=i(1),r=i(29),s=i(0);e.exports={hasMillisRes:function(e){let t=s.join("millis-test"+Date.now().toString()+Math.random().toString().slice(2));t=s.join(r.tmpdir(),t);const i=new Date(1435410243862);n.writeFile(t,"https://github.com/jprichardson/node-fs-extra/pull/141",r=>{if(r)return e(r);n.open(t,"r+",(r,s)=>{if(r)return e(r);n.futimes(s,i,i,i=>{if(i)return e(i);n.close(s,i=>{if(i)return e(i);n.stat(t,(t,i)=>{if(t)return e(t);e(null,i.mtime>1435410243e3)})})})})})},hasMillisResSync:function(){let e=s.join("millis-test-sync"+Date.now().toString()+Math.random().toString().slice(2));e=s.join(r.tmpdir(),e);const t=new Date(1435410243862);n.writeFileSync(e,"https://github.com/jprichardson/node-fs-extra/pull/141");const i=n.openSync(e,"r+");return n.futimesSync(i,t,t),n.closeSync(i),n.statSync(e).mtime>1435410243e3},timeRemoveMillis:function(e){if("number"==typeof e)return 1e3*Math.floor(e/1e3);if(e instanceof Date)return new Date(1e3*Math.floor(e.getTime()/1e3));throw new Error("fs-extra: timeRemoveMillis() unknown parameter type")},utimesMillis:function(e,t,i,r){n.open(e,"r+",(e,s)=>{if(e)return r(e);n.futimes(s,t,i,e=>{n.close(s,t=>{r&&r(e||t)})})})}}},function(e,t,i){"use strict";const n=i(1),r=i(0),s=i(30).invalidWin32Path,o=parseInt("0777",8);e.exports=function e(t,i,a,c){if("function"==typeof i?(a=i,i={}):i&&"object"==typeof i||(i={mode:i}),"win32"===process.platform&&s(t)){const e=new Error(t+" contains invalid WIN32 path characters.");return e.code="EINVAL",a(e)}let h=i.mode;const l=i.fs||n;void 0===h&&(h=o&~process.umask()),c||(c=null),a=a||function(){},t=r.resolve(t),l.mkdir(t,h,n=>{if(!n)return a(null,c=c||t);switch(n.code){case"ENOENT":if(r.dirname(t)===t)return a(n);e(r.dirname(t),i,(n,r)=>{n?a(n,r):e(t,i,a,r)});break;default:l.stat(t,(e,t)=>{e||!t.isDirectory()?a(n,c):a(null,c)})}})}},function(e,t,i){"use strict";const n=i(1),r=i(0),s=i(30).invalidWin32Path,o=parseInt("0777",8);e.exports=function e(t,i,a){i&&"object"==typeof i||(i={mode:i});let c=i.mode;const h=i.fs||n;if("win32"===process.platform&&s(t)){const e=new Error(t+" contains invalid WIN32 path characters.");throw e.code="EINVAL",e}void 
0===c&&(c=o&~process.umask()),a||(a=null),t=r.resolve(t);try{h.mkdirSync(t,c),a=a||t}catch(n){switch(n.code){case"ENOENT":if(r.dirname(t)===t)throw n;e(t,i,a=e(r.dirname(t),i,a));break;default:let s;try{s=h.statSync(t)}catch(e){throw n}if(!s.isDirectory())throw n}}return a}},function(e,t,i){"use strict";const n=i(1),r=i(0),s=i(60),o=i(4);e.exports=function e(t,i,a){("function"==typeof a||a instanceof RegExp)&&(a={filter:a}),(a=a||{}).recursive=!!a.recursive,a.clobber=!("clobber"in a&&!a.clobber),a.overwrite="overwrite"in a?!!a.overwrite:a.clobber,a.dereference="dereference"in a&&!!a.dereference,a.preserveTimestamps="preserveTimestamps"in a&&!!a.preserveTimestamps,a.filter=a.filter||function(){return!0},a.preserveTimestamps&&"ia32"===process.arch&&console.warn("fs-extra: Using the preserveTimestamps option in 32-bit node is not recommended;\n\n see https://github.com/jprichardson/node-fs-extra/issues/269");const c=a.recursive&&!a.dereference?n.lstatSync(t):n.statSync(t),h=r.dirname(i),l=n.existsSync(h);let u=!1;if(a.filter instanceof RegExp?(console.warn("Warning: fs-extra: Passing a RegExp filter is deprecated, use a function"),u=a.filter.test(t)):"function"==typeof a.filter&&(u=a.filter(t,i)),c.isFile()&&u)l||o.mkdirsSync(h),s(t,i,{overwrite:a.overwrite,errorOnExist:a.errorOnExist,preserveTimestamps:a.preserveTimestamps});else if(c.isDirectory()&&u)n.existsSync(i)||o.mkdirsSync(i),n.readdirSync(t).forEach(n=>{const s=a;s.recursive=!0,e(r.join(t,n),r.join(i,n),s)});else if(a.recursive&&c.isSymbolicLink()&&u){const e=n.readlinkSync(t);n.symlinkSync(e,i)}}},function(e,t,i){"use strict";const n=i(1),r=65536,s=i(32)(r);e.exports=function(e,t,i){const o=i.overwrite,a=i.errorOnExist,c=i.preserveTimestamps;if(n.existsSync(t)){if(!o){if(a)throw new Error(`${t} already exists`);return}n.unlinkSync(t)}const h=n.openSync(e,"r"),l=n.fstatSync(h),u=n.openSync(t,"w",l.mode);let f=1,d=0;for(;f>0;)f=n.readSync(h,s,0,r,d),n.writeSync(u,s,0,f),d+=f;c&&n.futimesSync(u,l.atime,l.mtime),n.closeSync(h),n.closeSync(u)}},function(e,t,i){"use strict";const n=i(1),r=i(0),s=i(12),o="win32"===process.platform;function a(e){["unlink","chmod","stat","lstat","rmdir","readdir"].forEach(t=>{e[t]=e[t]||n[t],e[t+="Sync"]=e[t]||n[t]}),e.maxBusyTries=e.maxBusyTries||3}function c(e,t,i){let n=0;"function"==typeof t&&(i=t,t={}),s(e,"rimraf: missing path"),s.equal(typeof e,"string","rimraf: path should be a string"),s.equal(typeof i,"function","rimraf: callback function required"),s(t,"rimraf: invalid options argument provided"),s.equal(typeof t,"object","rimraf: options should be object"),a(t),h(e,t,function r(s){if(s){if(("EBUSY"===s.code||"ENOTEMPTY"===s.code||"EPERM"===s.code)&&nh(e,t,r),100*n)}"ENOENT"===s.code&&(s=null)}i(s)})}function h(e,t,i){s(e),s(t),s("function"==typeof i),t.lstat(e,(n,r)=>n&&"ENOENT"===n.code?i(null):n&&"EPERM"===n.code&&o?l(e,t,n,i):r&&r.isDirectory()?f(e,t,n,i):void t.unlink(e,n=>{if(n){if("ENOENT"===n.code)return i(null);if("EPERM"===n.code)return o?l(e,t,n,i):f(e,t,n,i);if("EISDIR"===n.code)return f(e,t,n,i)}return i(n)}))}function l(e,t,i,n){s(e),s(t),s("function"==typeof n),i&&s(i instanceof Error),t.chmod(e,438,r=>{r?n("ENOENT"===r.code?null:i):t.stat(e,(r,s)=>{r?n("ENOENT"===r.code?null:i):s.isDirectory()?f(e,t,i,n):t.unlink(e,n)})})}function u(e,t,i){let n;s(e),s(t),i&&s(i instanceof Error);try{t.chmodSync(e,438)}catch(e){if("ENOENT"===e.code)return;throw i}try{n=t.statSync(e)}catch(e){if("ENOENT"===e.code)return;throw i}n.isDirectory()?p(e,t,i):t.unlinkSync(e)}function 
f(e,t,i,n){s(e),s(t),i&&s(i instanceof Error),s("function"==typeof n),t.rmdir(e,o=>{!o||"ENOTEMPTY"!==o.code&&"EEXIST"!==o.code&&"EPERM"!==o.code?o&&"ENOTDIR"===o.code?n(i):n(o):function(e,t,i){s(e),s(t),s("function"==typeof i),t.readdir(e,(n,s)=>{if(n)return i(n);let o,a=s.length;if(0===a)return t.rmdir(e,i);s.forEach(n=>{c(r.join(e,n),t,n=>{if(!o)return n?i(o=n):void(0==--a&&t.rmdir(e,i))})})})}(e,t,n)})}function d(e,t){let i;a(t=t||{}),s(e,"rimraf: missing path"),s.equal(typeof e,"string","rimraf: path should be a string"),s(t,"rimraf: missing options"),s.equal(typeof t,"object","rimraf: options should be object");try{i=t.lstatSync(e)}catch(i){if("ENOENT"===i.code)return;"EPERM"===i.code&&o&&u(e,t,i)}try{i&&i.isDirectory()?p(e,t,null):t.unlinkSync(e)}catch(i){if("ENOENT"===i.code)return;if("EPERM"===i.code)return o?u(e,t,i):p(e,t,i);if("EISDIR"!==i.code)throw i;p(e,t,i)}}function p(e,t,i){s(e),s(t),i&&s(i instanceof Error);try{t.rmdirSync(e)}catch(n){if("ENOTDIR"===n.code)throw i;if("ENOTEMPTY"===n.code||"EEXIST"===n.code||"EPERM"===n.code)!function(e,t){s(e),s(t),t.readdirSync(e).forEach(i=>d(r.join(e,i),t));const i=o?100:1;let n=0;for(;;){let r=!0;try{const s=t.rmdirSync(e,t);return r=!1,s}finally{if(++nn?a(n):s?o.writeJson(e,t,i,a):void r.mkdirs(c,n=>{if(n)return a(n);o.writeJson(e,t,i,a)}))}},function(e,t,i){"use strict";const n=i(1),r=i(0),s=i(4),o=i(19);e.exports=function(e,t,i){const a=r.dirname(e);n.existsSync(a)||s.mkdirsSync(a),o.writeJsonSync(e,t,i)}},function(e,t,i){"use strict";const n=i(3).fromCallback,r=i(1),s=i(28),o=i(0),a=i(13).remove,c=i(4).mkdirs;function h(e,t,i,n){r.stat(e,(s,o)=>{if(s)return n(s);o.isDirectory()?l(e,t,i,n):function(e,t,i,n){const s=i?"w":"wx",o=r.createReadStream(e),a=r.createWriteStream(t,{flags:s});function c(){r.unlink(e,n)}o.on("error",s=>{o.destroy(),a.destroy(),a.removeListener("close",c),r.unlink(t,()=>{"EISDIR"===s.code||"EPERM"===s.code?l(e,t,i,n):n(s)})}),a.on("error",e=>{o.destroy(),a.destroy(),a.removeListener("close",c),n(e)}),a.once("close",c),o.pipe(a)}(e,t,i,n)})}function l(e,t,i,n){const r={overwrite:!1};function o(){s(e,t,r,t=>{if(t)return n(t);a(e,n)})}i?a(t,e=>{if(e)return n(e);o()}):o()}e.exports={move:n(function e(t,i,n,s){"function"==typeof n&&(s=n,n={});const l=n.overwrite||n.clobber||!1;!function(e,t,i){r.stat(e,(n,r)=>{if(n)return i(n);if(r.isDirectory()){const n=t.split(o.dirname(e)+o.sep)[1];if(n){const r=n.split(o.sep)[0];return i(null,!!r&&(e!==t&&t.indexOf(e)>-1&&r===o.basename(e)))}return i(null,!1)}return i(null,!1)})}(t,i,(u,f)=>u?s(u):f?s(new Error(`Cannot move '${t}' to a subdirectory of itself, '${i}'.`)):void c(o.dirname(i),c=>{if(c)return s(c);o.resolve(t)===o.resolve(i)?r.access(t,s):l?r.rename(t,i,r=>{if(!r)return s();if("ENOTEMPTY"!==r.code&&"EEXIST"!==r.code){if("EPERM"!==r.code)return"EXDEV"!==r.code?s(r):void h(t,i,l,s);setTimeout(()=>{a(i,r=>{if(r)return s(r);n.overwrite=!1,e(t,i,n,s)})},200)}else a(i,r=>{if(r)return s(r);n.overwrite=!1,e(t,i,n,s)})}):r.link(t,i,e=>e?"EXDEV"===e.code||"EISDIR"===e.code||"EPERM"===e.code||"ENOTSUP"===e.code?h(t,i,l,s):s(e):r.unlink(t,s))}))})}},function(e,t,i){"use strict";const n=i(1),r=i(0),s=i(31).copySync,o=i(13).removeSync,a=i(4).mkdirsSync,c=i(32);function h(e,t,i){return n.statSync(e).isDirectory()?function(e,t,i){const n={overwrite:!1};i?(o(t),r()):r();function r(){return s(e,t,n),o(e)}}(e,t,i):function(e,t,i){const r=c(65536),s=i?"w":"wx",o=n.openSync(e,"r"),a=n.fstatSync(o),h=n.openSync(t,s,a.mode);let 
l=1,u=0;for(;l>0;)l=n.readSync(o,r,0,65536,u),n.writeSync(h,r,0,l),u+=l;return n.closeSync(o),n.closeSync(h),n.unlinkSync(e)}(e,t,i)}e.exports={moveSync:function e(t,i,s){const c=(s=s||{}).overwrite||s.clobber||!1;if(t=r.resolve(t),i=r.resolve(i),t===i)return n.accessSync(t);if(function(e,t){try{return n.statSync(e).isDirectory()&&e!==t&&t.indexOf(e)>-1&&t.split(r.dirname(e)+r.sep)[1].split(r.sep)[0]===r.basename(e)}catch(e){return!1}}(t,i))throw new Error(`Cannot move '${t}' into itself '${i}'.`);a(r.dirname(i)),function(){if(c)try{n.renameSync(t,i)}catch(n){if("ENOTEMPTY"===n.code||"EEXIST"===n.code||"EPERM"===n.code)return o(i),s.overwrite=!1,e(t,i,s);if("EXDEV"!==n.code)throw n;return h(t,i,c)}else try{n.linkSync(t,i),n.unlinkSync(t)}catch(e){if("EXDEV"===e.code||"EISDIR"===e.code||"EPERM"===e.code||"ENOTSUP"===e.code)return h(t,i,c);throw e}}()}}},function(e,t,i){"use strict";const n=i(3).fromCallback,r=i(2),s=i(0),o=i(4),a=i(13),c=n(function(e,t){t=t||function(){},r.readdir(e,(i,n)=>{if(i)return o.mkdirs(e,t);n=n.map(t=>s.join(e,t)),function e(){const i=n.pop();if(!i)return t();a.remove(i,i=>{if(i)return t(i);e()})}()})});function h(e){let t;try{t=r.readdirSync(e)}catch(t){return o.mkdirsSync(e)}t.forEach(t=>{t=s.join(e,t),a.removeSync(t)})}e.exports={emptyDirSync:h,emptydirSync:h,emptyDir:c,emptydir:c}},function(e,t,i){"use strict";const n=i(70),r=i(71),s=i(72);e.exports={createFile:n.createFile,createFileSync:n.createFileSync,ensureFile:n.createFile,ensureFileSync:n.createFileSync,createLink:r.createLink,createLinkSync:r.createLinkSync,ensureLink:r.createLink,ensureLinkSync:r.createLinkSync,createSymlink:s.createSymlink,createSymlinkSync:s.createSymlinkSync,ensureSymlink:s.createSymlink,ensureSymlinkSync:s.createSymlinkSync}},function(e,t,i){"use strict";const n=i(3).fromCallback,r=i(0),s=i(1),o=i(4),a=i(5).pathExists;e.exports={createFile:n(function(e,t){function i(){s.writeFile(e,"",e=>{if(e)return t(e);t()})}s.stat(e,(n,s)=>{if(!n&&s.isFile())return t();const c=r.dirname(e);a(c,(e,n)=>e?t(e):n?i():void o.mkdirs(c,e=>{if(e)return t(e);i()}))})}),createFileSync:function(e){let t;try{t=s.statSync(e)}catch(e){}if(t&&t.isFile())return;const i=r.dirname(e);s.existsSync(i)||o.mkdirsSync(i),s.writeFileSync(e,"")}}},function(e,t,i){"use strict";const n=i(3).fromCallback,r=i(0),s=i(1),o=i(4),a=i(5).pathExists;e.exports={createLink:n(function(e,t,i){function n(e,t){s.link(e,t,e=>{if(e)return i(e);i(null)})}a(t,(c,h)=>c?i(c):h?i(null):void s.lstat(e,(s,c)=>{if(s)return s.message=s.message.replace("lstat","ensureLink"),i(s);const h=r.dirname(t);a(h,(r,s)=>r?i(r):s?n(e,t):void o.mkdirs(h,r=>{if(r)return i(r);n(e,t)}))}))}),createLinkSync:function(e,t,i){if(s.existsSync(t))return;try{s.lstatSync(e)}catch(e){throw e.message=e.message.replace("lstat","ensureLink"),e}const n=r.dirname(t);return s.existsSync(n)?s.linkSync(e,t):(o.mkdirsSync(n),s.linkSync(e,t))}}},function(e,t,i){"use strict";const n=i(3).fromCallback,r=i(0),s=i(1),o=i(4),a=o.mkdirs,c=o.mkdirsSync,h=i(73),l=h.symlinkPaths,u=h.symlinkPathsSync,f=i(74),d=f.symlinkType,p=f.symlinkTypeSync,m=i(5).pathExists;e.exports={createSymlink:n(function(e,t,i,n){n="function"==typeof i?i:n,i="function"!=typeof i&&i,m(t,(o,c)=>o?n(o):c?n(null):void l(e,t,(o,c)=>{if(o)return n(o);e=c.toDst,d(c.toCwd,i,(i,o)=>{if(i)return n(i);const c=r.dirname(t);m(c,(i,r)=>i?n(i):r?s.symlink(e,t,o,n):void a(c,i=>{if(i)return n(i);s.symlink(e,t,o,n)}))})}))}),createSymlinkSync:function(e,t,i,n){if(n="function"==typeof i?i:n,i="function"!=typeof 
i&&i,s.existsSync(t))return;const o=u(e,t);e=o.toDst,i=p(o.toCwd,i);const a=r.dirname(t);return s.existsSync(a)?s.symlinkSync(e,t,i):(c(a),s.symlinkSync(e,t,i))}}},function(e,t,i){"use strict";const n=i(0),r=i(1),s=i(5).pathExists;e.exports={symlinkPaths:function(e,t,i){if(n.isAbsolute(e))return r.lstat(e,(t,n)=>t?(t.message=t.message.replace("lstat","ensureSymlink"),i(t)):i(null,{toCwd:e,toDst:e}));{const o=n.dirname(t),a=n.join(o,e);return s(a,(t,s)=>t?i(t):s?i(null,{toCwd:a,toDst:e}):r.lstat(e,(t,r)=>t?(t.message=t.message.replace("lstat","ensureSymlink"),i(t)):i(null,{toCwd:e,toDst:n.relative(o,e)})))}},symlinkPathsSync:function(e,t){let i;if(n.isAbsolute(e)){if(!(i=r.existsSync(e)))throw new Error("absolute srcpath does not exist");return{toCwd:e,toDst:e}}{const s=n.dirname(t),o=n.join(s,e);if(i=r.existsSync(o))return{toCwd:o,toDst:e};if(!(i=r.existsSync(e)))throw new Error("relative srcpath does not exist");return{toCwd:e,toDst:n.relative(s,e)}}}}},function(e,t,i){"use strict";const n=i(1);e.exports={symlinkType:function(e,t,i){if(i="function"==typeof t?t:i,t="function"!=typeof t&&t)return i(null,t);n.lstat(e,(e,n)=>{if(e)return i(null,"file");t=n&&n.isDirectory()?"dir":"file",i(null,t)})},symlinkTypeSync:function(e,t){let i;if(t)return t;try{i=n.lstatSync(e)}catch(e){return"file"}return i&&i.isDirectory()?"dir":"file"}}},function(e,t,i){"use strict";const n=i(3).fromCallback,r=i(1),s=i(0),o=i(4),a=i(5).pathExists;e.exports={outputFile:n(function(e,t,i,n){"function"==typeof i&&(n=i,i="utf8");const c=s.dirname(e);a(c,(s,a)=>s?n(s):a?r.writeFile(e,t,i,n):void o.mkdirs(c,s=>{if(s)return n(s);r.writeFile(e,t,i,n)}))}),outputFileSync:function(e,t,i){const n=s.dirname(e);if(r.existsSync(n))return r.writeFileSync.apply(r,arguments);o.mkdirsSync(n),r.writeFileSync.apply(r,arguments)}}},function(e,t,i){"use strict";function n(e){for(var i in e)t.hasOwnProperty(i)||(t[i]=e[i])}Object.defineProperty(t,"__esModule",{value:!0}),n(i(77)),n(i(78)),n(i(79))},function(e,t,i){"use strict";Object.defineProperty(t,"__esModule",{value:!0});class n{constructor(){this._children={}}static of(e){const t=new n;for(const i of Object.values(e.types||{}))t.register(i.fqn);return t}get children(){return this._children}get fqn(){return this._fqn}register(e,t=e.split(".")){if(0===t.length)this._fqn=e;else{const[i,...r]=t;this._children[i]||(this._children[i]=new n),this._children[i].register(e,r)}return this}}t.NameTree=n},function(e,t,i){"use strict";var n;function r(e){return null!=e&&!!e.fqn}function s(e){return null!=e&&!!e.primitive}function o(e){return null!=e&&!!e.collection}function a(e){return null!=e&&!!e.union}function c(e){return null!=e&&e.kind===n.Class}function h(e){return null!=e&&e.kind===n.Interface}Object.defineProperty(t,"__esModule",{value:!0}),t.SPEC_FILE_NAME=".jsii",function(e){e.V1_0="jsii/1.0"}(t.SchemaVersion||(t.SchemaVersion={})),function(e){e.Array="array",e.Map="map"}(t.CollectionKind||(t.CollectionKind={})),function(e){e.Date="date",e.String="string",e.Number="number",e.Boolean="boolean",e.Json="json",e.Any="any"}(t.PrimitiveType||(t.PrimitiveType={})),t.isNamedTypeReference=r,t.isPrimitiveTypeReference=s,t.isCollectionTypeReference=o,t.isUnionTypeReference=a,function(e){e.Class="class",e.Enum="enum",e.Interface="interface"}(n=t.TypeKind||(t.TypeKind={})),t.isClassType=c,t.isInterfaceType=h,t.isEnumType=function(e){return null!=e&&e.kind===n.Enum},t.isClassOrInterfaceType=function(e){return c(e)||h(e)},t.describeTypeReference=function e(t){if(void 0===t)return"(none)";const 
i=t.optional?"?":"";if(r(t))return`${t.fqn}${i}`;if(s(t))return`${t.primitive}${i}`;if(o(t))return`${t.collection.kind}<${e(t.collection.elementtype)}>${i}`;if(a(t)){const n=t.union.types.map(e).join(" | ");return t.optional?`(${n})${i}`:n}throw new Error("Unrecognized type reference")}},function(e,t,i){"use strict";Object.defineProperty(t,"__esModule",{value:!0});const n=i(80);t.schema=i(83),t.validateAssembly=function(e){const i=(new n.Validator).validate(e,t.schema);if(i.valid)return e;throw new Error(`Invalid assembly:\n${i}`)}},function(e,t,i){"use strict";var n=e.exports.Validator=i(81);e.exports.ValidatorResult=i(6).ValidatorResult,e.exports.ValidationError=i(6).ValidationError,e.exports.SchemaError=i(6).SchemaError,e.exports.SchemaScanResult=i(21).SchemaScanResult,e.exports.scan=i(21).scan,e.exports.validate=function(e,t,i){return(new n).validate(e,t,i)}},function(e,t,i){"use strict";var n=i(20),r=i(82),s=i(6),o=i(21).scan,a=s.ValidatorResult,c=s.SchemaError,h=s.SchemaContext,l=function e(){this.customFormats=Object.create(e.prototype.customFormats),this.schemas={},this.unresolvedRefs=[],this.types=Object.create(f),this.attributes=Object.create(r.validators)};function u(e){var t="string"==typeof e?e:e.$ref;return"string"==typeof t&&t}l.prototype.customFormats={},l.prototype.schemas=null,l.prototype.types=null,l.prototype.attributes=null,l.prototype.unresolvedRefs=null,l.prototype.addSchema=function(e,t){var i=this;if(!e)return null;var n=o(t||"/",e),r=t||e.id;for(var s in n.id)this.schemas[s]=n.id[s];for(var s in n.ref)this.unresolvedRefs.push(s);return this.unresolvedRefs=this.unresolvedRefs.filter(function(e){return void 0===i.schemas[e]}),this.schemas[r]},l.prototype.addSubSchemaArray=function(e,t){if(t instanceof Array)for(var i=0;i",e);var h=s.objectGetPath(i.schemas[a],o.substr(1));if(void 0===h)throw new c("no such schema "+o+" located in <"+a+">",e);return{subschema:h,switchSchema:t}},l.prototype.testType=function(e,t,i,n,r){if("function"==typeof this.types[r])return this.types[r].call(this,e);if(r&&"object"==typeof r){var s=this.validateSchema(e,r,i,n);return void 0===s||!(s&&s.errors.length)}return!0};var f=l.prototype.types={};f.string=function(e){return"string"==typeof e},f.number=function(e){return"number"==typeof e&&isFinite(e)},f.integer=function(e){return"number"==typeof e&&e%1==0},f.boolean=function(e){return"boolean"==typeof e},f.array=function(e){return Array.isArray(e)},f.null=function(e){return null===e},f.date=function(e){return e instanceof Date},f.any=function(e){return!0},f.object=function(e){return e&&"object"==typeof e&&!(e instanceof Array)&&!(e instanceof Date)},e.exports=l},function(e,t,i){"use strict";var n=i(6),r=n.ValidatorResult,s=n.SchemaError,o={ignoreProperties:{id:!0,default:!0,description:!0,title:!0,exclusiveMinimum:!0,exclusiveMaximum:!0,additionalItems:!0,$schema:!0,$ref:!0,extends:!0}},a=o.validators={};function c(e,t,i,n,r){var s=t.throwError;t.throwError=!1;var o=this.validateSchema(e,r,t,i);return t.throwError=s,!o.valid&&n instanceof Function&&n(o),o.valid}function h(e,t,i,n,r,s){if(this.types.object(e)&&(!t.properties||void 0===t.properties[r]))if(!1===t.additionalProperties)s.addError({name:"additionalProperties",argument:r,message:"additionalProperty "+JSON.stringify(r)+" exists in instance when not allowed"});else{var o=t.additionalProperties||{};"function"==typeof i.preValidateProperty&&i.preValidateProperty(e,r,o,i,n);var 
a=this.validateSchema(e[r],o,i,n.makeChild(o,r));a.instance!==s.instance[r]&&(s.instance[r]=a.instance),s.importErrors(a)}}a.type=function(e,t,i,n){if(void 0===e)return null;var s=new r(e,t,i,n),o=Array.isArray(t.type)?t.type:[t.type];if(!o.some(this.testType.bind(this,e,t,i,n))){var a=o.map(function(e){return e.id&&"<"+e.id+">"||e+""});s.addError({name:"type",argument:a,message:"is not of a type(s) "+a})}return s},a.anyOf=function(e,t,i,n){if(void 0===e)return null;var o=new r(e,t,i,n),a=new r(e,t,i,n);if(!Array.isArray(t.anyOf))throw new s("anyOf must be an array");if(!t.anyOf.some(c.bind(this,e,i,n,function(e){a.importErrors(e)}))){var h=t.anyOf.map(function(e,t){return e.id&&"<"+e.id+">"||e.title&&JSON.stringify(e.title)||e.$ref&&"<"+e.$ref+">"||"[subschema "+t+"]"});i.nestedErrors&&o.importErrors(a),o.addError({name:"anyOf",argument:h,message:"is not any of "+h.join(",")})}return o},a.allOf=function(e,t,i,n){if(void 0===e)return null;if(!Array.isArray(t.allOf))throw new s("allOf must be an array");var o=new r(e,t,i,n),a=this;return t.allOf.forEach(function(t,r){var s=a.validateSchema(e,t,i,n);if(!s.valid){var c=t.id&&"<"+t.id+">"||t.title&&JSON.stringify(t.title)||t.$ref&&"<"+t.$ref+">"||"[subschema "+r+"]";o.addError({name:"allOf",argument:{id:c,length:s.errors.length,valid:s},message:"does not match allOf schema "+c+" with "+s.errors.length+" error[s]:"}),o.importErrors(s)}}),o},a.oneOf=function(e,t,i,n){if(void 0===e)return null;if(!Array.isArray(t.oneOf))throw new s("oneOf must be an array");var o=new r(e,t,i,n),a=new r(e,t,i,n),h=t.oneOf.filter(c.bind(this,e,i,n,function(e){a.importErrors(e)})).length,l=t.oneOf.map(function(e,t){return e.id&&"<"+e.id+">"||e.title&&JSON.stringify(e.title)||e.$ref&&"<"+e.$ref+">"||"[subschema "+t+"]"});return 1!==h&&(i.nestedErrors&&o.importErrors(a),o.addError({name:"oneOf",argument:l,message:"is not exactly one from "+l.join(",")})),o},a.properties=function(e,t,i,n){if(this.types.object(e)){var s=new r(e,t,i,n),o=t.properties||{};for(var a in o){"function"==typeof i.preValidateProperty&&i.preValidateProperty(e,a,o[a],i,n);var c=Object.hasOwnProperty.call(e,a)?e[a]:void 0,h=this.validateSchema(c,o[a],i,n.makeChild(o[a],a));h.instance!==s.instance[a]&&(s.instance[a]=h.instance),s.importErrors(h)}return s}},a.patternProperties=function(e,t,i,n){if(this.types.object(e)){var s=new r(e,t,i,n),o=t.patternProperties||{};for(var a in e){var c=!0;for(var l in o){if(new RegExp(l).test(a)){c=!1,"function"==typeof i.preValidateProperty&&i.preValidateProperty(e,a,o[l],i,n);var u=this.validateSchema(e[a],o[l],i,n.makeChild(o[l],a));u.instance!==s.instance[a]&&(s.instance[a]=u.instance),s.importErrors(u)}}c&&h.call(this,e,t,i,n,a,s)}return s}},a.additionalProperties=function(e,t,i,n){if(this.types.object(e)){if(t.patternProperties)return null;var s=new r(e,t,i,n);for(var o in e)h.call(this,e,t,i,n,o,s);return s}},a.minProperties=function(e,t,i,n){if(this.types.object(e)){var s=new r(e,t,i,n);return Object.keys(e).length>=t.minProperties||s.addError({name:"minProperties",argument:t.minProperties,message:"does not meet minimum property length of "+t.minProperties}),s}},a.maxProperties=function(e,t,i,n){if(this.types.object(e)){var s=new r(e,t,i,n);return Object.keys(e).length<=t.maxProperties||s.addError({name:"maxProperties",argument:t.maxProperties,message:"does not meet maximum property length of "+t.maxProperties}),s}},a.items=function(e,t,i,n){var s=this;if(this.types.array(e)&&t.items){var o=new r(e,t,i,n);return e.every(function(e,r){var 
a=Array.isArray(t.items)?t.items[r]||t.additionalItems:t.items;if(void 0===a)return!0;if(!1===a)return o.addError({name:"items",message:"additionalItems not permitted"}),!1;var c=s.validateSchema(e,a,i,n.makeChild(a,r));return c.instance!==o.instance[r]&&(o.instance[r]=c.instance),o.importErrors(c),!0}),o}},a.minimum=function(e,t,i,n){if(this.types.number(e)){var s=new r(e,t,i,n);return(t.exclusiveMinimum&&!0===t.exclusiveMinimum?e>t.minimum:e>=t.minimum)||s.addError({name:"minimum",argument:t.minimum,message:"must have a minimum value of "+t.minimum}),s}},a.maximum=function(e,t,i,n){if(this.types.number(e)){var s=new r(e,t,i,n);return(t.exclusiveMaximum&&!0===t.exclusiveMaximum?e=t.minLength||s.addError({name:"minLength",argument:t.minLength,message:"does not meet minimum length of "+t.minLength}),s}},a.maxLength=function(e,t,i,n){if(this.types.string(e)){var s=new r(e,t,i,n),o=e.match(/[\uDC00-\uDFFF]/g);return e.length-(o?o.length:0)<=t.maxLength||s.addError({name:"maxLength",argument:t.maxLength,message:"does not meet maximum length of "+t.maxLength}),s}},a.minItems=function(e,t,i,n){if(this.types.array(e)){var s=new r(e,t,i,n);return e.length>=t.minItems||s.addError({name:"minItems",argument:t.minItems,message:"does not meet minimum length of "+t.minItems}),s}},a.maxItems=function(e,t,i,n){if(this.types.array(e)){var s=new r(e,t,i,n);return e.length<=t.maxItems||s.addError({name:"maxItems",argument:t.maxItems,message:"does not meet maximum length of "+t.maxItems}),s}},a.uniqueItems=function(e,t,i,s){if(this.types.array(e)){var o=new r(e,t,i,s);return e.every(function(e,t,i){for(var r=t+1;r"||r;o.addError({name:"not",argument:a,message:"is of prohibited type "+a})}}),o):null},e.exports=o},function(e){e.exports={$ref:"#/definitions/Assembly",$schema:"http://json-schema.org/draft-07/schema#",definitions:{Assembly:{description:"A JSII assembly specification.",properties:{author:{$ref:"#/definitions/Person",description:"The main author of this package."},bundled:{additionalProperties:{type:"string"},description:"List if bundled dependencies (these are not expected to be jsii assemblies).",type:"object"},contributors:{description:"Additional contributors to this package.",items:{$ref:"#/definitions/Person"},type:"array"},dependencies:{additionalProperties:{$ref:"#/definitions/PackageVersion"},description:"Dependencies on other assemblies (with semver), the key is the JSII assembly name.",type:"object"},description:{description:'Description of the assembly, maps to "description" from package.json\nThis is required since some package managers (like Maven) require it.',type:"string"},docs:{$ref:"#/definitions/Docs",description:"Key value pairs of documentation nodes.\nBased on JSDoc."},fingerprint:{description:"A fingerprint that can be used to determine if the specification has changed.",minLength:1,type:"string"},homepage:{description:'The url to the project homepage. 
Maps to "homepage" from package.json.',type:"string"},license:{description:"The SPDX name of the license this assembly is distributed on.",type:"string"},name:{description:"The name of the assembly",minLength:1,type:"string"},readme:{description:"The top-level readme document for this assembly (if any).",properties:{markdown:{type:"string"}},required:["markdown"],type:"object"},repository:{description:'The module repository, maps to "repository" from package.json\nThis is required since some package managers (like Maven) require it.',properties:{type:{description:"The type of the repository (``git``, ``svn``, ...)",type:"string"},url:{description:"The URL of the repository.",type:"string"}},required:["type","url"],type:"object"},schema:{description:"The version of the spec schema",enum:["jsii/1.0"],type:"string"},targets:{$ref:"#/definitions/AssemblyTargets",description:"A map of target name to configuration, which is used when generating packages for\nvarious languages."},types:{additionalProperties:{anyOf:[{allOf:[{$ref:"#/definitions/TypeBase"},{$ref:"#/definitions/ClassType"}]},{allOf:[{$ref:"#/definitions/TypeBase"},{$ref:"#/definitions/EnumType"}]},{allOf:[{$ref:"#/definitions/TypeBase"},{$ref:"#/definitions/InterfaceType"}]}]},description:"All types in the assembly, keyed by their fully-qualified-name",type:"object"},version:{description:"The version of the assembly",minLength:1,type:"string"}},required:["author","description","fingerprint","homepage","license","name","repository","schema","version"],type:"object"},AssemblyTargets:{additionalProperties:{additionalProperties:{},type:"object"},description:"Configurable targets for an asembly.",type:"object"},ClassType:{description:"Represents classes.",properties:{abstract:{description:"Indicates if this class is an abstract class.",type:"boolean"},assembly:{description:"The name of the assembly the type belongs to.",minLength:1,type:"string"},base:{$ref:"#/definitions/NamedTypeReference",description:"Base class (optional)."},docs:{$ref:"#/definitions/Docs",description:"Key value pairs of documentation nodes.\nBased on JSDoc."},fqn:{description:"The fully qualified name of the type (``..``)",minLength:3,type:"string"},initializer:{$ref:"#/definitions/Method",description:"Initializer (constructor) method."},interfaces:{description:"The set of interfaces implemented by this class.",items:{$ref:"#/definitions/NamedTypeReference"},type:"array"},kind:{description:"The kind of the type.",enum:["class"],type:"string"},methods:{description:"List of methods.",items:{$ref:"#/definitions/Method"},type:"array"},name:{description:"The simple name of the type (MyClass).",minLength:1,type:"string"},namespace:{description:"The namespace of the type (``foo.bar.baz``). When undefined, the type is located at the root of the assembly\n(it's ``fqn`` would be like ``.``). 
If the `namespace` corresponds to an existing type's\nnamespace-qualified (e.g: ``.``), then the current type is a nested type.",type:"string"},properties:{description:"List of properties.",items:{$ref:"#/definitions/Property"},type:"array"}},required:["assembly","fqn","kind","name"],type:"object"},CollectionKind:{description:"Kinds of collections.",enum:["array","map"],type:"string"},CollectionTypeReference:{description:"Reference to a collection type.",properties:{collection:{properties:{elementtype:{anyOf:[{allOf:[{$ref:"#/definitions/TypeReferenceBase"},{$ref:"#/definitions/NamedTypeReference"}]},{allOf:[{$ref:"#/definitions/TypeReferenceBase"},{$ref:"#/definitions/PrimitiveTypeReference"}]},{allOf:[{$ref:"#/definitions/TypeReferenceBase"},{$ref:"#/definitions/CollectionTypeReference"}]},{allOf:[{$ref:"#/definitions/TypeReferenceBase"},{$ref:"#/definitions/UnionTypeReference"}]}],description:"The type of an element (map keys are always strings)."},kind:{$ref:"#/definitions/CollectionKind",description:"The kind of collection."}},required:["elementtype","kind"],type:"object"},optional:{description:"Indicates if this value is optional.",type:"boolean"},promise:{description:"Indicates if this type refers to a promise.",type:"boolean"}},required:["collection"],type:"object"},Docs:{additionalProperties:{type:"string"},description:"Key value pairs of documentation nodes.\nBased on JSDoc.",type:"object"},EnumMember:{description:"Represents a member of an enum.",properties:{docs:{$ref:"#/definitions/Docs",description:"Key value pairs of documentation nodes.\nBased on JSDoc."},name:{description:"The name/symbol of the member.",type:"string"}},required:["name"],type:"object"},EnumType:{description:"Represents an enum type.",properties:{assembly:{description:"The name of the assembly the type belongs to.",minLength:1,type:"string"},docs:{$ref:"#/definitions/Docs",description:"Key value pairs of documentation nodes.\nBased on JSDoc."},fqn:{description:"The fully qualified name of the type (``..``)",minLength:3,type:"string"},kind:{description:"The kind of the type.",enum:["enum"],type:"string"},members:{description:"Members of the enum.",items:{$ref:"#/definitions/EnumMember"},type:"array"},name:{description:"The simple name of the type (MyClass).",minLength:1,type:"string"},namespace:{description:"The namespace of the type (``foo.bar.baz``). When undefined, the type is located at the root of the assembly\n(it's ``fqn`` would be like ``.``). If the `namespace` corresponds to an existing type's\nnamespace-qualified (e.g: ``.``), then the current type is a nested type.",type:"string"}},required:["assembly","fqn","kind","members","name"],type:"object"},InterfaceType:{properties:{assembly:{description:"The name of the assembly the type belongs to.",minLength:1,type:"string"},datatype:{description:"True if this interface only contains properties. 
Different backends might\nhave idiomatic ways to allow defining concrete instances such interfaces.\nFor example, in Java, the generator will produce a PoJo and a builder\nwhich will allow users to create a concrete object with data which\nadheres to this interface.",type:"boolean"},docs:{$ref:"#/definitions/Docs",description:"Key value pairs of documentation nodes.\nBased on JSDoc."},fqn:{description:"The fully qualified name of the type (``..``)",minLength:3,type:"string"},interfaces:{description:"All the base interfaces that this interface extends.",items:{$ref:"#/definitions/NamedTypeReference"},type:"array"},kind:{description:"The kind of the type.",enum:["interface"],type:"string"},methods:{description:"List of methods.",items:{$ref:"#/definitions/Method"},type:"array"},name:{description:"The simple name of the type (MyClass).",minLength:1,type:"string"},namespace:{description:"The namespace of the type (``foo.bar.baz``). When undefined, the type is located at the root of the assembly\n(it's ``fqn`` would be like ``.``). If the `namespace` corresponds to an existing type's\nnamespace-qualified (e.g: ``.``), then the current type is a nested type.",type:"string"},properties:{description:"List of properties.",items:{$ref:"#/definitions/Property"},type:"array"}},required:["assembly","fqn","kind","name"],type:"object"},Method:{description:"Represents a method.",properties:{abstract:{description:"Is this method an abstract method (this means the class will also be an abstract class)",type:"boolean"},docs:{$ref:"#/definitions/Docs",description:"Key value pairs of documentation nodes.\nBased on JSDoc."},initializer:{description:"True if this method is an initializer, in which case it won't have a return type",type:"boolean"},name:{description:"The name of the method. Undefined if this method is a initializer.",type:"string"},overrides:{$ref:"#/definitions/NamedTypeReference",default:"undefined",description:"The name of the parent type (class or interface) that this entity overrides or implements. If undefined, then\nthis entity is the first in it's hierarchy to declare this entity."},parameters:{description:"The parameters of the method/initializer",items:{$ref:"#/definitions/Parameter"},type:"array"},protected:{description:"Indicates if this method is protected (otherwise it is public)",type:"boolean"},returns:{anyOf:[{allOf:[{$ref:"#/definitions/TypeReferenceBase"},{$ref:"#/definitions/NamedTypeReference"}]},{allOf:[{$ref:"#/definitions/TypeReferenceBase"},{$ref:"#/definitions/PrimitiveTypeReference"}]},{allOf:[{$ref:"#/definitions/TypeReferenceBase"},{$ref:"#/definitions/CollectionTypeReference"}]},{allOf:[{$ref:"#/definitions/TypeReferenceBase"},{$ref:"#/definitions/UnionTypeReference"}]}],description:"The return type of the method (undefined if void or initializer)"},static:{description:"Indicates if this is a static method.",type:"boolean"},variadic:{description:"Indicates whether this method is variadic or not. 
When ``true``, the last\nelement of ``#parameters`` will also be flagged ``#variadic``.",type:"boolean"}},type:"object"},NamedTypeReference:{description:"Reference to a named type, defined by this assembly or one of it's dependencies.",properties:{fqn:{description:"The fully-qualified-name of the type (can be located in the\n``spec.types[fqn]``` of the assembly that defines the type).",type:"string"},optional:{description:"Indicates if this value is optional.",type:"boolean"},promise:{description:"Indicates if this type refers to a promise.",type:"boolean"}},required:["fqn"],type:"object"},PackageVersion:{description:"The version of a package.",properties:{dependencies:{additionalProperties:{$ref:"#/definitions/PackageVersion"},description:"Dependencies of this dependency",type:"object"},targets:{$ref:"#/definitions/AssemblyTargets",description:"Targets for a given assembly."},version:{description:"Version of the package.",minLength:1,type:"string"}},required:["version"],type:"object"},Parameter:{description:"Represents a method parameter.",properties:{docs:{$ref:"#/definitions/Docs",description:"Key value pairs of documentation nodes.\nBased on JSDoc."},name:{description:"The name of the parameter.",minLength:1,type:"string"},type:{anyOf:[{allOf:[{$ref:"#/definitions/TypeReferenceBase"},{$ref:"#/definitions/NamedTypeReference"}]},{allOf:[{$ref:"#/definitions/TypeReferenceBase"},{$ref:"#/definitions/PrimitiveTypeReference"}]},{allOf:[{$ref:"#/definitions/TypeReferenceBase"},{$ref:"#/definitions/CollectionTypeReference"}]},{allOf:[{$ref:"#/definitions/TypeReferenceBase"},{$ref:"#/definitions/UnionTypeReference"}]}],description:"The type of the parameter."},variadic:{description:'Whather this argument is the "rest" of a variadic signature.\nThe ``#type`` is that of every individual argument of the variadic list.',type:"boolean"}},required:["name","type"],type:"object"},Person:{description:"Metadata about people or organizations associated with the project that\nresulted in the Assembly. Some of this metadata is required in order to\npublish to certain package repositories (for example, Maven Central), but is\nnot normalized, and the meaning of fields (role, for example), is up to each\nproject maintainer.",properties:{email:{description:"The email of the person",type:"string"},name:{description:"The name of the person",type:"string"},organization:{description:"If true, this person is, in fact, an organization",type:"boolean"},roles:{description:"A list of roles this person has in the project, for example `maintainer`,\n`contributor`, `owner`, ...",items:{type:"string"},type:"array"},url:{description:"The URL for the person",type:"string"}},required:["name","roles"],type:"object"},PrimitiveType:{description:"Kinds of primitive types.",enum:["any","boolean","date","json","number","string"],type:"string"},PrimitiveTypeReference:{description:"Reference to a primitive type.",properties:{optional:{description:"Indicates if this value is optional.",type:"boolean"},primitive:{$ref:"#/definitions/PrimitiveType",description:"If this is a reference to a primitive type, this will include the\nprimitive type kind."},promise:{description:"Indicates if this type refers to a promise.",type:"boolean"}},required:["primitive"],type:"object"},Property:{description:"A class property.",properties:{abstract:{description:"Indicates if this property is abstract",type:"boolean"},const:{description:'A hint that indicates that this static, immutable property is initialized\nduring startup. 
This allows emitting "const" idioms in different target languages.\nImplies `static` and `immutable`.',type:"boolean"},docs:{$ref:"#/definitions/Docs",description:"Key value pairs of documentation nodes.\nBased on JSDoc."},immutable:{description:"Indicates if this property only has a getter (immutable).",type:"boolean"},name:{description:"The name of the property.",minLength:1,type:"string"},overrides:{$ref:"#/definitions/NamedTypeReference",default:"undefined",description:"The name of the parent type (class or interface) that this entity overrides or implements. If undefined, then\nthis entity is the first in it's hierarchy to declare this entity."},protected:{description:"Indicates if this property is protected (otherwise it is public)",type:"boolean"},static:{description:"Indicates if this is a static property.",type:"boolean"},type:{anyOf:[{allOf:[{$ref:"#/definitions/TypeReferenceBase"},{$ref:"#/definitions/NamedTypeReference"}]},{allOf:[{$ref:"#/definitions/TypeReferenceBase"},{$ref:"#/definitions/PrimitiveTypeReference"}]},{allOf:[{$ref:"#/definitions/TypeReferenceBase"},{$ref:"#/definitions/CollectionTypeReference"}]},{allOf:[{$ref:"#/definitions/TypeReferenceBase"},{$ref:"#/definitions/UnionTypeReference"}]}],description:"The type of the property."}},required:["name","type"],type:"object"},TypeBase:{description:"Common attributes of a type definition.",properties:{assembly:{description:"The name of the assembly the type belongs to.",minLength:1,type:"string"},docs:{$ref:"#/definitions/Docs",description:"Key value pairs of documentation nodes.\nBased on JSDoc."},fqn:{description:"The fully qualified name of the type (``..``)",minLength:3,type:"string"},kind:{$ref:"#/definitions/TypeKind",description:"The kind of the type."},name:{description:"The simple name of the type (MyClass).",minLength:1,type:"string"},namespace:{description:"The namespace of the type (``foo.bar.baz``). When undefined, the type is located at the root of the assembly\n(it's ``fqn`` would be like ``.``). 
If the `namespace` corresponds to an existing type's\nnamespace-qualified (e.g: ``.``), then the current type is a nested type.",type:"string"}},required:["assembly","fqn","kind","name"],type:"object"},TypeKind:{description:"Kinds of types.",enum:["class","enum","interface"],type:"string"},TypeReferenceBase:{description:"Common attributes of a TypeReference.",properties:{optional:{description:"Indicates if this value is optional.",type:"boolean"},promise:{description:"Indicates if this type refers to a promise.",type:"boolean"}},type:"object"},UnionTypeReference:{description:"Reference to a union type.",properties:{optional:{description:"Indicates if this value is optional.",type:"boolean"},promise:{description:"Indicates if this type refers to a promise.",type:"boolean"},union:{description:"Indicates that this is a union type, which means it can be one of a set of types.",properties:{types:{description:"All the possible types (including the primary type).",items:{anyOf:[{allOf:[{$ref:"#/definitions/TypeReferenceBase"},{$ref:"#/definitions/NamedTypeReference"}]},{allOf:[{$ref:"#/definitions/TypeReferenceBase"},{$ref:"#/definitions/PrimitiveTypeReference"}]},{allOf:[{$ref:"#/definitions/TypeReferenceBase"},{$ref:"#/definitions/CollectionTypeReference"}]},{allOf:[{$ref:"#/definitions/TypeReferenceBase"},{$ref:"#/definitions/UnionTypeReference"}]}]},minItems:2,type:"array"}},required:["types"],type:"object"}},required:["union"],type:"object"}}}},function(e,t,i){"use strict";t.c=t.create=i(85),t.r=t.replace=i(38),t.t=t.list=i(25),t.u=t.update=i(91),t.x=t.extract=i(92),t.Pack=i(22),t.Unpack=i(39),t.Parse=i(18),t.ReadEntry=i(16),t.WriteEntry=i(36),t.Header=i(10),t.Pax=i(23),t.types=i(17)},function(e,t,i){"use strict";const n=i(8),r=i(22),s=(i(2),i(11)),o=i(25),a=i(0),c=(e.exports=((e,t,i)=>{if("function"==typeof t&&(i=t),Array.isArray(e)&&(t=e,e={}),!t||!Array.isArray(t)||!t.length)throw new TypeError("no files or directories specified");t=Array.from(t);const r=n(e);if(r.sync&&"function"==typeof i)throw new TypeError("callback not supported for sync tar functions");if(!r.file&&"function"==typeof i)throw new TypeError("callback only supported with file option");return r.file&&r.sync?c(r,t):r.file?h(r,t,i):r.sync?f(r,t):d(r,t)}),(e,t)=>{const i=new r.Sync(e),n=new s.WriteStreamSync(e.file,{mode:e.mode||438});i.pipe(n),l(i,t)}),h=(e,t,i)=>{const n=new r(e),o=new s.WriteStream(e.file,{mode:e.mode||438});n.pipe(o);const a=new Promise((e,t)=>{o.on("error",t),o.on("close",e),n.on("error",t)});return u(n,t),i?a.then(i,i):a},l=(e,t)=>{t.forEach(t=>{"@"===t.charAt(0)?o({file:a.resolve(e.cwd,t.substr(1)),sync:!0,noResume:!0,onentry:t=>e.add(t)}):e.add(t)}),e.end()},u=(e,t)=>{for(;t.length;){const i=t.shift();if("@"===i.charAt(0))return o({file:a.resolve(e.cwd,i.substr(1)),noResume:!0,onentry:t=>e.add(t)}).then(i=>u(e,t));e.add(i)}e.end()},f=(e,t)=>{const i=new r.Sync(e);return l(i,t),i},d=(e,t)=>{const i=new r(e);return u(i,t),i}},function(e,t,i){"use strict";i(15).prototype[Symbol.iterator]=function*(){for(let e=this.head;e;e=e.next)yield 
e.value}},function(e,t){e.exports=require("string_decoder")},function(e,t){e.exports=Object.freeze({Z_NO_FLUSH:0,Z_PARTIAL_FLUSH:1,Z_SYNC_FLUSH:2,Z_FULL_FLUSH:3,Z_FINISH:4,Z_BLOCK:5,Z_OK:0,Z_STREAM_END:1,Z_NEED_DICT:2,Z_ERRNO:-1,Z_STREAM_ERROR:-2,Z_DATA_ERROR:-3,Z_MEM_ERROR:-4,Z_BUF_ERROR:-5,Z_VERSION_ERROR:-6,Z_NO_COMPRESSION:0,Z_BEST_SPEED:1,Z_BEST_COMPRESSION:9,Z_DEFAULT_COMPRESSION:-1,Z_FILTERED:1,Z_HUFFMAN_ONLY:2,Z_RLE:3,Z_FIXED:4,Z_DEFAULT_STRATEGY:0,ZLIB_VERNUM:4736,DEFLATE:1,INFLATE:2,GZIP:3,GUNZIP:4,DEFLATERAW:5,INFLATERAW:6,UNZIP:7,Z_MIN_WINDOWBITS:8,Z_MAX_WINDOWBITS:15,Z_DEFAULT_WINDOWBITS:15,Z_MIN_CHUNK:64,Z_MAX_CHUNK:1/0,Z_DEFAULT_CHUNK:16384,Z_MIN_MEMLEVEL:1,Z_MAX_MEMLEVEL:9,Z_DEFAULT_MEMLEVEL:8,Z_MIN_LEVEL:-1,Z_MAX_LEVEL:9,Z_DEFAULT_LEVEL:-1})},function(e,t,i){"use strict";t.encode=((e,t)=>(t[t.length-1]=32,e<0?r(e,t):n(e,t),t));const n=(e,t)=>{t[0]=128;for(var i=t.length-2;i>0;i--)0===e?t[i]=0:(t[i]=e%256,e=Math.floor(e/256))},r=(e,t)=>{t[0]=255;var i=!1;e*=-1;for(var n=t.length-2;n>0;n--){var r;0===e?r=0:(r=e%256,e=Math.floor(e/256)),i?t[n]=a(r):0===r?t[n]=0:(i=!0,t[n]=c(r))}},s=(t.parse=(e=>{e[e.length-1];return 128===e[0]?o(e.slice(1,e.length-1)):s(e.slice(1,e.length-1))}),e=>{for(var t=e.length,i=0,n=!1,r=t-1;r>-1;r--){var s,o=e[r];n?s=a(o):0===o?s=o:(n=!0,s=c(o)),0!==s&&(i+=s*Math.pow(256,t-r-1))}return-1*i}),o=e=>{for(var t=e.length,i=0,n=t-1;n>-1;n--){var r=e[n];0!==r&&(i+=r*Math.pow(256,t-n-1))}return i},a=e=>255&(255^e),c=e=>1+(255^e)&255},function(e,t,i){"use strict";e.exports=((e,t)=>(e&=4095,t&&(256&e&&(e|=64),32&e&&(e|=8),4&e&&(e|=1)),e))},function(e,t,i){"use strict";const n=i(8),r=i(38),s=(e.exports=((e,t,i)=>{const o=n(e);if(!o.file)throw new TypeError("file is required");if(o.gzip)throw new TypeError("cannot append to compressed archives");if(!t||!Array.isArray(t)||!t.length)throw new TypeError("no files or directories specified");return t=Array.from(t),s(o),r(o,t,i)}),e=>{const t=e.filter;e.mtimeCache||(e.mtimeCache=new Map),e.filter=t?(i,n)=>t(i,n)&&!(e.mtimeCache.get(i)>n.mtime):(t,i)=>!(e.mtimeCache.get(t)>i.mtime)})},function(e,t,i){"use strict";const n=i(8),r=i(39),s=i(2),o=i(11),a=i(0),c=(e.exports=((e,t,i)=>{"function"==typeof e?(i=e,t=null,e={}):Array.isArray(e)&&(t=e,e={}),"function"==typeof t&&(i=t,t=null),t=t?Array.from(t):[];const r=n(e);if(r.sync&&"function"==typeof i)throw new TypeError("callback not supported for sync tar functions");if(!r.file&&"function"==typeof i)throw new TypeError("callback only supported with file option");return t.length&&c(r,t),r.file&&r.sync?h(r):r.file?l(r,i):r.sync?u(r):f(r)}),(e,t)=>{const i=new Map(t.map(e=>[e.replace(/\/+$/,""),!0])),n=e.filter,r=(e,t)=>{const n=t||a.parse(e).root||".",s=e!==n&&(i.has(e)?i.get(e):r(a.dirname(e),n));return i.set(e,s),s};e.filter=n?(e,t)=>n(e,t)&&r(e.replace(/\/+$/,"")):e=>r(e.replace(/\/+$/,""))}),h=e=>{const t=new r.Sync(e),i=e.file;const n=s.statSync(i),a=e.maxReadSize||16777216;new o.ReadStreamSync(i,{readSize:a,size:n.size}).pipe(t)},l=(e,t)=>{const i=new r(e),n=e.maxReadSize||16777216,a=e.file,c=new Promise((e,t)=>{i.on("error",t),i.on("close",e),s.stat(a,(e,r)=>{if(e)t(e);else{const e=new o.ReadStream(a,{readSize:n,size:r.size});e.on("error",t),e.pipe(i)}})});return t?c.then(t,t):c},u=e=>new r.Sync(e),f=e=>new r(e)},function(e,t,i){"use strict";const n=i(94),r=i(2),s=i(0),o=i(95);class a extends Error{constructor(e,t){super("Cannot extract through symbolic link"),this.path=t,this.symlink=e}get name(){return"SylinkError"}}class c extends Error{constructor(e,t){super(t+": 
Cannot cd into '"+e+"'"),this.path=e,this.code=t}get name(){return"CwdError"}}e.exports=((e,t,i)=>{const a=t.umask,l=448|t.mode,u=0!=(l&a),f=t.uid,d=t.gid,p="number"==typeof f&&"number"==typeof d&&(f!==t.processUid||d!==t.processGid),m=t.preserve,y=t.unlink,b=t.cache,w=t.cwd,g=(t,n)=>{t?i(t):(b.set(e,!0),n&&p?o(n,f,d,e=>g(e)):u?r.chmod(e,l,i):i())};if(b&&!0===b.get(e))return g();if(e===w)return r.lstat(e,(t,i)=>{!t&&i.isDirectory()||(t=new c(e,t&&t.code||"ENOTDIR")),g(t)});if(m)return n(e,l,g);const v=s.relative(w,e).split(/\/|\\/);h(w,v,l,b,y,w,null,g)});const h=(e,t,i,n,s,o,a,c)=>{if(!t.length)return c(null,a);const u=e+"/"+t.shift();if(n.get(u))return h(u,t,i,n,s,o,a,c);r.mkdir(u,i,l(u,t,i,n,s,o,a,c))},l=(e,t,i,n,o,u,f,d)=>p=>{if(p){if(p.path&&s.dirname(p.path)===u&&("ENOTDIR"===p.code||"ENOENT"===p.code))return d(new c(u,p.code));r.lstat(e,(s,c)=>{if(s)d(s);else if(c.isDirectory())h(e,t,i,n,o,u,f,d);else if(o)r.unlink(e,s=>{if(s)return d(s);r.mkdir(e,i,l(e,t,i,n,o,u,f,d))});else{if(c.isSymbolicLink())return d(new a(e,e+"/"+t.join("/")));d(p)}})}else h(e,t,i,n,o,u,f=f||e,d)};e.exports.sync=((e,t)=>{const i=t.umask,h=448|t.mode,l=0!=(h&i),u=t.uid,f=t.gid,d="number"==typeof u&&"number"==typeof f&&(u!==t.processUid||f!==t.processGid),p=t.preserve,m=t.unlink,y=t.cache,b=t.cwd,w=t=>{y.set(e,!0),t&&d&&o.sync(t,u,f),l&&r.chmodSync(e,h)};if(y&&!0===y.get(e))return w();if(e===b){let t=!1,i="ENOTDIR";try{t=r.lstatSync(e).isDirectory()}catch(e){i=e.code}finally{if(!t)throw new c(e,i)}return void w()}if(p)return w(n.sync(e,h));const g=s.relative(b,e).split(/\/|\\/);let v=null;for(let e=g.shift(),t=b;e&&(t+="/"+e);e=g.shift())if(!y.get(t))try{r.mkdirSync(t,h),v=v||t,y.set(t,!0)}catch(e){if(e.path&&s.dirname(e.path)===b&&("ENOTDIR"===e.code||"ENOENT"===e.code))return new c(b,e.code);const i=r.lstatSync(t);if(i.isDirectory()){y.set(t,!0);continue}if(m){r.unlinkSync(t),r.mkdirSync(t,h),v=v||t,y.set(t,!0);continue}if(i.isSymbolicLink())return new a(t,t+"/"+g.join("/"))}return w(v)})},function(e,t,i){var n=i(0),r=i(2),s=parseInt("0777",8);function o(e,t,i,a){"function"==typeof t?(i=t,t={}):t&&"object"==typeof t||(t={mode:t});var c=t.mode,h=t.fs||r;void 0===c&&(c=s&~process.umask()),a||(a=null);var l=i||function(){};e=n.resolve(e),h.mkdir(e,c,function(i){if(!i)return l(null,a=a||e);switch(i.code){case"ENOENT":o(n.dirname(e),t,function(i,n){i?l(i,n):o(e,t,l,n)});break;default:h.stat(e,function(e,t){e||!t.isDirectory()?l(i,a):l(null,a)})}})}e.exports=o.mkdirp=o.mkdirP=o,o.sync=function e(t,i,o){i&&"object"==typeof i||(i={mode:i});var a=i.mode,c=i.fs||r;void 0===a&&(a=s&~process.umask()),o||(o=null),t=n.resolve(t);try{c.mkdirSync(t,a),o=o||t}catch(r){switch(r.code){case"ENOENT":e(t,i,o=e(n.dirname(t),i,o));break;default:var h;try{h=c.statSync(t)}catch(e){throw r}if(!h.isDirectory())throw r}}return o}},function(e,t,i){e.exports=s,s.sync=function e(t,i,s){var o;try{o=n.readdirSync(t)}catch(e){if(e&&"ENOTDIR"===e.code)return n.chownSync(t,i,s);throw e}if(!o.length)return n.chownSync(t,i,s);o.forEach(function(o){var a=r.resolve(t,o),c=n.lstatSync(a);c.isSymbolicLink()||e(a,i,s)});return n.chownSync(t,i,s)};var n=i(2),r=i(0);function s(e,t,i,o){n.readdir(e,function(a,c){if(a&&"ENOTDIR"!==a.code)return o(a);if(a||!c.length)return n.chown(e,t,i,o);var h=c.length,l=null;function u(r){if(!l)return r?o(l=r):0==--h?n.chown(e,t,i,o):void 0}c.forEach(function(a){var c=r.resolve(e,a);n.lstat(c,function(e,n){if(e)return 
o(e);n.isSymbolicLink()?u():s(c,t,i,u)})})})}},function(e,t){e.exports=require("crypto")},function(e,t){e.exports=require("vm")},function(e,t){e.exports=require("module")},function(e,t,i){"use strict";Object.defineProperty(t,"__esModule",{value:!0});const n=i(100);t.InputOutput=class{constructor(){this.debug=!1,this.stdio=new n.SyncStdio}write(e){const t=JSON.stringify(e);this.stdio.writeLine(t),this.debug&&this.stdio.writeErrorLine("< "+t)}read(){let e=this.stdio.readLine();if(!e)return;if(0===e.indexOf("< "))return this.read();0===e.indexOf("> ")&&(e=e.substr(2));const t=JSON.parse(e);return this.debug&&this.stdio.writeErrorLine("> "+JSON.stringify(t)),t}}},function(e,t,i){"use strict";Object.defineProperty(t,"__esModule",{value:!0});const n=i(2),r=0,s=1,o=2,a=1048576;t.SyncStdio=class{constructor(){this.inputQueue=new Array,this.currentLine=""}writeErrorLine(e){this.writeBuffer(Buffer.from(`${e}\n`),o)}writeLine(e){this.writeBuffer(Buffer.from(`${e}\n`),s)}readLine(){if(this.inputQueue.length>0)return this.inputQueue.shift();const e=Buffer.alloc(a),t=n.readSync(r,e,0,e.length,null);if(0===t)return;const i=e.slice(0,t).toString();for(let e=0;e {\n arguments[arguments.length] = (err, res) => {\n if (err) return reject(err)\n resolve(res)\n }\n arguments.length++\n fn.apply(this, arguments)\n })\n }\n }, 'name', { value: fn.name })\n}\n\nexports.fromPromise = function (fn) {\n return Object.defineProperty(function () {\n const cb = arguments[arguments.length - 1]\n if (typeof cb !== 'function') return fn.apply(this, arguments)\n else fn.apply(this, arguments).then(r => cb(null, r), cb)\n }, 'name', { value: fn.name })\n}\n","'use strict'\nconst u = require('universalify').fromCallback\nconst mkdirs = u(require('./mkdirs'))\nconst mkdirsSync = require('./mkdirs-sync')\n\nmodule.exports = {\n mkdirs: mkdirs,\n mkdirsSync: mkdirsSync,\n // alias\n mkdirp: mkdirs,\n mkdirpSync: mkdirsSync,\n ensureDir: mkdirs,\n ensureDirSync: mkdirsSync\n}\n","'use strict'\nconst u = require('universalify').fromPromise\nconst fs = require('../fs')\n\nfunction pathExists (path) {\n return fs.access(path).then(() => true).catch(() => false)\n}\n\nmodule.exports = {\n pathExists: u(pathExists),\n pathExistsSync: fs.existsSync\n}\n","'use strict';\n\nvar uri = require('url');\n\nvar ValidationError = exports.ValidationError = function ValidationError (message, instance, schema, propertyPath, name, argument) {\n if (propertyPath) {\n this.property = propertyPath;\n }\n if (message) {\n this.message = message;\n }\n if (schema) {\n if (schema.id) {\n this.schema = schema.id;\n } else {\n this.schema = schema;\n }\n }\n if (instance) {\n this.instance = instance;\n }\n this.name = name;\n this.argument = argument;\n this.stack = this.toString();\n};\n\nValidationError.prototype.toString = function toString() {\n return this.property + ' ' + this.message;\n};\n\nvar ValidatorResult = exports.ValidatorResult = function ValidatorResult(instance, schema, options, ctx) {\n this.instance = instance;\n this.schema = schema;\n this.propertyPath = ctx.propertyPath;\n this.errors = [];\n this.throwError = options && options.throwError;\n this.disableFormat = options && options.disableFormat === true;\n};\n\nValidatorResult.prototype.addError = function addError(detail) {\n var err;\n if (typeof detail == 'string') {\n err = new ValidationError(detail, this.instance, this.schema, this.propertyPath);\n } else {\n if (!detail) throw new Error('Missing error detail');\n if (!detail.message) throw new Error('Missing error 
message');\n if (!detail.name) throw new Error('Missing validator type');\n err = new ValidationError(detail.message, this.instance, this.schema, this.propertyPath, detail.name, detail.argument);\n }\n\n if (this.throwError) {\n throw err;\n }\n this.errors.push(err);\n return err;\n};\n\nValidatorResult.prototype.importErrors = function importErrors(res) {\n if (typeof res == 'string' || (res && res.validatorType)) {\n this.addError(res);\n } else if (res && res.errors) {\n Array.prototype.push.apply(this.errors, res.errors);\n }\n};\n\nfunction stringizer (v,i){\n return i+': '+v.toString()+'\\n';\n}\nValidatorResult.prototype.toString = function toString(res) {\n return this.errors.map(stringizer).join('');\n};\n\nObject.defineProperty(ValidatorResult.prototype, \"valid\", { get: function() {\n return !this.errors.length;\n} });\n\n/**\n * Describes a problem with a Schema which prevents validation of an instance\n * @name SchemaError\n * @constructor\n */\nvar SchemaError = exports.SchemaError = function SchemaError (msg, schema) {\n this.message = msg;\n this.schema = schema;\n Error.call(this, msg);\n Error.captureStackTrace(this, SchemaError);\n};\nSchemaError.prototype = Object.create(Error.prototype,\n { constructor: {value: SchemaError, enumerable: false}\n , name: {value: 'SchemaError', enumerable: false}\n });\n\nvar SchemaContext = exports.SchemaContext = function SchemaContext (schema, options, propertyPath, base, schemas) {\n this.schema = schema;\n this.options = options;\n this.propertyPath = propertyPath;\n this.base = base;\n this.schemas = schemas;\n};\n\nSchemaContext.prototype.resolve = function resolve (target) {\n return uri.resolve(this.base, target);\n};\n\nSchemaContext.prototype.makeChild = function makeChild(schema, propertyName){\n var propertyPath = (propertyName===undefined) ? 
this.propertyPath : this.propertyPath+makeSuffix(propertyName);\n var base = uri.resolve(this.base, schema.id||'');\n var ctx = new SchemaContext(schema, this.options, propertyPath, base, Object.create(this.schemas));\n if(schema.id && !ctx.schemas[base]){\n ctx.schemas[base] = schema;\n }\n return ctx;\n}\n\nvar FORMAT_REGEXPS = exports.FORMAT_REGEXPS = {\n 'date-time': /^\\d{4}-(?:0[0-9]{1}|1[0-2]{1})-(3[01]|0[1-9]|[12][0-9])[tT ](2[0-4]|[01][0-9]):([0-5][0-9]):(60|[0-5][0-9])(\\.\\d+)?([zZ]|[+-]([0-5][0-9]):(60|[0-5][0-9]))$/,\n 'date': /^\\d{4}-(?:0[0-9]{1}|1[0-2]{1})-(3[01]|0[1-9]|[12][0-9])$/,\n 'time': /^(2[0-4]|[01][0-9]):([0-5][0-9]):(60|[0-5][0-9])$/,\n\n 'email': /^(?:[\\w\\!\\#\\$\\%\\&\\'\\*\\+\\-\\/\\=\\?\\^\\`\\{\\|\\}\\~]+\\.)*[\\w\\!\\#\\$\\%\\&\\'\\*\\+\\-\\/\\=\\?\\^\\`\\{\\|\\}\\~]+@(?:(?:(?:[a-zA-Z0-9](?:[a-zA-Z0-9\\-](?!\\.)){0,61}[a-zA-Z0-9]?\\.)+[a-zA-Z0-9](?:[a-zA-Z0-9\\-](?!$)){0,61}[a-zA-Z0-9]?)|(?:\\[(?:(?:[01]?\\d{1,2}|2[0-4]\\d|25[0-5])\\.){3}(?:[01]?\\d{1,2}|2[0-4]\\d|25[0-5])\\]))$/,\n 'ip-address': /^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$/,\n 'ipv6': /^\\s*((([0-9A-Fa-f]{1,4}:){7}([0-9A-Fa-f]{1,4}|:))|(([0-9A-Fa-f]{1,4}:){6}(:[0-9A-Fa-f]{1,4}|((25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]?\\d)(\\.(25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]?\\d)){3})|:))|(([0-9A-Fa-f]{1,4}:){5}(((:[0-9A-Fa-f]{1,4}){1,2})|:((25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]?\\d)(\\.(25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]?\\d)){3})|:))|(([0-9A-Fa-f]{1,4}:){4}(((:[0-9A-Fa-f]{1,4}){1,3})|((:[0-9A-Fa-f]{1,4})?:((25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]?\\d)(\\.(25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]?\\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){3}(((:[0-9A-Fa-f]{1,4}){1,4})|((:[0-9A-Fa-f]{1,4}){0,2}:((25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]?\\d)(\\.(25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]?\\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){2}(((:[0-9A-Fa-f]{1,4}){1,5})|((:[0-9A-Fa-f]{1,4}){0,3}:((25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]?\\d)(\\.(25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]?\\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){1}(((:[0-9A-Fa-f]{1,4}){1,6})|((:[0-9A-Fa-f]{1,4}){0,4}:((25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]?\\d)(\\.(25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]?\\d)){3}))|:))|(:(((:[0-9A-Fa-f]{1,4}){1,7})|((:[0-9A-Fa-f]{1,4}){0,5}:((25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]?\\d)(\\.(25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]?\\d)){3}))|:)))(%.+)?\\s*$/,\n 'uri': /^[a-zA-Z][a-zA-Z0-9+-.]*:[^\\s]*$/,\n\n 'color': /^(#?([0-9A-Fa-f]{3}){1,2}\\b|aqua|black|blue|fuchsia|gray|green|lime|maroon|navy|olive|orange|purple|red|silver|teal|white|yellow|(rgb\\(\\s*\\b([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\b\\s*,\\s*\\b([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\b\\s*,\\s*\\b([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\b\\s*\\))|(rgb\\(\\s*(\\d?\\d%|100%)+\\s*,\\s*(\\d?\\d%|100%)+\\s*,\\s*(\\d?\\d%|100%)+\\s*\\)))$/,\n\n // hostname regex from: http://stackoverflow.com/a/1420225/5628\n 'hostname': /^(?=.{1,255}$)[0-9A-Za-z](?:(?:[0-9A-Za-z]|-){0,61}[0-9A-Za-z])?(?:\\.[0-9A-Za-z](?:(?:[0-9A-Za-z]|-){0,61}[0-9A-Za-z])?)*\\.?$/,\n 'host-name': /^(?=.{1,255}$)[0-9A-Za-z](?:(?:[0-9A-Za-z]|-){0,61}[0-9A-Za-z])?(?:\\.[0-9A-Za-z](?:(?:[0-9A-Za-z]|-){0,61}[0-9A-Za-z])?)*\\.?$/,\n\n 'alpha': /^[a-zA-Z]+$/,\n 'alphanumeric': /^[a-zA-Z0-9]+$/,\n 'utc-millisec': function (input) {\n return (typeof input === 'string') && parseFloat(input) === parseInt(input, 10) && !isNaN(input);\n },\n 'regex': function (input) {\n var result = true;\n try {\n new RegExp(input);\n } catch (e) {\n result = false;\n }\n return result;\n },\n 'style': /\\s*(.+?):\\s*([^;]+);?/g,\n 'phone': 
/^\\+(?:[0-9] ?){6,14}[0-9]$/\n};\n\nFORMAT_REGEXPS.regexp = FORMAT_REGEXPS.regex;\nFORMAT_REGEXPS.pattern = FORMAT_REGEXPS.regex;\nFORMAT_REGEXPS.ipv4 = FORMAT_REGEXPS['ip-address'];\n\nexports.isFormat = function isFormat (input, format, validator) {\n if (typeof input === 'string' && FORMAT_REGEXPS[format] !== undefined) {\n if (FORMAT_REGEXPS[format] instanceof RegExp) {\n return FORMAT_REGEXPS[format].test(input);\n }\n if (typeof FORMAT_REGEXPS[format] === 'function') {\n return FORMAT_REGEXPS[format](input);\n }\n } else if (validator && validator.customFormats &&\n typeof validator.customFormats[format] === 'function') {\n return validator.customFormats[format](input);\n }\n return true;\n};\n\nvar makeSuffix = exports.makeSuffix = function makeSuffix (key) {\n key = key.toString();\n // This function could be capable of outputting valid a ECMAScript string, but the\n // resulting code for testing which form to use would be tens of thousands of characters long\n // That means this will use the name form for some illegal forms\n if (!key.match(/[.\\s\\[\\]]/) && !key.match(/^[\\d]/)) {\n return '.' + key;\n }\n if (key.match(/^\\d+$/)) {\n return '[' + key + ']';\n }\n return '[' + JSON.stringify(key) + ']';\n};\n\nexports.deepCompareStrict = function deepCompareStrict (a, b) {\n if (typeof a !== typeof b) {\n return false;\n }\n if (a instanceof Array) {\n if (!(b instanceof Array)) {\n return false;\n }\n if (a.length !== b.length) {\n return false;\n }\n return a.every(function (v, i) {\n return deepCompareStrict(a[i], b[i]);\n });\n }\n if (typeof a === 'object') {\n if (!a || !b) {\n return a === b;\n }\n var aKeys = Object.keys(a);\n var bKeys = Object.keys(b);\n if (aKeys.length !== bKeys.length) {\n return false;\n }\n return aKeys.every(function (v) {\n return deepCompareStrict(a[v], b[v]);\n });\n }\n return a === b;\n};\n\nfunction deepMerger (target, dst, e, i) {\n if (typeof e === 'object') {\n dst[i] = deepMerge(target[i], e)\n } else {\n if (target.indexOf(e) === -1) {\n dst.push(e)\n }\n }\n}\n\nfunction copyist (src, dst, key) {\n dst[key] = src[key];\n}\n\nfunction copyistWithDeepMerge (target, src, dst, key) {\n if (typeof src[key] !== 'object' || !src[key]) {\n dst[key] = src[key];\n }\n else {\n if (!target[key]) {\n dst[key] = src[key];\n } else {\n dst[key] = deepMerge(target[key], src[key])\n }\n }\n}\n\nfunction deepMerge (target, src) {\n var array = Array.isArray(src);\n var dst = array && [] || {};\n\n if (array) {\n target = target || [];\n dst = dst.concat(target);\n src.forEach(deepMerger.bind(null, target, dst));\n } else {\n if (target && typeof target === 'object') {\n Object.keys(target).forEach(copyist.bind(null, target, dst));\n }\n Object.keys(src).forEach(copyistWithDeepMerge.bind(null, target, src, dst));\n }\n\n return dst;\n};\n\nmodule.exports.deepMerge = deepMerge;\n\n/**\n * Validates instance against the provided schema\n * Implements URI+JSON Pointer encoding, e.g. 
\"%7e\"=\"~0\"=>\"~\", \"~1\"=\"%2f\"=>\"/\"\n * @param o\n * @param s The path to walk o along\n * @return any\n */\nexports.objectGetPath = function objectGetPath(o, s) {\n var parts = s.split('/').slice(1);\n var k;\n while (typeof (k=parts.shift()) == 'string') {\n var n = decodeURIComponent(k.replace(/~0/,'~').replace(/~1/g,'/'));\n if (!(n in o)) return;\n o = o[n];\n }\n return o;\n};\n\nfunction pathEncoder (v) {\n return '/'+encodeURIComponent(v).replace(/~/g,'%7E');\n}\n/**\n * Accept an Array of property names and return a JSON Pointer URI fragment\n * @param Array a\n * @return {String}\n */\nexports.encodePath = function encodePointer(a){\n\t// ~ must be encoded explicitly because hacks\n\t// the slash is encoded by encodeURIComponent\n\treturn a.map(pathEncoder).join('');\n};\n\n\n/**\n * Calculate the number of decimal places a number uses\n * We need this to get correct results out of multipleOf and divisibleBy\n * when either figure is has decimal places, due to IEEE-754 float issues.\n * @param number\n * @returns {number}\n */\nexports.getDecimalPlaces = function getDecimalPlaces(number) {\n\n var decimalPlaces = 0;\n if (isNaN(number)) return decimalPlaces;\n\n if (typeof number !== 'number') {\n number = Number(number);\n }\n\n var parts = number.toString().split('e');\n if (parts.length === 2) {\n if (parts[1][0] !== '-') {\n return decimalPlaces;\n } else {\n decimalPlaces = Number(parts[1].slice(1));\n }\n }\n\n var decimalParts = parts[0].split('.');\n if (decimalParts.length === 2) {\n decimalPlaces += decimalParts[1].length;\n }\n\n return decimalPlaces;\n};\n\n","'use strict'\n\n// Buffer in node 4.x < 4.5.0 doesn't have working Buffer.from\n// or Buffer.alloc, and Buffer in node 10 deprecated the ctor.\n// .M, this is fine .\\^/M..\nlet B = Buffer\n/* istanbul ignore next */\nif (!B.alloc) {\n B = require('safe-buffer').Buffer\n}\nmodule.exports = B\n","'use strict'\n\n// turn tar(1) style args like `C` into the more verbose things like `cwd`\n\nconst argmap = new Map([\n ['C', 'cwd'],\n ['f', 'file'],\n ['z', 'gzip'],\n ['P', 'preservePaths'],\n ['U', 'unlink'],\n ['strip-components', 'strip'],\n ['stripComponents', 'strip'],\n ['keep-newer', 'newer'],\n ['keepNewer', 'newer'],\n ['keep-newer-files', 'newer'],\n ['keepNewerFiles', 'newer'],\n ['k', 'keep'],\n ['keep-existing', 'keep'],\n ['keepExisting', 'keep'],\n ['m', 'noMtime'],\n ['no-mtime', 'noMtime'],\n ['p', 'preserveOwner'],\n ['L', 'follow'],\n ['h', 'follow']\n])\n\nconst parse = module.exports = opt => opt ? Object.keys(opt).map(k => [\n argmap.has(k) ? 
argmap.get(k) : k, opt[k]\n]).reduce((set, kv) => (set[kv[0]] = kv[1], set), Object.create(null)) : {}\n","'use strict'\nconst EE = require('events')\nconst Yallist = require('yallist')\nconst EOF = Symbol('EOF')\nconst MAYBE_EMIT_END = Symbol('maybeEmitEnd')\nconst EMITTED_END = Symbol('emittedEnd')\nconst CLOSED = Symbol('closed')\nconst READ = Symbol('read')\nconst FLUSH = Symbol('flush')\nconst doIter = process.env._MP_NO_ITERATOR_SYMBOLS_ !== '1'\nconst ASYNCITERATOR = doIter && Symbol.asyncIterator || Symbol('asyncIterator not implemented')\nconst ITERATOR = doIter && Symbol.iterator || Symbol('iterator not implemented')\nconst FLUSHCHUNK = Symbol('flushChunk')\nconst SD = require('string_decoder').StringDecoder\nconst ENCODING = Symbol('encoding')\nconst DECODER = Symbol('decoder')\nconst FLOWING = Symbol('flowing')\nconst RESUME = Symbol('resume')\nconst BUFFERLENGTH = Symbol('bufferLength')\nconst BUFFERPUSH = Symbol('bufferPush')\nconst BUFFERSHIFT = Symbol('bufferShift')\nconst OBJECTMODE = Symbol('objectMode')\n\n// Buffer in node 4.x < 4.5.0 doesn't have working Buffer.from\n// or Buffer.alloc, and Buffer in node 10 deprecated the ctor.\n// .M, this is fine .\\^/M..\nlet B = Buffer\n/* istanbul ignore next */\nif (!B.alloc) {\n B = require('safe-buffer').Buffer\n}\n\nmodule.exports = class MiniPass extends EE {\n constructor (options) {\n super()\n this[FLOWING] = false\n this.pipes = new Yallist()\n this.buffer = new Yallist()\n this[OBJECTMODE] = options && options.objectMode || false\n if (this[OBJECTMODE])\n this[ENCODING] = null\n else\n this[ENCODING] = options && options.encoding || null\n if (this[ENCODING] === 'buffer')\n this[ENCODING] = null\n this[DECODER] = this[ENCODING] ? new SD(this[ENCODING]) : null\n this[EOF] = false\n this[EMITTED_END] = false\n this[CLOSED] = false\n this.writable = true\n this.readable = true\n this[BUFFERLENGTH] = 0\n }\n\n get bufferLength () { return this[BUFFERLENGTH] }\n\n get encoding () { return this[ENCODING] }\n set encoding (enc) {\n if (this[OBJECTMODE])\n throw new Error('cannot set encoding in objectMode')\n\n if (this[ENCODING] && enc !== this[ENCODING] &&\n (this[DECODER] && this[DECODER].lastNeed || this[BUFFERLENGTH]))\n throw new Error('cannot change encoding')\n\n if (this[ENCODING] !== enc) {\n this[DECODER] = enc ? new SD(enc) : null\n if (this.buffer.length)\n this.buffer = this.buffer.map(chunk => this[DECODER].write(chunk))\n }\n\n this[ENCODING] = enc\n }\n\n setEncoding (enc) {\n this.encoding = enc\n }\n\n write (chunk, encoding, cb) {\n if (this[EOF])\n throw new Error('write after end')\n\n if (typeof encoding === 'function')\n cb = encoding, encoding = 'utf8'\n\n if (!encoding)\n encoding = 'utf8'\n\n // fast-path writing strings of same encoding to a stream with\n // an empty buffer, skipping the buffer/decoder dance\n if (typeof chunk === 'string' && !this[OBJECTMODE] &&\n // unless it is a string already ready for us to use\n !(encoding === this[ENCODING] && !this[DECODER].lastNeed)) {\n chunk = B.from(chunk, encoding)\n }\n\n if (B.isBuffer(chunk) && this[ENCODING])\n chunk = this[DECODER].write(chunk)\n\n try {\n return this.flowing\n ? 
(this.emit('data', chunk), this.flowing)\n : (this[BUFFERPUSH](chunk), false)\n } finally {\n this.emit('readable')\n if (cb)\n cb()\n }\n }\n\n read (n) {\n try {\n if (this[BUFFERLENGTH] === 0 || n === 0 || n > this[BUFFERLENGTH])\n return null\n\n if (this[OBJECTMODE])\n n = null\n\n if (this.buffer.length > 1 && !this[OBJECTMODE]) {\n if (this.encoding)\n this.buffer = new Yallist([\n Array.from(this.buffer).join('')\n ])\n else\n this.buffer = new Yallist([\n B.concat(Array.from(this.buffer), this[BUFFERLENGTH])\n ])\n }\n\n return this[READ](n || null, this.buffer.head.value)\n } finally {\n this[MAYBE_EMIT_END]()\n }\n }\n\n [READ] (n, chunk) {\n if (n === chunk.length || n === null)\n this[BUFFERSHIFT]()\n else {\n this.buffer.head.value = chunk.slice(n)\n chunk = chunk.slice(0, n)\n this[BUFFERLENGTH] -= n\n }\n\n this.emit('data', chunk)\n\n if (!this.buffer.length && !this[EOF])\n this.emit('drain')\n\n return chunk\n }\n\n end (chunk, encoding, cb) {\n if (typeof chunk === 'function')\n cb = chunk, chunk = null\n if (typeof encoding === 'function')\n cb = encoding, encoding = 'utf8'\n if (chunk)\n this.write(chunk, encoding)\n if (cb)\n this.once('end', cb)\n this[EOF] = true\n this.writable = false\n if (this.flowing)\n this[MAYBE_EMIT_END]()\n }\n\n // don't let the internal resume be overwritten\n [RESUME] () {\n this[FLOWING] = true\n this.emit('resume')\n if (this.buffer.length)\n this[FLUSH]()\n else if (this[EOF])\n this[MAYBE_EMIT_END]()\n else\n this.emit('drain')\n }\n\n resume () {\n return this[RESUME]()\n }\n\n pause () {\n this[FLOWING] = false\n }\n\n get flowing () {\n return this[FLOWING]\n }\n\n [BUFFERPUSH] (chunk) {\n if (this[OBJECTMODE])\n this[BUFFERLENGTH] += 1\n else\n this[BUFFERLENGTH] += chunk.length\n return this.buffer.push(chunk)\n }\n\n [BUFFERSHIFT] () {\n if (this.buffer.length) {\n if (this[OBJECTMODE])\n this[BUFFERLENGTH] -= 1\n else\n this[BUFFERLENGTH] -= this.buffer.head.value.length\n }\n return this.buffer.shift()\n }\n\n [FLUSH] () {\n do {} while (this[FLUSHCHUNK](this[BUFFERSHIFT]()))\n\n if (!this.buffer.length && !this[EOF])\n this.emit('drain')\n }\n\n [FLUSHCHUNK] (chunk) {\n return chunk ? 
(this.emit('data', chunk), this.flowing) : false\n }\n\n pipe (dest, opts) {\n if (dest === process.stdout || dest === process.stderr)\n (opts = opts || {}).end = false\n const p = { dest: dest, opts: opts, ondrain: _ => this[RESUME]() }\n this.pipes.push(p)\n\n dest.on('drain', p.ondrain)\n this[RESUME]()\n return dest\n }\n\n addListener (ev, fn) {\n return this.on(ev, fn)\n }\n\n on (ev, fn) {\n try {\n return super.on(ev, fn)\n } finally {\n if (ev === 'data' && !this.pipes.length && !this.flowing)\n this[RESUME]()\n else if (ev === 'end' && this[EMITTED_END]) {\n super.emit('end')\n this.removeAllListeners('end')\n }\n }\n }\n\n get emittedEnd () {\n return this[EMITTED_END]\n }\n\n [MAYBE_EMIT_END] () {\n if (!this[EMITTED_END] && this.buffer.length === 0 && this[EOF]) {\n this.emit('end')\n this.emit('prefinish')\n this.emit('finish')\n if (this[CLOSED])\n this.emit('close')\n }\n }\n\n emit (ev, data) {\n if (ev === 'data') {\n if (!data)\n return\n\n if (this.pipes.length)\n this.pipes.forEach(p => p.dest.write(data) || this.pause())\n } else if (ev === 'end') {\n if (this[EMITTED_END] === true)\n return\n\n this[EMITTED_END] = true\n this.readable = false\n\n if (this[DECODER]) {\n data = this[DECODER].end()\n if (data) {\n this.pipes.forEach(p => p.dest.write(data))\n super.emit('data', data)\n }\n }\n\n this.pipes.forEach(p => {\n p.dest.removeListener('drain', p.ondrain)\n if (!p.opts || p.opts.end !== false)\n p.dest.end()\n })\n } else if (ev === 'close') {\n this[CLOSED] = true\n // don't emit close before 'end' and 'finish'\n if (!this[EMITTED_END])\n return\n }\n\n const args = new Array(arguments.length)\n args[0] = ev\n args[1] = data\n if (arguments.length > 2) {\n for (let i = 2; i < arguments.length; i++) {\n args[i] = arguments[i]\n }\n }\n\n try {\n return super.emit.apply(this, args)\n } finally {\n if (ev !== 'end')\n this[MAYBE_EMIT_END]()\n else\n this.removeAllListeners('end')\n }\n }\n\n // const all = await stream.collect()\n collect () {\n return new Promise((resolve, reject) => {\n const buf = []\n this.on('data', c => buf.push(c))\n this.on('end', () => resolve(buf))\n this.on('error', reject)\n })\n }\n\n // for await (let chunk of stream)\n [ASYNCITERATOR] () {\n const next = () => {\n const res = this.read()\n if (res !== null)\n return Promise.resolve({ done: false, value: res })\n\n if (this[EOF])\n return Promise.resolve({ done: true })\n\n let resolve = null\n let reject = null\n const onerr = er => {\n this.removeListener('data', ondata)\n this.removeListener('end', onend)\n reject(er)\n }\n const ondata = value => {\n this.removeListener('error', onerr)\n this.removeListener('end', onend)\n this.pause()\n resolve({ value: value, done: !!this[EOF] })\n }\n const onend = () => {\n this.removeListener('error', onerr)\n this.removeListener('data', ondata)\n resolve({ done: true })\n }\n return new Promise((res, rej) => {\n reject = rej\n resolve = res\n this.once('error', onerr)\n this.once('end', onend)\n this.once('data', ondata)\n this.resume()\n })\n }\n\n return { next }\n }\n\n // for (let chunk of stream)\n [ITERATOR] () {\n const next = () => {\n const value = this.read()\n const done = value === null\n return { value, done }\n }\n return { next }\n }\n}\n","'use strict'\n// parse a 512-byte header block to a data object, or vice-versa\n// encode returns `true` if a pax extended header is needed, because\n// the data could not be faithfully encoded in a simple header.\n// (Also, check header.needPax to see if it needs a pax header.)\n\nconst 
Buffer = require('./buffer.js')\nconst types = require('./types.js')\nconst pathModule = require('path').posix\nconst large = require('./large-numbers.js')\n\nconst SLURP = Symbol('slurp')\nconst TYPE = Symbol('type')\n\nclass Header {\n constructor (data, off, ex, gex) {\n this.cksumValid = false\n this.needPax = false\n this.nullBlock = false\n\n this.block = null\n this.path = null\n this.mode = null\n this.uid = null\n this.gid = null\n this.size = null\n this.mtime = null\n this.cksum = null\n this[TYPE] = '0'\n this.linkpath = null\n this.uname = null\n this.gname = null\n this.devmaj = 0\n this.devmin = 0\n this.atime = null\n this.ctime = null\n\n if (Buffer.isBuffer(data))\n this.decode(data, off || 0, ex, gex)\n else if (data)\n this.set(data)\n }\n\n decode (buf, off, ex, gex) {\n if (!off)\n off = 0\n\n if (!buf || !(buf.length >= off + 512))\n throw new Error('need 512 bytes for header')\n\n this.path = decString(buf, off, 100)\n this.mode = decNumber(buf, off + 100, 8)\n this.uid = decNumber(buf, off + 108, 8)\n this.gid = decNumber(buf, off + 116, 8)\n this.size = decNumber(buf, off + 124, 12)\n this.mtime = decDate(buf, off + 136, 12)\n this.cksum = decNumber(buf, off + 148, 12)\n\n // if we have extended or global extended headers, apply them now\n // See https://github.com/npm/node-tar/pull/187\n this[SLURP](ex)\n this[SLURP](gex, true)\n\n // old tar versions marked dirs as a file with a trailing /\n this[TYPE] = decString(buf, off + 156, 1)\n if (this[TYPE] === '')\n this[TYPE] = '0'\n if (this[TYPE] === '0' && this.path.substr(-1) === '/')\n this[TYPE] = '5'\n\n // tar implementations sometimes incorrectly put the stat(dir).size\n // as the size in the tarball, even though Directory entries are\n // not able to have any body at all. In the very rare chance that\n // it actually DOES have a body, we weren't going to do anything with\n // it anyway, and it'll just be a warning about an invalid header.\n if (this[TYPE] === '5')\n this.size = 0\n\n this.linkpath = decString(buf, off + 157, 100)\n if (buf.slice(off + 257, off + 265).toString() === 'ustar\\u000000') {\n this.uname = decString(buf, off + 265, 32)\n this.gname = decString(buf, off + 297, 32)\n this.devmaj = decNumber(buf, off + 329, 8)\n this.devmin = decNumber(buf, off + 337, 8)\n if (buf[off + 475] !== 0) {\n // definitely a prefix, definitely >130 chars.\n const prefix = decString(buf, off + 345, 155)\n this.path = prefix + '/' + this.path\n } else {\n const prefix = decString(buf, off + 345, 130)\n if (prefix)\n this.path = prefix + '/' + this.path\n this.atime = decDate(buf, off + 476, 12)\n this.ctime = decDate(buf, off + 488, 12)\n }\n }\n\n let sum = 8 * 0x20\n for (let i = off; i < off + 148; i++) {\n sum += buf[i]\n }\n for (let i = off + 156; i < off + 512; i++) {\n sum += buf[i]\n }\n this.cksumValid = sum === this.cksum\n if (this.cksum === null && sum === 8 * 0x20)\n this.nullBlock = true\n }\n\n [SLURP] (ex, global) {\n for (let k in ex) {\n // we slurp in everything except for the path attribute in\n // a global extended header, because that's weird.\n if (ex[k] !== null && ex[k] !== undefined &&\n !(global && k === 'path'))\n this[k] = ex[k]\n }\n }\n\n encode (buf, off) {\n if (!buf) {\n buf = this.block = Buffer.alloc(512)\n off = 0\n }\n\n if (!off)\n off = 0\n\n if (!(buf.length >= off + 512))\n throw new Error('need 512 bytes for header')\n\n const prefixSize = this.ctime || this.atime ? 
130 : 155\n const split = splitPrefix(this.path || '', prefixSize)\n const path = split[0]\n const prefix = split[1]\n this.needPax = split[2]\n\n this.needPax = encString(buf, off, 100, path) || this.needPax\n this.needPax = encNumber(buf, off + 100, 8, this.mode) || this.needPax\n this.needPax = encNumber(buf, off + 108, 8, this.uid) || this.needPax\n this.needPax = encNumber(buf, off + 116, 8, this.gid) || this.needPax\n this.needPax = encNumber(buf, off + 124, 12, this.size) || this.needPax\n this.needPax = encDate(buf, off + 136, 12, this.mtime) || this.needPax\n buf[off + 156] = this[TYPE].charCodeAt(0)\n this.needPax = encString(buf, off + 157, 100, this.linkpath) || this.needPax\n buf.write('ustar\\u000000', off + 257, 8)\n this.needPax = encString(buf, off + 265, 32, this.uname) || this.needPax\n this.needPax = encString(buf, off + 297, 32, this.gname) || this.needPax\n this.needPax = encNumber(buf, off + 329, 8, this.devmaj) || this.needPax\n this.needPax = encNumber(buf, off + 337, 8, this.devmin) || this.needPax\n this.needPax = encString(buf, off + 345, prefixSize, prefix) || this.needPax\n if (buf[off + 475] !== 0)\n this.needPax = encString(buf, off + 345, 155, prefix) || this.needPax\n else {\n this.needPax = encString(buf, off + 345, 130, prefix) || this.needPax\n this.needPax = encDate(buf, off + 476, 12, this.atime) || this.needPax\n this.needPax = encDate(buf, off + 488, 12, this.ctime) || this.needPax\n }\n\n let sum = 8 * 0x20\n for (let i = off; i < off + 148; i++) {\n sum += buf[i]\n }\n for (let i = off + 156; i < off + 512; i++) {\n sum += buf[i]\n }\n this.cksum = sum\n encNumber(buf, off + 148, 8, this.cksum)\n this.cksumValid = true\n\n return this.needPax\n }\n\n set (data) {\n for (let i in data) {\n if (data[i] !== null && data[i] !== undefined)\n this[i] = data[i]\n }\n }\n\n get type () {\n return types.name.get(this[TYPE]) || this[TYPE]\n }\n\n get typeKey () {\n return this[TYPE]\n }\n\n set type (type) {\n if (types.code.has(type))\n this[TYPE] = types.code.get(type)\n else\n this[TYPE] = type\n }\n}\n\nconst splitPrefix = (p, prefixSize) => {\n const pathSize = 100\n let pp = p\n let prefix = ''\n let ret\n const root = pathModule.parse(p).root || '.'\n\n if (Buffer.byteLength(pp) < pathSize)\n ret = [pp, prefix, false]\n else {\n // first set prefix to the dir, and path to the base\n prefix = pathModule.dirname(pp)\n pp = pathModule.basename(pp)\n\n do {\n // both fit!\n if (Buffer.byteLength(pp) <= pathSize &&\n Buffer.byteLength(prefix) <= prefixSize)\n ret = [pp, prefix, false]\n\n // prefix fits in prefix, but path doesn't fit in path\n else if (Buffer.byteLength(pp) > pathSize &&\n Buffer.byteLength(prefix) <= prefixSize)\n ret = [pp.substr(0, pathSize - 1), prefix, true]\n\n else {\n // make path take a bit from prefix\n pp = pathModule.join(pathModule.basename(prefix), pp)\n prefix = pathModule.dirname(prefix)\n }\n } while (prefix !== root && !ret)\n\n // at this point, found no resolution, just truncate\n if (!ret)\n ret = [p.substr(0, pathSize - 1), '', true]\n }\n return ret\n}\n\nconst decString = (buf, off, size) =>\n buf.slice(off, off + size).toString('utf8').replace(/\\0.*/, '')\n\nconst decDate = (buf, off, size) =>\n numToDate(decNumber(buf, off, size))\n\nconst numToDate = num => num === null ? null : new Date(num * 1000)\n\nconst decNumber = (buf, off, size) =>\n buf[off] & 0x80 ? large.parse(buf.slice(off, off + size))\n : decSmallNumber(buf, off, size)\n\nconst nanNull = value => isNaN(value) ? 
null : value\n\nconst decSmallNumber = (buf, off, size) =>\n nanNull(parseInt(\n buf.slice(off, off + size)\n .toString('utf8').replace(/\\0.*$/, '').trim(), 8))\n\n// the maximum encodable as a null-terminated octal, by field size\nconst MAXNUM = {\n 12: 0o77777777777,\n 8 : 0o7777777\n}\n\nconst encNumber = (buf, off, size, number) =>\n number === null ? false :\n number > MAXNUM[size] || number < 0\n ? (large.encode(number, buf.slice(off, off + size)), true)\n : (encSmallNumber(buf, off, size, number), false)\n\nconst encSmallNumber = (buf, off, size, number) =>\n buf.write(octalString(number, size), off, size, 'ascii')\n\nconst octalString = (number, size) =>\n padOctal(Math.floor(number).toString(8), size)\n\nconst padOctal = (string, size) =>\n (string.length === size - 1 ? string\n : new Array(size - string.length - 1).join('0') + string + ' ') + '\\0'\n\nconst encDate = (buf, off, size, date) =>\n date === null ? false :\n encNumber(buf, off, size, date.getTime() / 1000)\n\n// enough to fill the longest string we've got\nconst NULLS = new Array(156).join('\\0')\n// pad with nulls, return true if it's longer or non-ascii\nconst encString = (buf, off, size, string) =>\n string === null ? false :\n (buf.write(string + NULLS, off, size, 'utf8'),\n string.length !== Buffer.byteLength(string) || string.length > size)\n\nmodule.exports = Header\n","'use strict'\nconst MiniPass = require('minipass')\nconst EE = require('events').EventEmitter\nconst fs = require('fs')\n\n// for writev\nconst binding = process.binding('fs')\nconst writeBuffers = binding.writeBuffers\nconst FSReqWrap = binding.FSReqWrap\n\nconst _autoClose = Symbol('_autoClose')\nconst _close = Symbol('_close')\nconst _ended = Symbol('_ended')\nconst _fd = Symbol('_fd')\nconst _finished = Symbol('_finished')\nconst _flags = Symbol('_flags')\nconst _flush = Symbol('_flush')\nconst _handleChunk = Symbol('_handleChunk')\nconst _makeBuf = Symbol('_makeBuf')\nconst _mode = Symbol('_mode')\nconst _needDrain = Symbol('_needDrain')\nconst _onerror = Symbol('_onerror')\nconst _onopen = Symbol('_onopen')\nconst _onread = Symbol('_onread')\nconst _onwrite = Symbol('_onwrite')\nconst _open = Symbol('_open')\nconst _path = Symbol('_path')\nconst _pos = Symbol('_pos')\nconst _queue = Symbol('_queue')\nconst _read = Symbol('_read')\nconst _readSize = Symbol('_readSize')\nconst _reading = Symbol('_reading')\nconst _remain = Symbol('_remain')\nconst _size = Symbol('_size')\nconst _write = Symbol('_write')\nconst _writing = Symbol('_writing')\nconst _defaultFlag = Symbol('_defaultFlag')\n\nclass ReadStream extends MiniPass {\n constructor (path, opt) {\n opt = opt || {}\n super(opt)\n\n this.writable = false\n\n if (typeof path !== 'string')\n throw new TypeError('path must be a string')\n\n this[_fd] = typeof opt.fd === 'number' ? opt.fd : null\n this[_path] = path\n this[_readSize] = opt.readSize || 16*1024*1024\n this[_reading] = false\n this[_size] = typeof opt.size === 'number' ? 
opt.size : Infinity\n this[_remain] = this[_size]\n this[_autoClose] = typeof opt.autoClose === 'boolean' ?\n opt.autoClose : true\n\n if (typeof this[_fd] === 'number')\n this[_read]()\n else\n this[_open]()\n }\n\n get fd () { return this[_fd] }\n get path () { return this[_path] }\n\n write () {\n throw new TypeError('this is a readable stream')\n }\n\n end () {\n throw new TypeError('this is a readable stream')\n }\n\n [_open] () {\n fs.open(this[_path], 'r', (er, fd) => this[_onopen](er, fd))\n }\n\n [_onopen] (er, fd) {\n if (er)\n this[_onerror](er)\n else {\n this[_fd] = fd\n this.emit('open', fd)\n this[_read]()\n }\n }\n\n [_makeBuf] () {\n return Buffer.allocUnsafe(Math.min(this[_readSize], this[_remain]))\n }\n\n [_read] () {\n if (!this[_reading]) {\n this[_reading] = true\n const buf = this[_makeBuf]()\n /* istanbul ignore if */\n if (buf.length === 0) return process.nextTick(() => this[_onread](null, 0, buf))\n fs.read(this[_fd], buf, 0, buf.length, null, (er, br, buf) =>\n this[_onread](er, br, buf))\n }\n }\n\n [_onread] (er, br, buf) {\n this[_reading] = false\n if (er)\n this[_onerror](er)\n else if (this[_handleChunk](br, buf))\n this[_read]()\n }\n\n [_close] () {\n if (this[_autoClose] && typeof this[_fd] === 'number') {\n fs.close(this[_fd], _ => this.emit('close'))\n this[_fd] = null\n }\n }\n\n [_onerror] (er) {\n this[_reading] = true\n this[_close]()\n this.emit('error', er)\n }\n\n [_handleChunk] (br, buf) {\n let ret = false\n // no effect if infinite\n this[_remain] -= br\n if (br > 0)\n ret = super.write(br < buf.length ? buf.slice(0, br) : buf)\n\n if (br === 0 || this[_remain] <= 0) {\n ret = false\n this[_close]()\n super.end()\n }\n\n return ret\n }\n\n emit (ev, data) {\n switch (ev) {\n case 'prefinish':\n case 'finish':\n break\n\n case 'drain':\n if (typeof this[_fd] === 'number')\n this[_read]()\n break\n\n default:\n return super.emit(ev, data)\n }\n }\n}\n\nclass ReadStreamSync extends ReadStream {\n [_open] () {\n let threw = true\n try {\n this[_onopen](null, fs.openSync(this[_path], 'r'))\n threw = false\n } finally {\n if (threw)\n this[_close]()\n }\n }\n\n [_read] () {\n let threw = true\n try {\n if (!this[_reading]) {\n this[_reading] = true\n do {\n const buf = this[_makeBuf]()\n /* istanbul ignore next */\n const br = buf.length === 0 ? 0 : fs.readSync(this[_fd], buf, 0, buf.length, null)\n if (!this[_handleChunk](br, buf))\n break\n } while (true)\n this[_reading] = false\n }\n threw = false\n } finally {\n if (threw)\n this[_close]()\n }\n }\n\n [_close] () {\n if (this[_autoClose] && typeof this[_fd] === 'number') {\n try {\n fs.closeSync(this[_fd])\n } catch (er) {}\n this[_fd] = null\n this.emit('close')\n }\n }\n}\n\nclass WriteStream extends EE {\n constructor (path, opt) {\n opt = opt || {}\n super(opt)\n this.readable = false\n this[_writing] = false\n this[_ended] = false\n this[_needDrain] = false\n this[_queue] = []\n this[_path] = path\n this[_fd] = typeof opt.fd === 'number' ? opt.fd : null\n this[_mode] = opt.mode === undefined ? 0o666 : opt.mode\n this[_pos] = typeof opt.start === 'number' ? opt.start : null\n this[_autoClose] = typeof opt.autoClose === 'boolean' ?\n opt.autoClose : true\n\n // truncating makes no sense when writing into the middle\n const defaultFlag = this[_pos] !== null ? 'r+' : 'w'\n this[_defaultFlag] = opt.flags === undefined\n this[_flags] = this[_defaultFlag] ? 
defaultFlag : opt.flags\n\n if (this[_fd] === null)\n this[_open]()\n }\n\n get fd () { return this[_fd] }\n get path () { return this[_path] }\n\n [_onerror] (er) {\n this[_close]()\n this[_writing] = true\n this.emit('error', er)\n }\n\n [_open] () {\n fs.open(this[_path], this[_flags], this[_mode],\n (er, fd) => this[_onopen](er, fd))\n }\n\n [_onopen] (er, fd) {\n if (this[_defaultFlag] &&\n this[_flags] === 'r+' &&\n er && er.code === 'ENOENT') {\n this[_flags] = 'w'\n this[_open]()\n } else if (er)\n this[_onerror](er)\n else {\n this[_fd] = fd\n this.emit('open', fd)\n this[_flush]()\n }\n }\n\n end (buf, enc) {\n if (buf)\n this.write(buf, enc)\n\n this[_ended] = true\n\n // synthetic after-write logic, where drain/finish live\n if (!this[_writing] && !this[_queue].length &&\n typeof this[_fd] === 'number')\n this[_onwrite](null, 0)\n }\n\n write (buf, enc) {\n if (typeof buf === 'string')\n buf = new Buffer(buf, enc)\n\n if (this[_ended]) {\n this.emit('error', new Error('write() after end()'))\n return false\n }\n\n if (this[_fd] === null || this[_writing] || this[_queue].length) {\n this[_queue].push(buf)\n this[_needDrain] = true\n return false\n }\n\n this[_writing] = true\n this[_write](buf)\n return true\n }\n\n [_write] (buf) {\n fs.write(this[_fd], buf, 0, buf.length, this[_pos], (er, bw) =>\n this[_onwrite](er, bw))\n }\n\n [_onwrite] (er, bw) {\n if (er)\n this[_onerror](er)\n else {\n if (this[_pos] !== null)\n this[_pos] += bw\n if (this[_queue].length)\n this[_flush]()\n else {\n this[_writing] = false\n\n if (this[_ended] && !this[_finished]) {\n this[_finished] = true\n this[_close]()\n this.emit('finish')\n } else if (this[_needDrain]) {\n this[_needDrain] = false\n this.emit('drain')\n }\n }\n }\n }\n\n [_flush] () {\n if (this[_queue].length === 0) {\n if (this[_ended])\n this[_onwrite](null, 0)\n } else if (this[_queue].length === 1)\n this[_write](this[_queue].pop())\n else {\n const iovec = this[_queue]\n this[_queue] = []\n writev(this[_fd], iovec, this[_pos],\n (er, bw) => this[_onwrite](er, bw))\n }\n }\n\n [_close] () {\n if (this[_autoClose] && typeof this[_fd] === 'number') {\n fs.close(this[_fd], _ => this.emit('close'))\n this[_fd] = null\n }\n }\n}\n\nclass WriteStreamSync extends WriteStream {\n [_open] () {\n let fd\n try {\n fd = fs.openSync(this[_path], this[_flags], this[_mode])\n } catch (er) {\n if (this[_defaultFlag] &&\n this[_flags] === 'r+' &&\n er && er.code === 'ENOENT') {\n this[_flags] = 'w'\n return this[_open]()\n } else\n throw er\n }\n this[_onopen](null, fd)\n }\n\n [_close] () {\n if (this[_autoClose] && typeof this[_fd] === 'number') {\n try {\n fs.closeSync(this[_fd])\n } catch (er) {}\n this[_fd] = null\n this.emit('close')\n }\n }\n\n [_write] (buf) {\n try {\n this[_onwrite](null,\n fs.writeSync(this[_fd], buf, 0, buf.length, this[_pos]))\n } catch (er) {\n this[_onwrite](er, 0)\n }\n }\n}\n\nconst writev = (fd, iovec, pos, cb) => {\n const done = (er, bw) => cb(er, bw, iovec)\n const req = new FSReqWrap()\n req.oncomplete = done\n binding.writeBuffers(fd, iovec, pos, req)\n}\n\nexports.ReadStream = ReadStream\nexports.ReadStreamSync = ReadStreamSync\n\nexports.WriteStream = WriteStream\nexports.WriteStreamSync = WriteStreamSync\n","module.exports = require(\"assert\");","'use strict'\n\nconst u = require('universalify').fromCallback\nconst rimraf = require('./rimraf')\n\nmodule.exports = {\n remove: u(rimraf),\n removeSync: rimraf.sync\n}\n","module.exports = require(\"events\");","'use strict'\nmodule.exports = 
Yallist\n\nYallist.Node = Node\nYallist.create = Yallist\n\nfunction Yallist (list) {\n var self = this\n if (!(self instanceof Yallist)) {\n self = new Yallist()\n }\n\n self.tail = null\n self.head = null\n self.length = 0\n\n if (list && typeof list.forEach === 'function') {\n list.forEach(function (item) {\n self.push(item)\n })\n } else if (arguments.length > 0) {\n for (var i = 0, l = arguments.length; i < l; i++) {\n self.push(arguments[i])\n }\n }\n\n return self\n}\n\nYallist.prototype.removeNode = function (node) {\n if (node.list !== this) {\n throw new Error('removing node which does not belong to this list')\n }\n\n var next = node.next\n var prev = node.prev\n\n if (next) {\n next.prev = prev\n }\n\n if (prev) {\n prev.next = next\n }\n\n if (node === this.head) {\n this.head = next\n }\n if (node === this.tail) {\n this.tail = prev\n }\n\n node.list.length--\n node.next = null\n node.prev = null\n node.list = null\n}\n\nYallist.prototype.unshiftNode = function (node) {\n if (node === this.head) {\n return\n }\n\n if (node.list) {\n node.list.removeNode(node)\n }\n\n var head = this.head\n node.list = this\n node.next = head\n if (head) {\n head.prev = node\n }\n\n this.head = node\n if (!this.tail) {\n this.tail = node\n }\n this.length++\n}\n\nYallist.prototype.pushNode = function (node) {\n if (node === this.tail) {\n return\n }\n\n if (node.list) {\n node.list.removeNode(node)\n }\n\n var tail = this.tail\n node.list = this\n node.prev = tail\n if (tail) {\n tail.next = node\n }\n\n this.tail = node\n if (!this.head) {\n this.head = node\n }\n this.length++\n}\n\nYallist.prototype.push = function () {\n for (var i = 0, l = arguments.length; i < l; i++) {\n push(this, arguments[i])\n }\n return this.length\n}\n\nYallist.prototype.unshift = function () {\n for (var i = 0, l = arguments.length; i < l; i++) {\n unshift(this, arguments[i])\n }\n return this.length\n}\n\nYallist.prototype.pop = function () {\n if (!this.tail) {\n return undefined\n }\n\n var res = this.tail.value\n this.tail = this.tail.prev\n if (this.tail) {\n this.tail.next = null\n } else {\n this.head = null\n }\n this.length--\n return res\n}\n\nYallist.prototype.shift = function () {\n if (!this.head) {\n return undefined\n }\n\n var res = this.head.value\n this.head = this.head.next\n if (this.head) {\n this.head.prev = null\n } else {\n this.tail = null\n }\n this.length--\n return res\n}\n\nYallist.prototype.forEach = function (fn, thisp) {\n thisp = thisp || this\n for (var walker = this.head, i = 0; walker !== null; i++) {\n fn.call(thisp, walker.value, i, this)\n walker = walker.next\n }\n}\n\nYallist.prototype.forEachReverse = function (fn, thisp) {\n thisp = thisp || this\n for (var walker = this.tail, i = this.length - 1; walker !== null; i--) {\n fn.call(thisp, walker.value, i, this)\n walker = walker.prev\n }\n}\n\nYallist.prototype.get = function (n) {\n for (var i = 0, walker = this.head; walker !== null && i < n; i++) {\n // abort out of the list early if we hit a cycle\n walker = walker.next\n }\n if (i === n && walker !== null) {\n return walker.value\n }\n}\n\nYallist.prototype.getReverse = function (n) {\n for (var i = 0, walker = this.tail; walker !== null && i < n; i++) {\n // abort out of the list early if we hit a cycle\n walker = walker.prev\n }\n if (i === n && walker !== null) {\n return walker.value\n }\n}\n\nYallist.prototype.map = function (fn, thisp) {\n thisp = thisp || this\n var res = new Yallist()\n for (var walker = this.head; walker !== null;) {\n 
res.push(fn.call(thisp, walker.value, this))\n walker = walker.next\n }\n return res\n}\n\nYallist.prototype.mapReverse = function (fn, thisp) {\n thisp = thisp || this\n var res = new Yallist()\n for (var walker = this.tail; walker !== null;) {\n res.push(fn.call(thisp, walker.value, this))\n walker = walker.prev\n }\n return res\n}\n\nYallist.prototype.reduce = function (fn, initial) {\n var acc\n var walker = this.head\n if (arguments.length > 1) {\n acc = initial\n } else if (this.head) {\n walker = this.head.next\n acc = this.head.value\n } else {\n throw new TypeError('Reduce of empty list with no initial value')\n }\n\n for (var i = 0; walker !== null; i++) {\n acc = fn(acc, walker.value, i)\n walker = walker.next\n }\n\n return acc\n}\n\nYallist.prototype.reduceReverse = function (fn, initial) {\n var acc\n var walker = this.tail\n if (arguments.length > 1) {\n acc = initial\n } else if (this.tail) {\n walker = this.tail.prev\n acc = this.tail.value\n } else {\n throw new TypeError('Reduce of empty list with no initial value')\n }\n\n for (var i = this.length - 1; walker !== null; i--) {\n acc = fn(acc, walker.value, i)\n walker = walker.prev\n }\n\n return acc\n}\n\nYallist.prototype.toArray = function () {\n var arr = new Array(this.length)\n for (var i = 0, walker = this.head; walker !== null; i++) {\n arr[i] = walker.value\n walker = walker.next\n }\n return arr\n}\n\nYallist.prototype.toArrayReverse = function () {\n var arr = new Array(this.length)\n for (var i = 0, walker = this.tail; walker !== null; i++) {\n arr[i] = walker.value\n walker = walker.prev\n }\n return arr\n}\n\nYallist.prototype.slice = function (from, to) {\n to = to || this.length\n if (to < 0) {\n to += this.length\n }\n from = from || 0\n if (from < 0) {\n from += this.length\n }\n var ret = new Yallist()\n if (to < from || to < 0) {\n return ret\n }\n if (from < 0) {\n from = 0\n }\n if (to > this.length) {\n to = this.length\n }\n for (var i = 0, walker = this.head; walker !== null && i < from; i++) {\n walker = walker.next\n }\n for (; walker !== null && i < to; i++, walker = walker.next) {\n ret.push(walker.value)\n }\n return ret\n}\n\nYallist.prototype.sliceReverse = function (from, to) {\n to = to || this.length\n if (to < 0) {\n to += this.length\n }\n from = from || 0\n if (from < 0) {\n from += this.length\n }\n var ret = new Yallist()\n if (to < from || to < 0) {\n return ret\n }\n if (from < 0) {\n from = 0\n }\n if (to > this.length) {\n to = this.length\n }\n for (var i = this.length, walker = this.tail; walker !== null && i > to; i--) {\n walker = walker.prev\n }\n for (; walker !== null && i > from; i--, walker = walker.prev) {\n ret.push(walker.value)\n }\n return ret\n}\n\nYallist.prototype.reverse = function () {\n var head = this.head\n var tail = this.tail\n for (var walker = head; walker !== null; walker = walker.prev) {\n var p = walker.prev\n walker.prev = walker.next\n walker.next = p\n }\n this.head = tail\n this.tail = head\n return this\n}\n\nfunction push (self, item) {\n self.tail = new Node(item, self.tail, null, self)\n if (!self.head) {\n self.head = self.tail\n }\n self.length++\n}\n\nfunction unshift (self, item) {\n self.head = new Node(item, null, self.head, self)\n if (!self.tail) {\n self.tail = self.head\n }\n self.length++\n}\n\nfunction Node (value, prev, next, list) {\n if (!(this instanceof Node)) {\n return new Node(value, prev, next, list)\n }\n\n this.list = list\n this.value = value\n\n if (prev) {\n prev.next = this\n this.prev = prev\n } else {\n this.prev 
= null\n }\n\n if (next) {\n next.prev = this\n this.next = next\n } else {\n this.next = null\n }\n}\n\ntry {\n // add if support or Symbol.iterator is present\n require('./iterator.js')\n} catch (er) {}\n","'use strict'\nconst types = require('./types.js')\nconst MiniPass = require('minipass')\n\nconst SLURP = Symbol('slurp')\nmodule.exports = class ReadEntry extends MiniPass {\n constructor (header, ex, gex) {\n super()\n this.extended = ex\n this.globalExtended = gex\n this.header = header\n this.startBlockSize = 512 * Math.ceil(header.size / 512)\n this.blockRemain = this.startBlockSize\n this.remain = header.size\n this.type = header.type\n this.meta = false\n this.ignore = false\n switch (this.type) {\n case 'File':\n case 'OldFile':\n case 'Link':\n case 'SymbolicLink':\n case 'CharacterDevice':\n case 'BlockDevice':\n case 'Directory':\n case 'FIFO':\n case 'ContiguousFile':\n case 'GNUDumpDir':\n break\n\n case 'NextFileHasLongLinkpath':\n case 'NextFileHasLongPath':\n case 'OldGnuLongPath':\n case 'GlobalExtendedHeader':\n case 'ExtendedHeader':\n case 'OldExtendedHeader':\n this.meta = true\n break\n\n // NOTE: gnutar and bsdtar treat unrecognized types as 'File'\n // it may be worth doing the same, but with a warning.\n default:\n this.ignore = true\n }\n\n this.path = header.path\n this.mode = header.mode\n if (this.mode)\n this.mode = this.mode & 0o7777\n this.uid = header.uid\n this.gid = header.gid\n this.uname = header.uname\n this.gname = header.gname\n this.size = header.size\n this.mtime = header.mtime\n this.atime = header.atime\n this.ctime = header.ctime\n this.linkpath = header.linkpath\n this.uname = header.uname\n this.gname = header.gname\n\n if (ex) this[SLURP](ex)\n if (gex) this[SLURP](gex, true)\n }\n\n write (data) {\n const writeLen = data.length\n if (writeLen > this.blockRemain)\n throw new Error('writing more to entry than is appropriate')\n\n const r = this.remain\n const br = this.blockRemain\n this.remain = Math.max(0, r - writeLen)\n this.blockRemain = Math.max(0, br - writeLen)\n if (this.ignore)\n return true\n\n if (r >= writeLen)\n return super.write(data)\n\n // r < writeLen\n return super.write(data.slice(0, r))\n }\n\n [SLURP] (ex, global) {\n for (let k in ex) {\n // we slurp in everything except for the path attribute in\n // a global extended header, because that's weird.\n if (ex[k] !== null && ex[k] !== undefined &&\n !(global && k === 'path'))\n this[k] = ex[k]\n }\n }\n}\n","'use strict'\n// map types from key to human-friendly name\nexports.name = new Map([\n ['0', 'File'],\n // same as File\n ['', 'OldFile'],\n ['1', 'Link'],\n ['2', 'SymbolicLink'],\n // Devices and FIFOs aren't fully supported\n // they are parsed, but skipped when unpacking\n ['3', 'CharacterDevice'],\n ['4', 'BlockDevice'],\n ['5', 'Directory'],\n ['6', 'FIFO'],\n // same as File\n ['7', 'ContiguousFile'],\n // pax headers\n ['g', 'GlobalExtendedHeader'],\n ['x', 'ExtendedHeader'],\n // vendor-specific stuff\n // skip\n ['A', 'SolarisACL'],\n // like 5, but with data, which should be skipped\n ['D', 'GNUDumpDir'],\n // metadata only, skip\n ['I', 'Inode'],\n // data = link path of next file\n ['K', 'NextFileHasLongLinkpath'],\n // data = path of next file\n ['L', 'NextFileHasLongPath'],\n // skip\n ['M', 'ContinuationFile'],\n // like L\n ['N', 'OldGnuLongPath'],\n // skip\n ['S', 'SparseFile'],\n // skip\n ['V', 'TapeVolumeHeader'],\n // like x\n ['X', 'OldExtendedHeader']\n])\n\n// map the other direction\nexports.code = new Map(Array.from(exports.name).map(kv 
=> [kv[1], kv[0]]))\n","'use strict'\n\n// this[BUFFER] is the remainder of a chunk if we're waiting for\n// the full 512 bytes of a header to come in. We will Buffer.concat()\n// it to the next write(), which is a mem copy, but a small one.\n//\n// this[QUEUE] is a Yallist of entries that haven't been emitted\n// yet this can only get filled up if the user keeps write()ing after\n// a write() returns false, or does a write() with more than one entry\n//\n// We don't buffer chunks, we always parse them and either create an\n// entry, or push it into the active entry. The ReadEntry class knows\n// to throw data away if .ignore=true\n//\n// Shift entry off the buffer when it emits 'end', and emit 'entry' for\n// the next one in the list.\n//\n// At any time, we're pushing body chunks into the entry at WRITEENTRY,\n// and waiting for 'end' on the entry at READENTRY\n//\n// ignored entries get .resume() called on them straight away\n\nconst warner = require('./warn-mixin.js')\nconst path = require('path')\nconst Header = require('./header.js')\nconst EE = require('events')\nconst Yallist = require('yallist')\nconst maxMetaEntrySize = 1024 * 1024\nconst Entry = require('./read-entry.js')\nconst Pax = require('./pax.js')\nconst zlib = require('minizlib')\n\nconst gzipHeader = Buffer.from([0x1f, 0x8b])\nconst STATE = Symbol('state')\nconst WRITEENTRY = Symbol('writeEntry')\nconst READENTRY = Symbol('readEntry')\nconst NEXTENTRY = Symbol('nextEntry')\nconst PROCESSENTRY = Symbol('processEntry')\nconst EX = Symbol('extendedHeader')\nconst GEX = Symbol('globalExtendedHeader')\nconst META = Symbol('meta')\nconst EMITMETA = Symbol('emitMeta')\nconst BUFFER = Symbol('buffer')\nconst QUEUE = Symbol('queue')\nconst ENDED = Symbol('ended')\nconst EMITTEDEND = Symbol('emittedEnd')\nconst EMIT = Symbol('emit')\nconst UNZIP = Symbol('unzip')\nconst CONSUMECHUNK = Symbol('consumeChunk')\nconst CONSUMECHUNKSUB = Symbol('consumeChunkSub')\nconst CONSUMEBODY = Symbol('consumeBody')\nconst CONSUMEMETA = Symbol('consumeMeta')\nconst CONSUMEHEADER = Symbol('consumeHeader')\nconst CONSUMING = Symbol('consuming')\nconst BUFFERCONCAT = Symbol('bufferConcat')\nconst MAYBEEND = Symbol('maybeEnd')\nconst WRITING = Symbol('writing')\nconst ABORTED = Symbol('aborted')\nconst DONE = Symbol('onDone')\n\nconst noop = _ => true\n\nmodule.exports = warner(class Parser extends EE {\n constructor (opt) {\n opt = opt || {}\n super(opt)\n\n if (opt.ondone)\n this.on(DONE, opt.ondone)\n else\n this.on(DONE, _ => {\n this.emit('prefinish')\n this.emit('finish')\n this.emit('end')\n this.emit('close')\n })\n\n this.strict = !!opt.strict\n this.maxMetaEntrySize = opt.maxMetaEntrySize || maxMetaEntrySize\n this.filter = typeof opt.filter === 'function' ? 
opt.filter : noop\n\n // have to set this so that streams are ok piping into it\n this.writable = true\n this.readable = false\n\n this[QUEUE] = new Yallist()\n this[BUFFER] = null\n this[READENTRY] = null\n this[WRITEENTRY] = null\n this[STATE] = 'begin'\n this[META] = ''\n this[EX] = null\n this[GEX] = null\n this[ENDED] = false\n this[UNZIP] = null\n this[ABORTED] = false\n if (typeof opt.onwarn === 'function')\n this.on('warn', opt.onwarn)\n if (typeof opt.onentry === 'function')\n this.on('entry', opt.onentry)\n }\n\n [CONSUMEHEADER] (chunk, position) {\n const header = new Header(chunk, position, this[EX], this[GEX])\n\n if (header.nullBlock)\n this[EMIT]('nullBlock')\n else if (!header.cksumValid)\n this.warn('invalid entry', header)\n else if (!header.path)\n this.warn('invalid: path is required', header)\n else {\n const type = header.type\n if (/^(Symbolic)?Link$/.test(type) && !header.linkpath)\n this.warn('invalid: linkpath required', header)\n else if (!/^(Symbolic)?Link$/.test(type) && header.linkpath)\n this.warn('invalid: linkpath forbidden', header)\n else {\n const entry = this[WRITEENTRY] = new Entry(header, this[EX], this[GEX])\n\n if (entry.meta) {\n if (entry.size > this.maxMetaEntrySize) {\n entry.ignore = true\n this[EMIT]('ignoredEntry', entry)\n this[STATE] = 'ignore'\n } else if (entry.size > 0) {\n this[META] = ''\n entry.on('data', c => this[META] += c)\n this[STATE] = 'meta'\n }\n } else {\n\n this[EX] = null\n entry.ignore = entry.ignore || !this.filter(entry.path, entry)\n if (entry.ignore) {\n this[EMIT]('ignoredEntry', entry)\n this[STATE] = entry.remain ? 'ignore' : 'begin'\n } else {\n if (entry.remain)\n this[STATE] = 'body'\n else {\n this[STATE] = 'begin'\n entry.end()\n }\n\n if (!this[READENTRY]) {\n this[QUEUE].push(entry)\n this[NEXTENTRY]()\n } else\n this[QUEUE].push(entry)\n }\n }\n }\n }\n }\n\n [PROCESSENTRY] (entry) {\n let go = true\n\n if (!entry) {\n this[READENTRY] = null\n go = false\n } else if (Array.isArray(entry))\n this.emit.apply(this, entry)\n else {\n this[READENTRY] = entry\n this.emit('entry', entry)\n if (!entry.emittedEnd) {\n entry.on('end', _ => this[NEXTENTRY]())\n go = false\n }\n }\n\n return go\n }\n\n [NEXTENTRY] () {\n do {} while (this[PROCESSENTRY](this[QUEUE].shift()))\n\n if (!this[QUEUE].length) {\n // At this point, there's nothing in the queue, but we may have an\n // entry which is being consumed (readEntry).\n // If we don't, then we definitely can handle more data.\n // If we do, and either it's flowing, or it has never had any data\n // written to it, then it needs more.\n // The only other possibility is that it has returned false from a\n // write() call, so we wait for the next drain to continue.\n const re = this[READENTRY]\n const drainNow = !re || re.flowing || re.size === re.remain\n if (drainNow) {\n if (!this[WRITING])\n this.emit('drain')\n } else\n re.once('drain', _ => this.emit('drain'))\n }\n }\n\n [CONSUMEBODY] (chunk, position) {\n // write up to but no more than writeEntry.blockRemain\n const entry = this[WRITEENTRY]\n const br = entry.blockRemain\n const c = (br >= chunk.length && position === 0) ? 
chunk\n : chunk.slice(position, position + br)\n\n entry.write(c)\n\n if (!entry.blockRemain) {\n this[STATE] = 'begin'\n this[WRITEENTRY] = null\n entry.end()\n }\n\n return c.length\n }\n\n [CONSUMEMETA] (chunk, position) {\n const entry = this[WRITEENTRY]\n const ret = this[CONSUMEBODY](chunk, position)\n\n // if we finished, then the entry is reset\n if (!this[WRITEENTRY])\n this[EMITMETA](entry)\n\n return ret\n }\n\n [EMIT] (ev, data, extra) {\n if (!this[QUEUE].length && !this[READENTRY])\n this.emit(ev, data, extra)\n else\n this[QUEUE].push([ev, data, extra])\n }\n\n [EMITMETA] (entry) {\n this[EMIT]('meta', this[META])\n switch (entry.type) {\n case 'ExtendedHeader':\n case 'OldExtendedHeader':\n this[EX] = Pax.parse(this[META], this[EX], false)\n break\n\n case 'GlobalExtendedHeader':\n this[GEX] = Pax.parse(this[META], this[GEX], true)\n break\n\n case 'NextFileHasLongPath':\n case 'OldGnuLongPath':\n this[EX] = this[EX] || Object.create(null)\n this[EX].path = this[META].replace(/\\0.*/, '')\n break\n\n case 'NextFileHasLongLinkpath':\n this[EX] = this[EX] || Object.create(null)\n this[EX].linkpath = this[META].replace(/\\0.*/, '')\n break\n\n /* istanbul ignore next */\n default: throw new Error('unknown meta: ' + entry.type)\n }\n }\n\n abort (msg, error) {\n this[ABORTED] = true\n this.warn(msg, error)\n this.emit('abort', error)\n this.emit('error', error)\n }\n\n write (chunk) {\n if (this[ABORTED])\n return\n\n // first write, might be gzipped\n if (this[UNZIP] === null && chunk) {\n if (this[BUFFER]) {\n chunk = Buffer.concat([this[BUFFER], chunk])\n this[BUFFER] = null\n }\n if (chunk.length < gzipHeader.length) {\n this[BUFFER] = chunk\n return true\n }\n for (let i = 0; this[UNZIP] === null && i < gzipHeader.length; i++) {\n if (chunk[i] !== gzipHeader[i])\n this[UNZIP] = false\n }\n if (this[UNZIP] === null) {\n const ended = this[ENDED]\n this[ENDED] = false\n this[UNZIP] = new zlib.Unzip()\n this[UNZIP].on('data', chunk => this[CONSUMECHUNK](chunk))\n this[UNZIP].on('error', er =>\n this.abort(er.message, er))\n this[UNZIP].on('end', _ => {\n this[ENDED] = true\n this[CONSUMECHUNK]()\n })\n this[WRITING] = true\n const ret = this[UNZIP][ended ? 'end' : 'write' ](chunk)\n this[WRITING] = false\n return ret\n }\n }\n\n this[WRITING] = true\n if (this[UNZIP])\n this[UNZIP].write(chunk)\n else\n this[CONSUMECHUNK](chunk)\n this[WRITING] = false\n\n // return false if there's a queue, or if the current entry isn't flowing\n const ret =\n this[QUEUE].length ? false :\n this[READENTRY] ? this[READENTRY].flowing :\n true\n\n // if we have no queue, then that means a clogged READENTRY\n if (!ret && !this[QUEUE].length)\n this[READENTRY].once('drain', _ => this.emit('drain'))\n\n return ret\n }\n\n [BUFFERCONCAT] (c) {\n if (c && !this[ABORTED])\n this[BUFFER] = this[BUFFER] ? Buffer.concat([this[BUFFER], c]) : c\n }\n\n [MAYBEEND] () {\n if (this[ENDED] &&\n !this[EMITTEDEND] &&\n !this[ABORTED] &&\n !this[CONSUMING]) {\n this[EMITTEDEND] = true\n const entry = this[WRITEENTRY]\n if (entry && entry.blockRemain) {\n const have = this[BUFFER] ? 
this[BUFFER].length : 0\n this.warn('Truncated input (needed ' + entry.blockRemain +\n ' more bytes, only ' + have + ' available)', entry)\n if (this[BUFFER])\n entry.write(this[BUFFER])\n entry.end()\n }\n this[EMIT](DONE)\n }\n }\n\n [CONSUMECHUNK] (chunk) {\n if (this[CONSUMING]) {\n this[BUFFERCONCAT](chunk)\n } else if (!chunk && !this[BUFFER]) {\n this[MAYBEEND]()\n } else {\n this[CONSUMING] = true\n if (this[BUFFER]) {\n this[BUFFERCONCAT](chunk)\n const c = this[BUFFER]\n this[BUFFER] = null\n this[CONSUMECHUNKSUB](c)\n } else {\n this[CONSUMECHUNKSUB](chunk)\n }\n\n while (this[BUFFER] && this[BUFFER].length >= 512 && !this[ABORTED]) {\n const c = this[BUFFER]\n this[BUFFER] = null\n this[CONSUMECHUNKSUB](c)\n }\n this[CONSUMING] = false\n }\n\n if (!this[BUFFER] || this[ENDED])\n this[MAYBEEND]()\n }\n\n [CONSUMECHUNKSUB] (chunk) {\n // we know that we are in CONSUMING mode, so anything written goes into\n // the buffer. Advance the position and put any remainder in the buffer.\n let position = 0\n let length = chunk.length\n while (position + 512 <= length && !this[ABORTED]) {\n switch (this[STATE]) {\n case 'begin':\n this[CONSUMEHEADER](chunk, position)\n position += 512\n break\n\n case 'ignore':\n case 'body':\n position += this[CONSUMEBODY](chunk, position)\n break\n\n case 'meta':\n position += this[CONSUMEMETA](chunk, position)\n break\n\n /* istanbul ignore next */\n default:\n throw new Error('invalid state: ' + this[STATE])\n }\n }\n\n if (position < length) {\n if (this[BUFFER])\n this[BUFFER] = Buffer.concat([chunk.slice(position), this[BUFFER]])\n else\n this[BUFFER] = chunk.slice(position)\n }\n }\n\n end (chunk) {\n if (!this[ABORTED]) {\n if (this[UNZIP])\n this[UNZIP].end(chunk)\n else {\n this[ENDED] = true\n this.write(chunk)\n }\n }\n }\n})\n","'use strict'\n\nconst u = require('universalify').fromCallback\nconst jsonFile = require('jsonfile')\n\nmodule.exports = {\n // jsonfile exports\n readJson: u(jsonFile.readFile),\n readJsonSync: jsonFile.readFileSync,\n writeJson: u(jsonFile.writeFile),\n writeJsonSync: jsonFile.writeFileSync\n}\n","module.exports = require(\"url\");","\nvar urilib = require('url');\nvar helpers = require('./helpers');\n\nmodule.exports.SchemaScanResult = SchemaScanResult;\nfunction SchemaScanResult(found, ref){\n this.id = found;\n this.ref = ref;\n}\n\n/**\n * Adds a schema with a certain urn to the Validator instance.\n * @param string uri\n * @param object schema\n * @return {Object}\n */\nmodule.exports.scan = function scan(base, schema){\n function scanSchema(baseuri, schema){\n if(!schema || typeof schema!='object') return;\n // Mark all referenced schemas so we can tell later which schemas are referred to, but never defined\n if(schema.$ref){\n var resolvedUri = urilib.resolve(baseuri, schema.$ref);\n ref[resolvedUri] = ref[resolvedUri] ? ref[resolvedUri]+1 : 0;\n return;\n }\n var ourBase = schema.id ? 
urilib.resolve(baseuri, schema.id) : baseuri;\n if (ourBase) {\n // If there's no fragment, append an empty one\n if(ourBase.indexOf('#')<0) ourBase += '#';\n if(found[ourBase]){\n if(!helpers.deepCompareStrict(found[ourBase], schema)){\n throw new Error('Schema <'+schema+'> already exists with different definition');\n }\n return found[ourBase];\n }\n found[ourBase] = schema;\n // strip trailing fragment\n if(ourBase[ourBase.length-1]=='#'){\n found[ourBase.substring(0, ourBase.length-1)] = schema;\n }\n }\n scanArray(ourBase+'/items', ((schema.items instanceof Array)?schema.items:[schema.items]));\n scanArray(ourBase+'/extends', ((schema.extends instanceof Array)?schema.extends:[schema.extends]));\n scanSchema(ourBase+'/additionalItems', schema.additionalItems);\n scanObject(ourBase+'/properties', schema.properties);\n scanSchema(ourBase+'/additionalProperties', schema.additionalProperties);\n scanObject(ourBase+'/definitions', schema.definitions);\n scanObject(ourBase+'/patternProperties', schema.patternProperties);\n scanObject(ourBase+'/dependencies', schema.dependencies);\n scanArray(ourBase+'/disallow', schema.disallow);\n scanArray(ourBase+'/allOf', schema.allOf);\n scanArray(ourBase+'/anyOf', schema.anyOf);\n scanArray(ourBase+'/oneOf', schema.oneOf);\n scanSchema(ourBase+'/not', schema.not);\n }\n function scanArray(baseuri, schemas){\n if(!(schemas instanceof Array)) return;\n for(var i=0; i super.write(chunk))\n this.zip.on('end', _ => super.end())\n this.zip.on('drain', _ => this[ONDRAIN]())\n this.on('resume', _ => this.zip.resume())\n } else\n this.on('drain', this[ONDRAIN])\n\n this.portable = !!opt.portable\n this.noDirRecurse = !!opt.noDirRecurse\n this.follow = !!opt.follow\n this.noMtime = !!opt.noMtime\n this.mtime = opt.mtime || null\n\n this.filter = typeof opt.filter === 'function' ? opt.filter : _ => true\n\n this[QUEUE] = new Yallist\n this[JOBS] = 0\n this.jobs = +opt.jobs || 4\n this[PROCESSING] = false\n this[ENDED] = false\n }\n\n [WRITE] (chunk) {\n return super.write(chunk)\n }\n\n add (path) {\n this.write(path)\n return this\n }\n\n end (path) {\n if (path)\n this.write(path)\n this[ENDED] = true\n this[PROCESS]()\n return this\n }\n\n write (path) {\n if (this[ENDED])\n throw new Error('write after end')\n\n if (path instanceof ReadEntry)\n this[ADDTARENTRY](path)\n else\n this[ADDFSENTRY](path)\n return this.flowing\n }\n\n [ADDTARENTRY] (p) {\n const absolute = path.resolve(this.cwd, p.path)\n if (this.prefix)\n p.path = this.prefix + '/' + p.path.replace(/^\\.(\\/+|$)/, '')\n\n // in this case, we don't have to wait for the stat\n if (!this.filter(p.path, p))\n p.resume()\n else {\n const job = new PackJob(p.path, absolute, false)\n job.entry = new WriteEntryTar(p, this[ENTRYOPT](job))\n job.entry.on('end', _ => this[JOBDONE](job))\n this[JOBS] += 1\n this[QUEUE].push(job)\n }\n\n this[PROCESS]()\n }\n\n [ADDFSENTRY] (p) {\n const absolute = path.resolve(this.cwd, p)\n if (this.prefix)\n p = this.prefix + '/' + p.replace(/^\\.(\\/+|$)/, '')\n\n this[QUEUE].push(new PackJob(p, absolute))\n this[PROCESS]()\n }\n\n [STAT] (job) {\n job.pending = true\n this[JOBS] += 1\n const stat = this.follow ? 
'stat' : 'lstat'\n fs[stat](job.absolute, (er, stat) => {\n job.pending = false\n this[JOBS] -= 1\n if (er)\n this.emit('error', er)\n else\n this[ONSTAT](job, stat)\n })\n }\n\n [ONSTAT] (job, stat) {\n this.statCache.set(job.absolute, stat)\n job.stat = stat\n\n // now we have the stat, we can filter it.\n if (!this.filter(job.path, stat))\n job.ignore = true\n\n this[PROCESS]()\n }\n\n [READDIR] (job) {\n job.pending = true\n this[JOBS] += 1\n fs.readdir(job.absolute, (er, entries) => {\n job.pending = false\n this[JOBS] -= 1\n if (er)\n return this.emit('error', er)\n this[ONREADDIR](job, entries)\n })\n }\n\n [ONREADDIR] (job, entries) {\n this.readdirCache.set(job.absolute, entries)\n job.readdir = entries\n this[PROCESS]()\n }\n\n [PROCESS] () {\n if (this[PROCESSING])\n return\n\n this[PROCESSING] = true\n for (let w = this[QUEUE].head;\n w !== null && this[JOBS] < this.jobs;\n w = w.next) {\n this[PROCESSJOB](w.value)\n if (w.value.ignore) {\n const p = w.next\n this[QUEUE].removeNode(w)\n w.next = p\n }\n }\n\n this[PROCESSING] = false\n\n if (this[ENDED] && !this[QUEUE].length && this[JOBS] === 0) {\n if (this.zip)\n this.zip.end(EOF)\n else {\n super.write(EOF)\n super.end()\n }\n }\n }\n\n get [CURRENT] () {\n return this[QUEUE] && this[QUEUE].head && this[QUEUE].head.value\n }\n\n [JOBDONE] (job) {\n this[QUEUE].shift()\n this[JOBS] -= 1\n this[PROCESS]()\n }\n\n [PROCESSJOB] (job) {\n if (job.pending)\n return\n\n if (job.entry) {\n if (job === this[CURRENT] && !job.piped)\n this[PIPE](job)\n return\n }\n\n if (!job.stat) {\n if (this.statCache.has(job.absolute))\n this[ONSTAT](job, this.statCache.get(job.absolute))\n else\n this[STAT](job)\n }\n if (!job.stat)\n return\n\n // filtered out!\n if (job.ignore)\n return\n\n if (!this.noDirRecurse && job.stat.isDirectory() && !job.readdir) {\n if (this.readdirCache.has(job.absolute))\n this[ONREADDIR](job, this.readdirCache.get(job.absolute))\n else\n this[READDIR](job)\n if (!job.readdir)\n return\n }\n\n // we know it doesn't have an entry, because that got checked above\n job.entry = this[ENTRY](job)\n if (!job.entry) {\n job.ignore = true\n return\n }\n\n if (job === this[CURRENT] && !job.piped)\n this[PIPE](job)\n }\n\n [ENTRYOPT] (job) {\n return {\n onwarn: (msg, data) => {\n this.warn(msg, data)\n },\n noPax: this.noPax,\n cwd: this.cwd,\n absolute: job.absolute,\n preservePaths: this.preservePaths,\n maxReadSize: this.maxReadSize,\n strict: this.strict,\n portable: this.portable,\n linkCache: this.linkCache,\n statCache: this.statCache,\n noMtime: this.noMtime,\n mtime: this.mtime\n }\n }\n\n [ENTRY] (job) {\n this[JOBS] += 1\n try {\n return new this[WRITEENTRYCLASS](job.path, this[ENTRYOPT](job))\n .on('end', () => this[JOBDONE](job))\n .on('error', er => this.emit('error', er))\n } catch (er) {\n this.emit('error', er)\n }\n }\n\n [ONDRAIN] () {\n if (this[CURRENT] && this[CURRENT].entry)\n this[CURRENT].entry.resume()\n }\n\n // like .pipe() but using super, because our write() is special\n [PIPE] (job) {\n job.piped = true\n\n if (job.readdir)\n job.readdir.forEach(entry => {\n const p = this.prefix ?\n job.path.slice(this.prefix.length + 1) || './'\n : job.path\n\n const base = p === './' ? 
'' : p.replace(/\\/*$/, '/')\n this[ADDFSENTRY](base + entry)\n })\n\n const source = job.entry\n const zip = this.zip\n\n if (zip)\n source.on('data', chunk => {\n if (!zip.write(chunk))\n source.pause()\n })\n else\n source.on('data', chunk => {\n if (!super.write(chunk))\n source.pause()\n })\n }\n\n pause () {\n if (this.zip)\n this.zip.pause()\n return super.pause()\n }\n})\n\nclass PackSync extends Pack {\n constructor (opt) {\n super(opt)\n this[WRITEENTRYCLASS] = WriteEntrySync\n }\n\n // pause/resume are no-ops in sync streams.\n pause () {}\n resume () {}\n\n [STAT] (job) {\n const stat = this.follow ? 'statSync' : 'lstatSync'\n this[ONSTAT](job, fs[stat](job.absolute))\n }\n\n [READDIR] (job, stat) {\n this[ONREADDIR](job, fs.readdirSync(job.absolute))\n }\n\n // gotta get it all in this tick\n [PIPE] (job) {\n const source = job.entry\n const zip = this.zip\n\n if (job.readdir)\n job.readdir.forEach(entry => {\n const p = this.prefix ?\n job.path.slice(this.prefix.length + 1) || './'\n : job.path\n\n const base = p === './' ? '' : p.replace(/\\/*$/, '/')\n this[ADDFSENTRY](base + entry)\n })\n\n if (zip)\n source.on('data', chunk => {\n zip.write(chunk)\n })\n else\n source.on('data', chunk => {\n super[WRITE](chunk)\n })\n }\n}\n\nPack.Sync = PackSync\n\nmodule.exports = Pack\n","'use strict'\nconst Buffer = require('./buffer.js')\nconst Header = require('./header.js')\nconst path = require('path')\n\nclass Pax {\n constructor (obj, global) {\n this.atime = obj.atime || null\n this.charset = obj.charset || null\n this.comment = obj.comment || null\n this.ctime = obj.ctime || null\n this.gid = obj.gid || null\n this.gname = obj.gname || null\n this.linkpath = obj.linkpath || null\n this.mtime = obj.mtime || null\n this.path = obj.path || null\n this.size = obj.size || null\n this.uid = obj.uid || null\n this.uname = obj.uname || null\n this.dev = obj.dev || null\n this.ino = obj.ino || null\n this.nlink = obj.nlink || null\n this.global = global || false\n }\n\n encode () {\n const body = this.encodeBody()\n if (body === '')\n return null\n\n const bodyLen = Buffer.byteLength(body)\n // round up to 512 bytes\n // add 512 for header\n const bufLen = 512 * Math.ceil(1 + bodyLen / 512)\n const buf = Buffer.allocUnsafe(bufLen)\n\n // 0-fill the header section, it might not hit every field\n for (let i = 0; i < 512; i++) {\n buf[i] = 0\n }\n\n new Header({\n // XXX split the path\n // then the path should be PaxHeader + basename, but less than 99,\n // prepend with the dirname\n path: ('PaxHeader/' + path.basename(this.path)).slice(0, 99),\n mode: this.mode || 0o644,\n uid: this.uid || null,\n gid: this.gid || null,\n size: bodyLen,\n mtime: this.mtime || null,\n type: this.global ? 
'GlobalExtendedHeader' : 'ExtendedHeader',\n linkpath: '',\n uname: this.uname || '',\n gname: this.gname || '',\n devmaj: 0,\n devmin: 0,\n atime: this.atime || null,\n ctime: this.ctime || null\n }).encode(buf)\n\n buf.write(body, 512, bodyLen, 'utf8')\n\n // null pad after the body\n for (let i = bodyLen + 512; i < buf.length; i++) {\n buf[i] = 0\n }\n\n return buf\n }\n\n encodeBody () {\n return (\n this.encodeField('path') +\n this.encodeField('ctime') +\n this.encodeField('atime') +\n this.encodeField('dev') +\n this.encodeField('ino') +\n this.encodeField('nlink') +\n this.encodeField('charset') +\n this.encodeField('comment') +\n this.encodeField('gid') +\n this.encodeField('gname') +\n this.encodeField('linkpath') +\n this.encodeField('mtime') +\n this.encodeField('size') +\n this.encodeField('uid') +\n this.encodeField('uname')\n )\n }\n\n encodeField (field) {\n if (this[field] === null || this[field] === undefined)\n return ''\n const v = this[field] instanceof Date ? this[field].getTime() / 1000\n : this[field]\n const s = ' ' +\n (field === 'dev' || field === 'ino' || field === 'nlink'\n ? 'SCHILY.' : '') +\n field + '=' + v + '\\n'\n const byteLen = Buffer.byteLength(s)\n // the digits includes the length of the digits in ascii base-10\n // so if it's 9 characters, then adding 1 for the 9 makes it 10\n // which makes it 11 chars.\n let digits = Math.floor(Math.log(byteLen) / Math.log(10)) + 1\n if (byteLen + digits >= Math.pow(10, digits))\n digits += 1\n const len = digits + byteLen\n return len + s\n }\n}\n\nPax.parse = (string, ex, g) => new Pax(merge(parseKV(string), ex), g)\n\nconst merge = (a, b) =>\n b ? Object.keys(a).reduce((s, k) => (s[k] = a[k], s), b) : a\n\nconst parseKV = string =>\n string\n .replace(/\\n$/, '')\n .split('\\n')\n .reduce(parseKVLine, Object.create(null))\n\nconst parseKVLine = (set, line) => {\n const n = parseInt(line, 10)\n\n // XXX Values with \\n in them will fail this.\n // Refactor to not be a naive line-by-line parse.\n if (n !== Buffer.byteLength(line) + 1)\n return set\n\n line = line.substr((n + ' ').length)\n const kv = line.split('=')\n const k = kv.shift().replace(/^SCHILY\\.(dev|ino|nlink)/, '$1')\n if (!k)\n return set\n\n const v = kv.join('=')\n set[k] = /^([A-Z]+\\.)?([mac]|birth|creation)time$/.test(k)\n ? new Date(v * 1000)\n : /^[0-9]+$/.test(v) ? 
+v\n : v\n return set\n}\n\nmodule.exports = Pax\n","'use strict'\nmodule.exports = Base => class extends Base {\n warn (msg, data) {\n if (!this.strict)\n this.emit('warn', msg, data)\n else if (data instanceof Error)\n this.emit('error', data)\n else {\n const er = new Error(msg)\n er.data = data\n this.emit('error', er)\n }\n }\n}\n","'use strict'\n\nconst Buffer = require('./buffer.js')\n\n// XXX: This shares a lot in common with extract.js\n// maybe some DRY opportunity here?\n\n// tar -t\nconst hlo = require('./high-level-opt.js')\nconst Parser = require('./parse.js')\nconst fs = require('fs')\nconst fsm = require('fs-minipass')\nconst path = require('path')\n\nconst t = module.exports = (opt_, files, cb) => {\n if (typeof opt_ === 'function')\n cb = opt_, files = null, opt_ = {}\n else if (Array.isArray(opt_))\n files = opt_, opt_ = {}\n\n if (typeof files === 'function')\n cb = files, files = null\n\n if (!files)\n files = []\n else\n files = Array.from(files)\n\n const opt = hlo(opt_)\n\n if (opt.sync && typeof cb === 'function')\n throw new TypeError('callback not supported for sync tar functions')\n\n if (!opt.file && typeof cb === 'function')\n throw new TypeError('callback only supported with file option')\n\n if (files.length)\n filesFilter(opt, files)\n\n if (!opt.noResume)\n onentryFunction(opt)\n\n return opt.file && opt.sync ? listFileSync(opt)\n : opt.file ? listFile(opt, cb)\n : list(opt)\n}\n\nconst onentryFunction = opt => {\n const onentry = opt.onentry\n opt.onentry = onentry ? e => {\n onentry(e)\n e.resume()\n } : e => e.resume()\n}\n\n// construct a filter that limits the file entries listed\n// include child entries if a dir is included\nconst filesFilter = (opt, files) => {\n const map = new Map(files.map(f => [f.replace(/\\/+$/, ''), true]))\n const filter = opt.filter\n\n const mapHas = (file, r) => {\n const root = r || path.parse(file).root || '.'\n const ret = file === root ? false\n : map.has(file) ? map.get(file)\n : mapHas(path.dirname(file), root)\n\n map.set(file, ret)\n return ret\n }\n\n opt.filter = filter\n ? (file, entry) => filter(file, entry) && mapHas(file.replace(/\\/+$/, ''))\n : file => mapHas(file.replace(/\\/+$/, ''))\n}\n\nconst listFileSync = opt => {\n const p = list(opt)\n const file = opt.file\n let threw = true\n let fd\n try {\n const stat = fs.statSync(file)\n const readSize = opt.maxReadSize || 16*1024*1024\n if (stat.size < readSize) {\n p.end(fs.readFileSync(file))\n } else {\n let pos = 0\n const buf = Buffer.allocUnsafe(readSize)\n fd = fs.openSync(file, 'r')\n while (pos < stat.size) {\n let bytesRead = fs.readSync(fd, buf, 0, readSize, pos)\n pos += bytesRead\n p.write(buf.slice(0, bytesRead))\n }\n p.end()\n }\n threw = false\n } finally {\n if (threw && fd)\n try { fs.closeSync(fd) } catch (er) {}\n }\n}\n\nconst listFile = (opt, cb) => {\n const parse = new Parser(opt)\n const readSize = opt.maxReadSize || 16*1024*1024\n\n const file = opt.file\n const p = new Promise((resolve, reject) => {\n parse.on('error', reject)\n parse.on('end', resolve)\n\n fs.stat(file, (er, stat) => {\n if (er)\n reject(er)\n else {\n const stream = new fsm.ReadStream(file, {\n readSize: readSize,\n size: stat.size\n })\n stream.on('error', reject)\n stream.pipe(parse)\n }\n })\n })\n return cb ? 
p.then(cb, cb) : p\n}\n\nconst list = opt => new Parser(opt)\n","// This is adapted from https://github.com/normalize/mz\n// Copyright (c) 2014-2016 Jonathan Ong me@jongleberry.com and Contributors\nconst u = require('universalify').fromCallback\nconst fs = require('graceful-fs')\n\nconst api = [\n 'access',\n 'appendFile',\n 'chmod',\n 'chown',\n 'close',\n 'copyFile',\n 'fchmod',\n 'fchown',\n 'fdatasync',\n 'fstat',\n 'fsync',\n 'ftruncate',\n 'futimes',\n 'lchown',\n 'link',\n 'lstat',\n 'mkdir',\n 'mkdtemp',\n 'open',\n 'readFile',\n 'readdir',\n 'readlink',\n 'realpath',\n 'rename',\n 'rmdir',\n 'stat',\n 'symlink',\n 'truncate',\n 'unlink',\n 'utimes',\n 'writeFile'\n].filter(key => {\n // Some commands are not available on some systems. Ex:\n // fs.copyFile was added in Node.js v8.5.0\n // fs.mkdtemp was added in Node.js v5.10.0\n // fs.lchown is not available on at least some Linux\n return typeof fs[key] === 'function'\n})\n\n// Export all keys:\nObject.keys(fs).forEach(key => {\n exports[key] = fs[key]\n})\n\n// Universalify async methods:\napi.forEach(method => {\n exports[method] = u(fs[method])\n})\n\n// We differ from mz/fs in that we still ship the old, broken, fs.exists()\n// since we are a drop-in replacement for the native module\nexports.exists = function (filename, callback) {\n if (typeof callback === 'function') {\n return fs.exists(filename, callback)\n }\n return new Promise(resolve => {\n return fs.exists(filename, resolve)\n })\n}\n\n// fs.read() & fs.write need special treatment due to multiple callback args\n\nexports.read = function (fd, buffer, offset, length, position, callback) {\n if (typeof callback === 'function') {\n return fs.read(fd, buffer, offset, length, position, callback)\n }\n return new Promise((resolve, reject) => {\n fs.read(fd, buffer, offset, length, position, (err, bytesRead, buffer) => {\n if (err) return reject(err)\n resolve({ bytesRead, buffer })\n })\n })\n}\n\n// Function signature can be\n// fs.write(fd, buffer[, offset[, length[, position]]], callback)\n// OR\n// fs.write(fd, string[, position[, encoding]], callback)\n// so we need to handle both cases\nexports.write = function (fd, buffer, a, b, c, callback) {\n if (typeof arguments[arguments.length - 1] === 'function') {\n return fs.write(fd, buffer, a, b, c, callback)\n }\n\n // Check for old, depricated fs.write(fd, string[, position[, encoding]], callback)\n if (typeof buffer === 'string') {\n return new Promise((resolve, reject) => {\n fs.write(fd, buffer, a, b, (err, bytesWritten, buffer) => {\n if (err) return reject(err)\n resolve({ bytesWritten, buffer })\n })\n })\n }\n\n return new Promise((resolve, reject) => {\n fs.write(fd, buffer, a, b, c, (err, bytesWritten, buffer) => {\n if (err) return reject(err)\n resolve({ bytesWritten, buffer })\n })\n })\n}\n","'use strict'\n\nvar fs = require('fs')\n\nmodule.exports = clone(fs)\n\nfunction clone (obj) {\n if (obj === null || typeof obj !== 'object')\n return obj\n\n if (obj instanceof Object)\n var copy = { __proto__: obj.__proto__ }\n else\n var copy = Object.create(null)\n\n Object.getOwnPropertyNames(obj).forEach(function (key) {\n Object.defineProperty(copy, key, Object.getOwnPropertyDescriptor(obj, key))\n })\n\n return copy\n}\n","// imported from ncp (this is temporary, will rewrite)\n\nvar fs = require('graceful-fs')\nvar path = require('path')\nvar utimes = require('../util/utimes')\n\nfunction ncp (source, dest, options, callback) {\n if (!callback) {\n callback = options\n options = {}\n }\n\n var basePath = 
process.cwd()\n var currentPath = path.resolve(basePath, source)\n var targetPath = path.resolve(basePath, dest)\n\n var filter = options.filter\n var transform = options.transform\n var overwrite = options.overwrite\n // If overwrite is undefined, use clobber, otherwise default to true:\n if (overwrite === undefined) overwrite = options.clobber\n if (overwrite === undefined) overwrite = true\n var errorOnExist = options.errorOnExist\n var dereference = options.dereference\n var preserveTimestamps = options.preserveTimestamps === true\n\n var started = 0\n var finished = 0\n var running = 0\n\n var errored = false\n\n startCopy(currentPath)\n\n function startCopy (source) {\n started++\n if (filter) {\n if (filter instanceof RegExp) {\n console.warn('Warning: fs-extra: Passing a RegExp filter is deprecated, use a function')\n if (!filter.test(source)) {\n return doneOne(true)\n }\n } else if (typeof filter === 'function') {\n if (!filter(source, dest)) {\n return doneOne(true)\n }\n }\n }\n return getStats(source)\n }\n\n function getStats (source) {\n var stat = dereference ? fs.stat : fs.lstat\n running++\n stat(source, function (err, stats) {\n if (err) return onError(err)\n\n // We need to get the mode from the stats object and preserve it.\n var item = {\n name: source,\n mode: stats.mode,\n mtime: stats.mtime, // modified time\n atime: stats.atime, // access time\n stats: stats // temporary\n }\n\n if (stats.isDirectory()) {\n return onDir(item)\n } else if (stats.isFile() || stats.isCharacterDevice() || stats.isBlockDevice()) {\n return onFile(item)\n } else if (stats.isSymbolicLink()) {\n // Symlinks don't really need to know about the mode.\n return onLink(source)\n }\n })\n }\n\n function onFile (file) {\n var target = file.name.replace(currentPath, targetPath.replace('$', '$$$$')) // escapes '$' with '$$'\n isWritable(target, function (writable) {\n if (writable) {\n copyFile(file, target)\n } else {\n if (overwrite) {\n rmFile(target, function () {\n copyFile(file, target)\n })\n } else if (errorOnExist) {\n onError(new Error(target + ' already exists'))\n } else {\n doneOne()\n }\n }\n })\n }\n\n function copyFile (file, target) {\n var readStream = fs.createReadStream(file.name)\n var writeStream = fs.createWriteStream(target, { mode: file.mode })\n\n readStream.on('error', onError)\n writeStream.on('error', onError)\n\n if (transform) {\n transform(readStream, writeStream, file)\n } else {\n writeStream.on('open', function () {\n readStream.pipe(writeStream)\n })\n }\n\n writeStream.once('close', function () {\n fs.chmod(target, file.mode, function (err) {\n if (err) return onError(err)\n if (preserveTimestamps) {\n utimes.utimesMillis(target, file.atime, file.mtime, function (err) {\n if (err) return onError(err)\n return doneOne()\n })\n } else {\n doneOne()\n }\n })\n })\n }\n\n function rmFile (file, done) {\n fs.unlink(file, function (err) {\n if (err) return onError(err)\n return done()\n })\n }\n\n function onDir (dir) {\n var target = dir.name.replace(currentPath, targetPath.replace('$', '$$$$')) // escapes '$' with '$$'\n isWritable(target, function (writable) {\n if (writable) {\n return mkDir(dir, target)\n }\n copyDir(dir.name)\n })\n }\n\n function mkDir (dir, target) {\n fs.mkdir(target, dir.mode, function (err) {\n if (err) return onError(err)\n // despite setting mode in fs.mkdir, doesn't seem to work\n // so we set it here.\n fs.chmod(target, dir.mode, function (err) {\n if (err) return onError(err)\n copyDir(dir.name)\n })\n })\n }\n\n function copyDir 
(dir) {\n fs.readdir(dir, function (err, items) {\n if (err) return onError(err)\n items.forEach(function (item) {\n startCopy(path.join(dir, item))\n })\n return doneOne()\n })\n }\n\n function onLink (link) {\n var target = link.replace(currentPath, targetPath)\n fs.readlink(link, function (err, resolvedPath) {\n if (err) return onError(err)\n checkLink(resolvedPath, target)\n })\n }\n\n function checkLink (resolvedPath, target) {\n if (dereference) {\n resolvedPath = path.resolve(basePath, resolvedPath)\n }\n isWritable(target, function (writable) {\n if (writable) {\n return makeLink(resolvedPath, target)\n }\n fs.readlink(target, function (err, targetDest) {\n if (err) return onError(err)\n\n if (dereference) {\n targetDest = path.resolve(basePath, targetDest)\n }\n if (targetDest === resolvedPath) {\n return doneOne()\n }\n return rmFile(target, function () {\n makeLink(resolvedPath, target)\n })\n })\n })\n }\n\n function makeLink (linkPath, target) {\n fs.symlink(linkPath, target, function (err) {\n if (err) return onError(err)\n return doneOne()\n })\n }\n\n function isWritable (path, done) {\n fs.lstat(path, function (err) {\n if (err) {\n if (err.code === 'ENOENT') return done(true)\n return done(false)\n }\n return done(false)\n })\n }\n\n function onError (err) {\n // ensure callback is defined & called only once:\n if (!errored && callback !== undefined) {\n errored = true\n return callback(err)\n }\n }\n\n function doneOne (skipped) {\n if (!skipped) running--\n finished++\n if ((started === finished) && (running === 0)) {\n if (callback !== undefined) {\n return callback(null)\n }\n }\n }\n}\n\nmodule.exports = ncp\n","module.exports = require(\"os\");","'use strict'\n\nconst path = require('path')\n\n// get drive on windows\nfunction getRootPath (p) {\n p = path.normalize(path.resolve(p)).split(path.sep)\n if (p.length > 0) return p[0]\n return null\n}\n\n// http://stackoverflow.com/a/62888/10333 contains more accurate\n// TODO: expand to include the rest\nconst INVALID_PATH_CHARS = /[<>:\"|?*]/\n\nfunction invalidWin32Path (p) {\n const rp = getRootPath(p)\n p = p.replace(rp, '')\n return INVALID_PATH_CHARS.test(p)\n}\n\nmodule.exports = {\n getRootPath,\n invalidWin32Path\n}\n","module.exports = {\n copySync: require('./copy-sync')\n}\n","/* eslint-disable node/no-deprecated-api */\nmodule.exports = function (size) {\n if (typeof Buffer.allocUnsafe === 'function') {\n try {\n return Buffer.allocUnsafe(size)\n } catch (e) {\n return new Buffer(size)\n }\n }\n return new Buffer(size)\n}\n","/* eslint-disable node/no-deprecated-api */\nvar buffer = require('buffer')\nvar Buffer = buffer.Buffer\n\n// alternative to using Object.keys for old browsers\nfunction copyProps (src, dst) {\n for (var key in src) {\n dst[key] = src[key]\n }\n}\nif (Buffer.from && Buffer.alloc && Buffer.allocUnsafe && Buffer.allocUnsafeSlow) {\n module.exports = buffer\n} else {\n // Copy properties from require('buffer')\n copyProps(buffer, exports)\n exports.Buffer = SafeBuffer\n}\n\nfunction SafeBuffer (arg, encodingOrOffset, length) {\n return Buffer(arg, encodingOrOffset, length)\n}\n\n// Copy static methods from Buffer\ncopyProps(Buffer, SafeBuffer)\n\nSafeBuffer.from = function (arg, encodingOrOffset, length) {\n if (typeof arg === 'number') {\n throw new TypeError('Argument must not be a number')\n }\n return Buffer(arg, encodingOrOffset, length)\n}\n\nSafeBuffer.alloc = function (size, fill, encoding) {\n if (typeof size !== 'number') {\n throw new TypeError('Argument must be a number')\n }\n 
var buf = Buffer(size)\n if (fill !== undefined) {\n if (typeof encoding === 'string') {\n buf.fill(fill, encoding)\n } else {\n buf.fill(fill)\n }\n } else {\n buf.fill(0)\n }\n return buf\n}\n\nSafeBuffer.allocUnsafe = function (size) {\n if (typeof size !== 'number') {\n throw new TypeError('Argument must be a number')\n }\n return Buffer(size)\n}\n\nSafeBuffer.allocUnsafeSlow = function (size) {\n if (typeof size !== 'number') {\n throw new TypeError('Argument must be a number')\n }\n return buffer.SlowBuffer(size)\n}\n","module.exports = require(\"buffer\");","'use strict'\n\nconst assert = require('assert')\nconst Buffer = require('buffer').Buffer\nconst binding = process.binding('zlib')\n\nconst constants = exports.constants = require('./constants.js')\nconst MiniPass = require('minipass')\n\nclass ZlibError extends Error {\n constructor (msg, errno) {\n super('zlib: ' + msg)\n this.errno = errno\n this.code = codes.get(errno)\n }\n\n get name () {\n return 'ZlibError'\n }\n}\n\n// translation table for return codes.\nconst codes = new Map([\n [constants.Z_OK, 'Z_OK'],\n [constants.Z_STREAM_END, 'Z_STREAM_END'],\n [constants.Z_NEED_DICT, 'Z_NEED_DICT'],\n [constants.Z_ERRNO, 'Z_ERRNO'],\n [constants.Z_STREAM_ERROR, 'Z_STREAM_ERROR'],\n [constants.Z_DATA_ERROR, 'Z_DATA_ERROR'],\n [constants.Z_MEM_ERROR, 'Z_MEM_ERROR'],\n [constants.Z_BUF_ERROR, 'Z_BUF_ERROR'],\n [constants.Z_VERSION_ERROR, 'Z_VERSION_ERROR']\n])\n\nconst validFlushFlags = new Set([\n constants.Z_NO_FLUSH,\n constants.Z_PARTIAL_FLUSH,\n constants.Z_SYNC_FLUSH,\n constants.Z_FULL_FLUSH,\n constants.Z_FINISH,\n constants.Z_BLOCK\n])\n\nconst strategies = new Set([\n constants.Z_FILTERED,\n constants.Z_HUFFMAN_ONLY,\n constants.Z_RLE,\n constants.Z_FIXED,\n constants.Z_DEFAULT_STRATEGY\n])\n\n// the Zlib class they all inherit from\n// This thing manages the queue of requests, and returns\n// true or false if there is anything in the queue when\n// you call the .write() method.\nconst _opts = Symbol('opts')\nconst _chunkSize = Symbol('chunkSize')\nconst _flushFlag = Symbol('flushFlag')\nconst _finishFlush = Symbol('finishFlush')\nconst _handle = Symbol('handle')\nconst _hadError = Symbol('hadError')\nconst _buffer = Symbol('buffer')\nconst _offset = Symbol('offset')\nconst _level = Symbol('level')\nconst _strategy = Symbol('strategy')\nconst _ended = Symbol('ended')\nconst _writeState = Symbol('writeState')\n\nclass Zlib extends MiniPass {\n constructor (opts, mode) {\n super(opts)\n this[_ended] = false\n this[_opts] = opts = opts || {}\n this[_chunkSize] = opts.chunkSize || constants.Z_DEFAULT_CHUNK\n if (opts.flush && !validFlushFlags.has(opts.flush)) {\n throw new TypeError('Invalid flush flag: ' + opts.flush)\n }\n if (opts.finishFlush && !validFlushFlags.has(opts.finishFlush)) {\n throw new TypeError('Invalid flush flag: ' + opts.finishFlush)\n }\n\n this[_flushFlag] = opts.flush || constants.Z_NO_FLUSH\n this[_finishFlush] = typeof opts.finishFlush !== 'undefined' ?\n opts.finishFlush : constants.Z_FINISH\n\n if (opts.chunkSize) {\n if (opts.chunkSize < constants.Z_MIN_CHUNK) {\n throw new RangeError('Invalid chunk size: ' + opts.chunkSize)\n }\n }\n\n if (opts.windowBits) {\n if (opts.windowBits < constants.Z_MIN_WINDOWBITS ||\n opts.windowBits > constants.Z_MAX_WINDOWBITS) {\n throw new RangeError('Invalid windowBits: ' + opts.windowBits)\n }\n }\n\n if (opts.level) {\n if (opts.level < constants.Z_MIN_LEVEL ||\n opts.level > constants.Z_MAX_LEVEL) {\n throw new RangeError('Invalid compression level: ' + 
opts.level)\n }\n }\n\n if (opts.memLevel) {\n if (opts.memLevel < constants.Z_MIN_MEMLEVEL ||\n opts.memLevel > constants.Z_MAX_MEMLEVEL) {\n throw new RangeError('Invalid memLevel: ' + opts.memLevel)\n }\n }\n\n if (opts.strategy && !(strategies.has(opts.strategy)))\n throw new TypeError('Invalid strategy: ' + opts.strategy)\n\n if (opts.dictionary) {\n if (!(opts.dictionary instanceof Buffer)) {\n throw new TypeError('Invalid dictionary: it should be a Buffer instance')\n }\n }\n\n this[_handle] = new binding.Zlib(mode)\n\n this[_hadError] = false\n this[_handle].onerror = (message, errno) => {\n // there is no way to cleanly recover.\n // continuing only obscures problems.\n this.close()\n this[_hadError] = true\n\n const error = new ZlibError(message, errno)\n this.emit('error', error)\n }\n\n const level = typeof opts.level === 'number' ? opts.level\n : constants.Z_DEFAULT_COMPRESSION\n\n var strategy = typeof opts.strategy === 'number' ? opts.strategy\n : constants.Z_DEFAULT_STRATEGY\n\n this[_writeState] = new Uint32Array(2);\n const window = opts.windowBits || constants.Z_DEFAULT_WINDOWBITS\n const memLevel = opts.memLevel || constants.Z_DEFAULT_MEMLEVEL\n\n // API changed in node v9\n /* istanbul ignore next */\n if (/^v[0-8]\\./.test(process.version)) {\n this[_handle].init(window,\n level,\n memLevel,\n strategy,\n opts.dictionary)\n } else {\n this[_handle].init(window,\n level,\n memLevel,\n strategy,\n this[_writeState],\n () => {},\n opts.dictionary)\n }\n\n this[_buffer] = Buffer.allocUnsafe(this[_chunkSize])\n this[_offset] = 0\n this[_level] = level\n this[_strategy] = strategy\n\n this.once('end', this.close)\n }\n\n close () {\n if (this[_handle]) {\n this[_handle].close()\n this[_handle] = null\n this.emit('close')\n }\n }\n\n params (level, strategy) {\n if (!this[_handle])\n throw new Error('cannot switch params when binding is closed')\n\n // no way to test this without also not supporting params at all\n /* istanbul ignore if */\n if (!this[_handle].params)\n throw new Error('not supported in this implementation')\n\n if (level < constants.Z_MIN_LEVEL ||\n level > constants.Z_MAX_LEVEL) {\n throw new RangeError('Invalid compression level: ' + level)\n }\n\n if (!(strategies.has(strategy)))\n throw new TypeError('Invalid strategy: ' + strategy)\n\n if (this[_level] !== level || this[_strategy] !== strategy) {\n this.flush(constants.Z_SYNC_FLUSH)\n assert(this[_handle], 'zlib binding closed')\n this[_handle].params(level, strategy)\n /* istanbul ignore else */\n if (!this[_hadError]) {\n this[_level] = level\n this[_strategy] = strategy\n }\n }\n }\n\n reset () {\n assert(this[_handle], 'zlib binding closed')\n return this[_handle].reset()\n }\n\n flush (kind) {\n if (kind === undefined)\n kind = constants.Z_FULL_FLUSH\n\n if (this.ended)\n return\n\n const flushFlag = this[_flushFlag]\n this[_flushFlag] = kind\n this.write(Buffer.alloc(0))\n this[_flushFlag] = flushFlag\n }\n\n end (chunk, encoding, cb) {\n if (chunk)\n this.write(chunk, encoding)\n this.flush(this[_finishFlush])\n this[_ended] = true\n return super.end(null, null, cb)\n }\n\n get ended () {\n return this[_ended]\n }\n\n write (chunk, encoding, cb) {\n // process the chunk using the sync process\n // then super.write() all the outputted chunks\n if (typeof encoding === 'function')\n cb = encoding, encoding = 'utf8'\n\n if (typeof chunk === 'string')\n chunk = new Buffer(chunk, encoding)\n\n let availInBefore = chunk && chunk.length\n let availOutBefore = this[_chunkSize] - this[_offset]\n let inOff 
= 0 // the offset of the input buffer\n const flushFlag = this[_flushFlag]\n let writeReturn = true\n\n assert(this[_handle], 'zlib binding closed')\n do {\n let res = this[_handle].writeSync(\n flushFlag,\n chunk, // in\n inOff, // in_off\n availInBefore, // in_len\n this[_buffer], // out\n this[_offset], //out_off\n availOutBefore // out_len\n )\n\n if (this[_hadError])\n break\n\n // API changed in v9\n /* istanbul ignore next */\n let availInAfter = res ? res[0] : this[_writeState][1]\n /* istanbul ignore next */\n let availOutAfter = res ? res[1] : this[_writeState][0]\n\n const have = availOutBefore - availOutAfter\n assert(have >= 0, 'have should not go down')\n\n if (have > 0) {\n const out = this[_buffer].slice(\n this[_offset], this[_offset] + have\n )\n\n this[_offset] += have\n // serve some output to the consumer.\n writeReturn = super.write(out) && writeReturn\n }\n\n // exhausted the output buffer, or used all the input create a new one.\n if (availOutAfter === 0 || this[_offset] >= this[_chunkSize]) {\n availOutBefore = this[_chunkSize]\n this[_offset] = 0\n this[_buffer] = Buffer.allocUnsafe(this[_chunkSize])\n }\n\n if (availOutAfter === 0) {\n // Not actually done. Need to reprocess.\n // Also, update the availInBefore to the availInAfter value,\n // so that if we have to hit it a third (fourth, etc.) time,\n // it'll have the correct byte counts.\n inOff += (availInBefore - availInAfter)\n availInBefore = availInAfter\n continue\n }\n break\n } while (!this[_hadError])\n\n if (cb)\n cb()\n return writeReturn\n }\n}\n\n// minimal 2-byte header\nclass Deflate extends Zlib {\n constructor (opts) {\n super(opts, constants.DEFLATE)\n }\n}\n\nclass Inflate extends Zlib {\n constructor (opts) {\n super(opts, constants.INFLATE)\n }\n}\n\n// gzip - bigger header, same deflate compression\nclass Gzip extends Zlib {\n constructor (opts) {\n super(opts, constants.GZIP)\n }\n}\n\nclass Gunzip extends Zlib {\n constructor (opts) {\n super(opts, constants.GUNZIP)\n }\n}\n\n// raw - no header\nclass DeflateRaw extends Zlib {\n constructor (opts) {\n super(opts, constants.DEFLATERAW)\n }\n}\n\nclass InflateRaw extends Zlib {\n constructor (opts) {\n super(opts, constants.INFLATERAW)\n }\n}\n\n// auto-detect header.\nclass Unzip extends Zlib {\n constructor (opts) {\n super(opts, constants.UNZIP)\n }\n}\n\nexports.Deflate = Deflate\nexports.Inflate = Inflate\nexports.Gzip = Gzip\nexports.Gunzip = Gunzip\nexports.DeflateRaw = DeflateRaw\nexports.InflateRaw = InflateRaw\nexports.Unzip = Unzip\n","'use strict'\nconst Buffer = require('./buffer.js')\nconst MiniPass = require('minipass')\nconst Pax = require('./pax.js')\nconst Header = require('./header.js')\nconst ReadEntry = require('./read-entry.js')\nconst fs = require('fs')\nconst path = require('path')\n\nconst types = require('./types.js')\nconst maxReadSize = 16 * 1024 * 1024\nconst PROCESS = Symbol('process')\nconst FILE = Symbol('file')\nconst DIRECTORY = Symbol('directory')\nconst SYMLINK = Symbol('symlink')\nconst HARDLINK = Symbol('hardlink')\nconst HEADER = Symbol('header')\nconst READ = Symbol('read')\nconst LSTAT = Symbol('lstat')\nconst ONLSTAT = Symbol('onlstat')\nconst ONREAD = Symbol('onread')\nconst ONREADLINK = Symbol('onreadlink')\nconst OPENFILE = Symbol('openfile')\nconst ONOPENFILE = Symbol('onopenfile')\nconst CLOSE = Symbol('close')\nconst MODE = Symbol('mode')\nconst warner = require('./warn-mixin.js')\nconst winchars = require('./winchars.js')\n\nconst modeFix = require('./mode-fix.js')\n\nconst WriteEntry = 
warner(class WriteEntry extends MiniPass {\n constructor (p, opt) {\n opt = opt || {}\n super(opt)\n if (typeof p !== 'string')\n throw new TypeError('path is required')\n this.path = p\n // suppress atime, ctime, uid, gid, uname, gname\n this.portable = !!opt.portable\n // until node has builtin pwnam functions, this'll have to do\n this.myuid = process.getuid && process.getuid()\n this.myuser = process.env.USER || ''\n this.maxReadSize = opt.maxReadSize || maxReadSize\n this.linkCache = opt.linkCache || new Map()\n this.statCache = opt.statCache || new Map()\n this.preservePaths = !!opt.preservePaths\n this.cwd = opt.cwd || process.cwd()\n this.strict = !!opt.strict\n this.noPax = !!opt.noPax\n this.noMtime = !!opt.noMtime\n this.mtime = opt.mtime || null\n\n if (typeof opt.onwarn === 'function')\n this.on('warn', opt.onwarn)\n\n if (!this.preservePaths && path.win32.isAbsolute(p)) {\n // absolutes on posix are also absolutes on win32\n // so we only need to test this one to get both\n const parsed = path.win32.parse(p)\n this.warn('stripping ' + parsed.root + ' from absolute path', p)\n this.path = p.substr(parsed.root.length)\n }\n\n this.win32 = !!opt.win32 || process.platform === 'win32'\n if (this.win32) {\n this.path = winchars.decode(this.path.replace(/\\\\/g, '/'))\n p = p.replace(/\\\\/g, '/')\n }\n\n this.absolute = opt.absolute || path.resolve(this.cwd, p)\n\n if (this.path === '')\n this.path = './'\n\n if (this.statCache.has(this.absolute))\n this[ONLSTAT](this.statCache.get(this.absolute))\n else\n this[LSTAT]()\n }\n\n [LSTAT] () {\n fs.lstat(this.absolute, (er, stat) => {\n if (er)\n return this.emit('error', er)\n this[ONLSTAT](stat)\n })\n }\n\n [ONLSTAT] (stat) {\n this.statCache.set(this.absolute, stat)\n this.stat = stat\n if (!stat.isFile())\n stat.size = 0\n this.type = getType(stat)\n this.emit('stat', stat)\n this[PROCESS]()\n }\n\n [PROCESS] () {\n switch (this.type) {\n case 'File': return this[FILE]()\n case 'Directory': return this[DIRECTORY]()\n case 'SymbolicLink': return this[SYMLINK]()\n // unsupported types are ignored.\n default: return this.end()\n }\n }\n\n [MODE] (mode) {\n return modeFix(mode, this.type === 'Directory')\n }\n\n [HEADER] () {\n if (this.type === 'Directory' && this.portable)\n this.noMtime = true\n\n this.header = new Header({\n path: this.path,\n linkpath: this.linkpath,\n // only the permissions and setuid/setgid/sticky bitflags\n // not the higher-order bits that specify file type\n mode: this[MODE](this.stat.mode),\n uid: this.portable ? null : this.stat.uid,\n gid: this.portable ? null : this.stat.gid,\n size: this.stat.size,\n mtime: this.noMtime ? null : this.mtime || this.stat.mtime,\n type: this.type,\n uname: this.portable ? null :\n this.stat.uid === this.myuid ? this.myuser : '',\n atime: this.portable ? null : this.stat.atime,\n ctime: this.portable ? null : this.stat.ctime\n })\n\n if (this.header.encode() && !this.noPax)\n this.write(new Pax({\n atime: this.portable ? null : this.header.atime,\n ctime: this.portable ? null : this.header.ctime,\n gid: this.portable ? null : this.header.gid,\n mtime: this.noMtime ? null : this.mtime || this.header.mtime,\n path: this.path,\n linkpath: this.linkpath,\n size: this.header.size,\n uid: this.portable ? null : this.header.uid,\n uname: this.portable ? null : this.header.uname,\n dev: this.portable ? null : this.stat.dev,\n ino: this.portable ? null : this.stat.ino,\n nlink: this.portable ? 
null : this.stat.nlink\n }).encode())\n this.write(this.header.block)\n }\n\n [DIRECTORY] () {\n if (this.path.substr(-1) !== '/')\n this.path += '/'\n this.stat.size = 0\n this[HEADER]()\n this.end()\n }\n\n [SYMLINK] () {\n fs.readlink(this.absolute, (er, linkpath) => {\n if (er)\n return this.emit('error', er)\n this[ONREADLINK](linkpath)\n })\n }\n\n [ONREADLINK] (linkpath) {\n this.linkpath = linkpath\n this[HEADER]()\n this.end()\n }\n\n [HARDLINK] (linkpath) {\n this.type = 'Link'\n this.linkpath = path.relative(this.cwd, linkpath)\n this.stat.size = 0\n this[HEADER]()\n this.end()\n }\n\n [FILE] () {\n if (this.stat.nlink > 1) {\n const linkKey = this.stat.dev + ':' + this.stat.ino\n if (this.linkCache.has(linkKey)) {\n const linkpath = this.linkCache.get(linkKey)\n if (linkpath.indexOf(this.cwd) === 0)\n return this[HARDLINK](linkpath)\n }\n this.linkCache.set(linkKey, this.absolute)\n }\n\n this[HEADER]()\n if (this.stat.size === 0)\n return this.end()\n\n this[OPENFILE]()\n }\n\n [OPENFILE] () {\n fs.open(this.absolute, 'r', (er, fd) => {\n if (er)\n return this.emit('error', er)\n this[ONOPENFILE](fd)\n })\n }\n\n [ONOPENFILE] (fd) {\n const blockLen = 512 * Math.ceil(this.stat.size / 512)\n const bufLen = Math.min(blockLen, this.maxReadSize)\n const buf = Buffer.allocUnsafe(bufLen)\n this[READ](fd, buf, 0, buf.length, 0, this.stat.size, blockLen)\n }\n\n [READ] (fd, buf, offset, length, pos, remain, blockRemain) {\n fs.read(fd, buf, offset, length, pos, (er, bytesRead) => {\n if (er)\n return this[CLOSE](fd, _ => this.emit('error', er))\n this[ONREAD](fd, buf, offset, length, pos, remain, blockRemain, bytesRead)\n })\n }\n\n [CLOSE] (fd, cb) {\n fs.close(fd, cb)\n }\n\n [ONREAD] (fd, buf, offset, length, pos, remain, blockRemain, bytesRead) {\n if (bytesRead <= 0 && remain > 0) {\n const er = new Error('unexpected EOF')\n er.path = this.absolute\n er.syscall = 'read'\n er.code = 'EOF'\n this.emit('error', er)\n }\n\n // null out the rest of the buffer, if we could fit the block padding\n if (bytesRead === remain) {\n for (let i = bytesRead; i < length && bytesRead < blockRemain; i++) {\n buf[i + offset] = 0\n bytesRead ++\n remain ++\n }\n }\n\n const writeBuf = offset === 0 && bytesRead === buf.length ?\n buf : buf.slice(offset, offset + bytesRead)\n remain -= bytesRead\n blockRemain -= bytesRead\n pos += bytesRead\n offset += bytesRead\n\n this.write(writeBuf)\n\n if (!remain) {\n if (blockRemain)\n this.write(Buffer.alloc(blockRemain))\n this.end()\n this[CLOSE](fd, _ => _)\n return\n }\n\n if (offset >= length) {\n buf = Buffer.allocUnsafe(length)\n offset = 0\n }\n length = buf.length - offset\n this[READ](fd, buf, offset, length, pos, remain, blockRemain)\n }\n})\n\nclass WriteEntrySync extends WriteEntry {\n constructor (path, opt) {\n super(path, opt)\n }\n\n [LSTAT] () {\n this[ONLSTAT](fs.lstatSync(this.absolute))\n }\n\n [SYMLINK] () {\n this[ONREADLINK](fs.readlinkSync(this.absolute))\n }\n\n [OPENFILE] () {\n this[ONOPENFILE](fs.openSync(this.absolute, 'r'))\n }\n\n [READ] (fd, buf, offset, length, pos, remain, blockRemain) {\n let threw = true\n try {\n const bytesRead = fs.readSync(fd, buf, offset, length, pos)\n this[ONREAD](fd, buf, offset, length, pos, remain, blockRemain, bytesRead)\n threw = false\n } finally {\n if (threw)\n try { this[CLOSE](fd) } catch (er) {}\n }\n }\n\n [CLOSE] (fd) {\n fs.closeSync(fd)\n }\n}\n\nconst WriteEntryTar = warner(class WriteEntryTar extends MiniPass {\n constructor (readEntry, opt) {\n opt = opt || {}\n super(opt)\n 
this.preservePaths = !!opt.preservePaths\n this.portable = !!opt.portable\n this.strict = !!opt.strict\n this.noPax = !!opt.noPax\n this.noMtime = !!opt.noMtime\n\n this.readEntry = readEntry\n this.type = readEntry.type\n if (this.type === 'Directory' && this.portable)\n this.noMtime = true\n\n this.path = readEntry.path\n this.mode = this[MODE](readEntry.mode)\n this.uid = this.portable ? null : readEntry.uid\n this.gid = this.portable ? null : readEntry.gid\n this.uname = this.portable ? null : readEntry.uname\n this.gname = this.portable ? null : readEntry.gname\n this.size = readEntry.size\n this.mtime = this.noMtime ? null : opt.mtime || readEntry.mtime\n this.atime = this.portable ? null : readEntry.atime\n this.ctime = this.portable ? null : readEntry.ctime\n this.linkpath = readEntry.linkpath\n\n if (typeof opt.onwarn === 'function')\n this.on('warn', opt.onwarn)\n\n if (path.isAbsolute(this.path) && !this.preservePaths) {\n const parsed = path.parse(this.path)\n this.warn(\n 'stripping ' + parsed.root + ' from absolute path',\n this.path\n )\n this.path = this.path.substr(parsed.root.length)\n }\n\n this.remain = readEntry.size\n this.blockRemain = readEntry.startBlockSize\n\n this.header = new Header({\n path: this.path,\n linkpath: this.linkpath,\n // only the permissions and setuid/setgid/sticky bitflags\n // not the higher-order bits that specify file type\n mode: this.mode,\n uid: this.portable ? null : this.uid,\n gid: this.portable ? null : this.gid,\n size: this.size,\n mtime: this.noMtime ? null : this.mtime,\n type: this.type,\n uname: this.portable ? null : this.uname,\n atime: this.portable ? null : this.atime,\n ctime: this.portable ? null : this.ctime\n })\n\n if (this.header.encode() && !this.noPax)\n super.write(new Pax({\n atime: this.portable ? null : this.atime,\n ctime: this.portable ? null : this.ctime,\n gid: this.portable ? null : this.gid,\n mtime: this.noMtime ? null : this.mtime,\n path: this.path,\n linkpath: this.linkpath,\n size: this.size,\n uid: this.portable ? null : this.uid,\n uname: this.portable ? null : this.uname,\n dev: this.portable ? null : this.readEntry.dev,\n ino: this.portable ? null : this.readEntry.ino,\n nlink: this.portable ? null : this.readEntry.nlink\n }).encode())\n\n super.write(this.header.block)\n readEntry.pipe(this)\n }\n\n [MODE] (mode) {\n return modeFix(mode, this.type === 'Directory')\n }\n\n write (data) {\n const writeLen = data.length\n if (writeLen > this.blockRemain)\n throw new Error('writing more to entry than is appropriate')\n this.blockRemain -= writeLen\n return super.write(data)\n }\n\n end () {\n if (this.blockRemain)\n this.write(Buffer.alloc(this.blockRemain))\n return super.end()\n }\n})\n\nWriteEntry.Sync = WriteEntrySync\nWriteEntry.Tar = WriteEntryTar\n\nconst getType = stat =>\n stat.isFile() ? 'File'\n : stat.isDirectory() ? 'Directory'\n : stat.isSymbolicLink() ? 
'SymbolicLink'\n : 'Unsupported'\n\nmodule.exports = WriteEntry\n","'use strict'\n\n// When writing files on Windows, translate the characters to their\n// 0xf000 higher-encoded versions.\n\nconst raw = [\n '|',\n '<',\n '>',\n '?',\n ':'\n]\n\nconst win = raw.map(char =>\n String.fromCharCode(0xf000 + char.charCodeAt(0)))\n\nconst toWin = new Map(raw.map((char, i) => [char, win[i]]))\nconst toRaw = new Map(win.map((char, i) => [char, raw[i]]))\n\nmodule.exports = {\n encode: s => raw.reduce((s, c) => s.split(c).join(toWin.get(c)), s),\n decode: s => win.reduce((s, c) => s.split(c).join(toRaw.get(c)), s)\n}\n","'use strict'\nconst Buffer = require('./buffer.js')\n\n// tar -r\nconst hlo = require('./high-level-opt.js')\nconst Pack = require('./pack.js')\nconst Parse = require('./parse.js')\nconst fs = require('fs')\nconst fsm = require('fs-minipass')\nconst t = require('./list.js')\nconst path = require('path')\n\n// starting at the head of the file, read a Header\n// If the checksum is invalid, that's our position to start writing\n// If it is, jump forward by the specified size (round up to 512)\n// and try again.\n// Write the new Pack stream starting there.\n\nconst Header = require('./header.js')\n\nconst r = module.exports = (opt_, files, cb) => {\n const opt = hlo(opt_)\n\n if (!opt.file)\n throw new TypeError('file is required')\n\n if (opt.gzip)\n throw new TypeError('cannot append to compressed archives')\n\n if (!files || !Array.isArray(files) || !files.length)\n throw new TypeError('no files or directories specified')\n\n files = Array.from(files)\n\n return opt.sync ? replaceSync(opt, files)\n : replace(opt, files, cb)\n}\n\nconst replaceSync = (opt, files) => {\n const p = new Pack.Sync(opt)\n\n let threw = true\n let fd\n let position\n\n try {\n try {\n fd = fs.openSync(opt.file, 'r+')\n } catch (er) {\n if (er.code === 'ENOENT')\n fd = fs.openSync(opt.file, 'w+')\n else\n throw er\n }\n\n const st = fs.fstatSync(fd)\n const headBuf = Buffer.alloc(512)\n\n POSITION: for (position = 0; position < st.size; position += 512) {\n for (let bufPos = 0, bytes = 0; bufPos < 512; bufPos += bytes) {\n bytes = fs.readSync(\n fd, headBuf, bufPos, headBuf.length - bufPos, position + bufPos\n )\n\n if (position === 0 && headBuf[0] === 0x1f && headBuf[1] === 0x8b)\n throw new Error('cannot append to compressed archives')\n\n if (!bytes)\n break POSITION\n }\n\n let h = new Header(headBuf)\n if (!h.cksumValid)\n break\n let entryBlockSize = 512 * Math.ceil(h.size / 512)\n if (position + entryBlockSize + 512 > st.size)\n break\n // the 512 for the header we just parsed will be added as well\n // also jump ahead all the blocks for the body\n position += entryBlockSize\n if (opt.mtimeCache)\n opt.mtimeCache.set(h.path, h.mtime)\n }\n threw = false\n\n streamSync(opt, p, position, fd, files)\n } finally {\n if (threw)\n try { fs.closeSync(fd) } catch (er) {}\n }\n}\n\nconst streamSync = (opt, p, position, fd, files) => {\n const stream = new fsm.WriteStreamSync(opt.file, {\n fd: fd,\n start: position\n })\n p.pipe(stream)\n addFilesSync(p, files)\n}\n\nconst replace = (opt, files, cb) => {\n files = Array.from(files)\n const p = new Pack(opt)\n\n const getPos = (fd, size, cb_) => {\n const cb = (er, pos) => {\n if (er)\n fs.close(fd, _ => cb_(er))\n else\n cb_(null, pos)\n }\n\n let position = 0\n if (size === 0)\n return cb(null, 0)\n\n let bufPos = 0\n const headBuf = Buffer.alloc(512)\n const onread = (er, bytes) => {\n if (er)\n return cb(er)\n bufPos += bytes\n if (bufPos < 512 && bytes)\n 
return fs.read(\n fd, headBuf, bufPos, headBuf.length - bufPos,\n position + bufPos, onread\n )\n\n if (position === 0 && headBuf[0] === 0x1f && headBuf[1] === 0x8b)\n return cb(new Error('cannot append to compressed archives'))\n\n // truncated header\n if (bufPos < 512)\n return cb(null, position)\n\n const h = new Header(headBuf)\n if (!h.cksumValid)\n return cb(null, position)\n\n const entryBlockSize = 512 * Math.ceil(h.size / 512)\n if (position + entryBlockSize + 512 > size)\n return cb(null, position)\n\n position += entryBlockSize + 512\n if (position >= size)\n return cb(null, position)\n\n if (opt.mtimeCache)\n opt.mtimeCache.set(h.path, h.mtime)\n bufPos = 0\n fs.read(fd, headBuf, 0, 512, position, onread)\n }\n fs.read(fd, headBuf, 0, 512, position, onread)\n }\n\n const promise = new Promise((resolve, reject) => {\n p.on('error', reject)\n let flag = 'r+'\n const onopen = (er, fd) => {\n if (er && er.code === 'ENOENT' && flag === 'r+') {\n flag = 'w+'\n return fs.open(opt.file, flag, onopen)\n }\n\n if (er)\n return reject(er)\n\n fs.fstat(fd, (er, st) => {\n if (er)\n return reject(er)\n getPos(fd, st.size, (er, position) => {\n if (er)\n return reject(er)\n const stream = new fsm.WriteStream(opt.file, {\n fd: fd,\n start: position\n })\n p.pipe(stream)\n stream.on('error', reject)\n stream.on('close', resolve)\n addFilesAsync(p, files)\n })\n })\n }\n fs.open(opt.file, flag, onopen)\n })\n\n return cb ? promise.then(cb, cb) : promise\n}\n\nconst addFilesSync = (p, files) => {\n files.forEach(file => {\n if (file.charAt(0) === '@')\n t({\n file: path.resolve(p.cwd, file.substr(1)),\n sync: true,\n noResume: true,\n onentry: entry => p.add(entry)\n })\n else\n p.add(file)\n })\n p.end()\n}\n\nconst addFilesAsync = (p, files) => {\n while (files.length) {\n const file = files.shift()\n if (file.charAt(0) === '@')\n return t({\n file: path.resolve(p.cwd, file.substr(1)),\n noResume: true,\n onentry: entry => p.add(entry)\n }).then(_ => addFilesAsync(p, files))\n else\n p.add(file)\n }\n p.end()\n}\n","'use strict'\n\nconst assert = require('assert')\nconst EE = require('events').EventEmitter\nconst Parser = require('./parse.js')\nconst fs = require('fs')\nconst fsm = require('fs-minipass')\nconst path = require('path')\nconst mkdir = require('./mkdir.js')\nconst mkdirSync = mkdir.sync\nconst wc = require('./winchars.js')\n\nconst ONENTRY = Symbol('onEntry')\nconst CHECKFS = Symbol('checkFs')\nconst ISREUSABLE = Symbol('isReusable')\nconst MAKEFS = Symbol('makeFs')\nconst FILE = Symbol('file')\nconst DIRECTORY = Symbol('directory')\nconst LINK = Symbol('link')\nconst SYMLINK = Symbol('symlink')\nconst HARDLINK = Symbol('hardlink')\nconst UNSUPPORTED = Symbol('unsupported')\nconst UNKNOWN = Symbol('unknown')\nconst CHECKPATH = Symbol('checkPath')\nconst MKDIR = Symbol('mkdir')\nconst ONERROR = Symbol('onError')\nconst PENDING = Symbol('pending')\nconst PEND = Symbol('pend')\nconst UNPEND = Symbol('unpend')\nconst ENDED = Symbol('ended')\nconst MAYBECLOSE = Symbol('maybeClose')\nconst SKIP = Symbol('skip')\nconst DOCHOWN = Symbol('doChown')\nconst UID = Symbol('uid')\nconst GID = Symbol('gid')\nconst crypto = require('crypto')\n\n// Unlinks on Windows are not atomic.\n//\n// This means that if you have a file entry, followed by another\n// file entry with an identical name, and you cannot re-use the file\n// (because it's a hardlink, or because unlink:true is set, or it's\n// Windows, which does not have useful nlink values), then the unlink\n// will be committed to the disk AFTER 
the new file has been written\n// over the old one, deleting the new file.\n//\n// To work around this, on Windows systems, we rename the file and then\n// delete the renamed file. It's a sloppy kludge, but frankly, I do not\n// know of a better way to do this, given windows' non-atomic unlink\n// semantics.\n//\n// See: https://github.com/npm/node-tar/issues/183\n/* istanbul ignore next */\nconst unlinkFile = (path, cb) => {\n if (process.platform !== 'win32')\n return fs.unlink(path, cb)\n\n const name = path + '.DELETE.' + crypto.randomBytes(16).toString('hex')\n fs.rename(path, name, er => {\n if (er)\n return cb(er)\n fs.unlink(name, cb)\n })\n}\n\n/* istanbul ignore next */\nconst unlinkFileSync = path => {\n if (process.platform !== 'win32')\n return fs.unlinkSync(path)\n\n const name = path + '.DELETE.' + crypto.randomBytes(16).toString('hex')\n fs.renameSync(path, name)\n fs.unlinkSync(name)\n}\n\n// this.gid, entry.gid, this.processUid\nconst uint32 = (a, b, c) =>\n a === a >>> 0 ? a\n : b === b >>> 0 ? b\n : c\n\nclass Unpack extends Parser {\n constructor (opt) {\n if (!opt)\n opt = {}\n\n opt.ondone = _ => {\n this[ENDED] = true\n this[MAYBECLOSE]()\n }\n\n super(opt)\n\n this.transform = typeof opt.transform === 'function' ? opt.transform : null\n\n this.writable = true\n this.readable = false\n\n this[PENDING] = 0\n this[ENDED] = false\n\n this.dirCache = opt.dirCache || new Map()\n\n if (typeof opt.uid === 'number' || typeof opt.gid === 'number') {\n // need both or neither\n if (typeof opt.uid !== 'number' || typeof opt.gid !== 'number')\n throw new TypeError('cannot set owner without number uid and gid')\n if (opt.preserveOwner)\n throw new TypeError(\n 'cannot preserve owner in archive and also set owner explicitly')\n this.uid = opt.uid\n this.gid = opt.gid\n this.setOwner = true\n } else {\n this.uid = null\n this.gid = null\n this.setOwner = false\n }\n\n // default true for root\n if (opt.preserveOwner === undefined && typeof opt.uid !== 'number')\n this.preserveOwner = process.getuid && process.getuid() === 0\n else\n this.preserveOwner = !!opt.preserveOwner\n\n this.processUid = (this.preserveOwner || this.setOwner) && process.getuid ?\n process.getuid() : null\n this.processGid = (this.preserveOwner || this.setOwner) && process.getgid ?\n process.getgid() : null\n\n // mostly just for testing, but useful in some cases.\n // Forcibly trigger a chown on every entry, no matter what\n this.forceChown = opt.forceChown === true\n\n // turn > this[ONENTRY](entry))\n }\n\n [MAYBECLOSE] () {\n if (this[ENDED] && this[PENDING] === 0) {\n this.emit('prefinish')\n this.emit('finish')\n this.emit('end')\n this.emit('close')\n }\n }\n\n [CHECKPATH] (entry) {\n if (this.strip) {\n const parts = entry.path.split(/\\/|\\\\/)\n if (parts.length < this.strip)\n return false\n entry.path = parts.slice(this.strip).join('/')\n }\n\n if (!this.preservePaths) {\n const p = entry.path\n if (p.match(/(^|\\/|\\\\)\\.\\.(\\\\|\\/|$)/)) {\n this.warn('path contains \\'..\\'', p)\n return false\n }\n\n // absolutes on posix are also absolutes on win32\n // so we only need to test this one to get both\n if (path.win32.isAbsolute(p)) {\n const parsed = path.win32.parse(p)\n this.warn('stripping ' + parsed.root + ' from absolute path', p)\n entry.path = p.substr(parsed.root.length)\n }\n }\n\n // only encode : chars that aren't drive letter indicators\n if (this.win32) {\n const parsed = path.win32.parse(entry.path)\n entry.path = parsed.root === '' ? 
wc.encode(entry.path)\n : parsed.root + wc.encode(entry.path.substr(parsed.root.length))\n }\n\n if (path.isAbsolute(entry.path))\n entry.absolute = entry.path\n else\n entry.absolute = path.resolve(this.cwd, entry.path)\n\n return true\n }\n\n [ONENTRY] (entry) {\n if (!this[CHECKPATH](entry))\n return entry.resume()\n\n assert.equal(typeof entry.absolute, 'string')\n\n switch (entry.type) {\n case 'Directory':\n case 'GNUDumpDir':\n if (entry.mode)\n entry.mode = entry.mode | 0o700\n\n case 'File':\n case 'OldFile':\n case 'ContiguousFile':\n case 'Link':\n case 'SymbolicLink':\n return this[CHECKFS](entry)\n\n case 'CharacterDevice':\n case 'BlockDevice':\n case 'FIFO':\n return this[UNSUPPORTED](entry)\n }\n }\n\n [ONERROR] (er, entry) {\n // Cwd has to exist, or else nothing works. That's serious.\n // Other errors are warnings, which raise the error in strict\n // mode, but otherwise continue on.\n if (er.name === 'CwdError')\n this.emit('error', er)\n else {\n this.warn(er.message, er)\n this[UNPEND]()\n entry.resume()\n }\n }\n\n [MKDIR] (dir, mode, cb) {\n mkdir(dir, {\n uid: this.uid,\n gid: this.gid,\n processUid: this.processUid,\n processGid: this.processGid,\n umask: this.processUmask,\n preserve: this.preservePaths,\n unlink: this.unlink,\n cache: this.dirCache,\n cwd: this.cwd,\n mode: mode\n }, cb)\n }\n\n [DOCHOWN] (entry) {\n // in preserve owner mode, chown if the entry doesn't match process\n // in set owner mode, chown if setting doesn't match process\n return this.forceChown ||\n this.preserveOwner &&\n ( typeof entry.uid === 'number' && entry.uid !== this.processUid ||\n typeof entry.gid === 'number' && entry.gid !== this.processGid )\n ||\n ( typeof this.uid === 'number' && this.uid !== this.processUid ||\n typeof this.gid === 'number' && this.gid !== this.processGid )\n }\n\n [UID] (entry) {\n return uint32(this.uid, entry.uid, this.processUid)\n }\n\n [GID] (entry) {\n return uint32(this.gid, entry.gid, this.processGid)\n }\n\n [FILE] (entry) {\n const mode = entry.mode & 0o7777 || this.fmode\n const stream = new fsm.WriteStream(entry.absolute, {\n mode: mode,\n autoClose: false\n })\n stream.on('error', er => this[ONERROR](er, entry))\n\n let actions = 1\n const done = er => {\n if (er)\n return this[ONERROR](er, entry)\n\n if (--actions === 0)\n fs.close(stream.fd, _ => this[UNPEND]())\n }\n\n stream.on('finish', _ => {\n // if futimes fails, try utimes\n // if utimes fails, fail with the original error\n // same for fchown/chown\n const abs = entry.absolute\n const fd = stream.fd\n\n if (entry.mtime && !this.noMtime) {\n actions++\n const atime = entry.atime || new Date()\n const mtime = entry.mtime\n fs.futimes(fd, atime, mtime, er =>\n er ? fs.utimes(abs, atime, mtime, er2 => done(er2 && er))\n : done())\n }\n\n if (this[DOCHOWN](entry)) {\n actions++\n const uid = this[UID](entry)\n const gid = this[GID](entry)\n fs.fchown(fd, uid, gid, er =>\n er ? fs.chown(abs, uid, gid, er2 => done(er2 && er))\n : done())\n }\n\n done()\n })\n\n const tx = this.transform ? 
this.transform(entry) || entry : entry\n if (tx !== entry) {\n tx.on('error', er => this[ONERROR](er, entry))\n entry.pipe(tx)\n }\n tx.pipe(stream)\n }\n\n [DIRECTORY] (entry) {\n const mode = entry.mode & 0o7777 || this.dmode\n this[MKDIR](entry.absolute, mode, er => {\n if (er)\n return this[ONERROR](er, entry)\n\n let actions = 1\n const done = _ => {\n if (--actions === 0) {\n this[UNPEND]()\n entry.resume()\n }\n }\n\n if (entry.mtime && !this.noMtime) {\n actions++\n fs.utimes(entry.absolute, entry.atime || new Date(), entry.mtime, done)\n }\n\n if (this[DOCHOWN](entry)) {\n actions++\n fs.chown(entry.absolute, this[UID](entry), this[GID](entry), done)\n }\n\n done()\n })\n }\n\n [UNSUPPORTED] (entry) {\n this.warn('unsupported entry type: ' + entry.type, entry)\n entry.resume()\n }\n\n [SYMLINK] (entry) {\n this[LINK](entry, entry.linkpath, 'symlink')\n }\n\n [HARDLINK] (entry) {\n this[LINK](entry, path.resolve(this.cwd, entry.linkpath), 'link')\n }\n\n [PEND] () {\n this[PENDING]++\n }\n\n [UNPEND] () {\n this[PENDING]--\n this[MAYBECLOSE]()\n }\n\n [SKIP] (entry) {\n this[UNPEND]()\n entry.resume()\n }\n\n // Check if we can reuse an existing filesystem entry safely and\n // overwrite it, rather than unlinking and recreating\n // Windows doesn't report a useful nlink, so we just never reuse entries\n [ISREUSABLE] (entry, st) {\n return entry.type === 'File' &&\n !this.unlink &&\n st.isFile() &&\n st.nlink <= 1 &&\n process.platform !== 'win32'\n }\n\n // check if a thing is there, and if so, try to clobber it\n [CHECKFS] (entry) {\n this[PEND]()\n this[MKDIR](path.dirname(entry.absolute), this.dmode, er => {\n if (er)\n return this[ONERROR](er, entry)\n fs.lstat(entry.absolute, (er, st) => {\n if (st && (this.keep || this.newer && st.mtime > entry.mtime))\n this[SKIP](entry)\n else if (er || this[ISREUSABLE](entry, st))\n this[MAKEFS](null, entry)\n else if (st.isDirectory()) {\n if (entry.type === 'Directory') {\n if (!entry.mode || (st.mode & 0o7777) === entry.mode)\n this[MAKEFS](null, entry)\n else\n fs.chmod(entry.absolute, entry.mode, er => this[MAKEFS](er, entry))\n } else\n fs.rmdir(entry.absolute, er => this[MAKEFS](er, entry))\n } else\n unlinkFile(entry.absolute, er => this[MAKEFS](er, entry))\n })\n })\n }\n\n [MAKEFS] (er, entry) {\n if (er)\n return this[ONERROR](er, entry)\n\n switch (entry.type) {\n case 'File':\n case 'OldFile':\n case 'ContiguousFile':\n return this[FILE](entry)\n\n case 'Link':\n return this[HARDLINK](entry)\n\n case 'SymbolicLink':\n return this[SYMLINK](entry)\n\n case 'Directory':\n case 'GNUDumpDir':\n return this[DIRECTORY](entry)\n }\n }\n\n [LINK] (entry, linkpath, link) {\n // XXX: get the type ('file' or 'dir') for windows\n fs[link](linkpath, entry.absolute, er => {\n if (er)\n return this[ONERROR](er, entry)\n this[UNPEND]()\n entry.resume()\n })\n }\n}\n\nclass UnpackSync extends Unpack {\n constructor (opt) {\n super(opt)\n }\n\n [CHECKFS] (entry) {\n const er = this[MKDIR](path.dirname(entry.absolute), this.dmode)\n if (er)\n return this[ONERROR](er, entry)\n try {\n const st = fs.lstatSync(entry.absolute)\n if (this.keep || this.newer && st.mtime > entry.mtime)\n return this[SKIP](entry)\n else if (this[ISREUSABLE](entry, st))\n return this[MAKEFS](null, entry)\n else {\n try {\n if (st.isDirectory()) {\n if (entry.type === 'Directory') {\n if (entry.mode && (st.mode & 0o7777) !== entry.mode)\n fs.chmodSync(entry.absolute, entry.mode)\n } else\n fs.rmdirSync(entry.absolute)\n } else\n unlinkFileSync(entry.absolute)\n return 
this[MAKEFS](null, entry)\n } catch (er) {\n return this[ONERROR](er, entry)\n }\n }\n } catch (er) {\n return this[MAKEFS](null, entry)\n }\n }\n\n [FILE] (entry) {\n const mode = entry.mode & 0o7777 || this.fmode\n\n const oner = er => {\n try { fs.closeSync(fd) } catch (_) {}\n if (er)\n this[ONERROR](er, entry)\n }\n\n let stream\n let fd\n try {\n fd = fs.openSync(entry.absolute, 'w', mode)\n } catch (er) {\n return oner(er)\n }\n const tx = this.transform ? this.transform(entry) || entry : entry\n if (tx !== entry) {\n tx.on('error', er => this[ONERROR](er, entry))\n entry.pipe(tx)\n }\n\n tx.on('data', chunk => {\n try {\n fs.writeSync(fd, chunk, 0, chunk.length)\n } catch (er) {\n oner(er)\n }\n })\n\n tx.on('end', _ => {\n let er = null\n // try both, falling futimes back to utimes\n // if either fails, handle the first error\n if (entry.mtime && !this.noMtime) {\n const atime = entry.atime || new Date()\n const mtime = entry.mtime\n try {\n fs.futimesSync(fd, atime, mtime)\n } catch (futimeser) {\n try {\n fs.utimesSync(entry.absolute, atime, mtime)\n } catch (utimeser) {\n er = futimeser\n }\n }\n }\n\n if (this[DOCHOWN](entry)) {\n const uid = this[UID](entry)\n const gid = this[GID](entry)\n\n try {\n fs.fchownSync(fd, uid, gid)\n } catch (fchowner) {\n try {\n fs.chownSync(entry.absolute, uid, gid)\n } catch (chowner) {\n er = er || fchowner\n }\n }\n }\n\n oner(er)\n })\n }\n\n [DIRECTORY] (entry) {\n const mode = entry.mode & 0o7777 || this.dmode\n const er = this[MKDIR](entry.absolute, mode)\n if (er)\n return this[ONERROR](er, entry)\n if (entry.mtime && !this.noMtime) {\n try {\n fs.utimesSync(entry.absolute, entry.atime || new Date(), entry.mtime)\n } catch (er) {}\n }\n if (this[DOCHOWN](entry)) {\n try {\n fs.chownSync(entry.absolute, this[UID](entry), this[GID](entry))\n } catch (er) {}\n }\n entry.resume()\n }\n\n [MKDIR] (dir, mode) {\n try {\n return mkdir.sync(dir, {\n uid: this.uid,\n gid: this.gid,\n processUid: this.processUid,\n processGid: this.processGid,\n umask: this.processUmask,\n preserve: this.preservePaths,\n unlink: this.unlink,\n cache: this.dirCache,\n cwd: this.cwd,\n mode: mode\n })\n } catch (er) {\n return er\n }\n }\n\n [LINK] (entry, linkpath, link) {\n try {\n fs[link + 'Sync'](linkpath, entry.absolute)\n entry.resume()\n } catch (er) {\n return this[ONERROR](er, entry)\n }\n }\n}\n\nUnpack.Sync = UnpackSync\nmodule.exports = Unpack\n","export const TOKEN_REF = '$jsii.byref';\nexport const TOKEN_DATE = '$jsii.date';\nexport const TOKEN_ENUM = '$jsii.enum';\n\nexport class ObjRef {\n [token: string]: string; // token = TOKEN_REF\n}\n\nexport interface Override {\n method?: string;\n property?: string;\n cookie?: string;\n}\n\nexport interface Callback {\n cbid: string;\n cookie: string | undefined;\n invoke?: InvokeRequest;\n get?: GetRequest;\n set?: SetRequest;\n}\n\nexport interface HelloResponse {\n hello: string;\n}\n\nexport interface LoadRequest {\n /** The name of the assembly */\n name: string;\n\n /** Assembly version */\n version: string;\n\n /** The tarball of the package */\n tarball: string;\n}\n\nexport interface LoadResponse {\n assembly: string;\n types: number;\n}\n\nexport interface CreateRequest {\n fqn: string\n args?: any[]\n overrides?: Override[]\n}\n\n// tslint:disable-next-line:no-empty-interface\nexport interface CreateResponse extends ObjRef {\n\n}\n\nexport interface DelRequest {\n objref: ObjRef;\n}\n\n// tslint:disable-next-line:no-empty-interface\nexport interface DelResponse {\n\n}\n\nexport interface 
GetRequest {\n objref: ObjRef;\n property: string;\n}\n\nexport interface StaticGetRequest {\n fqn: string;\n property: string;\n}\n\nexport interface GetResponse {\n value: any;\n}\n\nexport interface StaticSetRequest {\n fqn: string;\n property: string;\n value: any;\n}\n\nexport interface SetRequest {\n objref: ObjRef;\n property: string;\n value: any;\n}\n\n// tslint:disable-next-line:no-empty-interface\nexport interface SetResponse {\n\n}\n\nexport interface StaticInvokeRequest {\n fqn: string;\n method: string;\n args?: any[];\n}\n\nexport interface InvokeRequest {\n objref: ObjRef;\n method: string;\n args?: any[];\n}\n\nexport interface InvokeResponse {\n result: any;\n}\n\nexport interface BeginRequest {\n objref: ObjRef;\n method: string;\n args?: any[];\n}\n\nexport interface BeginResponse {\n promiseid: string;\n}\n\nexport interface EndRequest {\n promiseid: string;\n}\n\nexport interface EndResponse {\n result: any;\n}\n\n// tslint:disable-next-line:no-empty-interface\nexport interface CallbacksRequest {\n}\n\nexport interface CallbacksResponse {\n callbacks: Callback[];\n}\n\nexport interface CompleteRequest {\n cbid: string;\n err?: string;\n result?: any;\n}\n\nexport interface CompleteResponse {\n cbid: string;\n}\n\nexport interface NamingRequest {\n assembly: string;\n}\n\nexport interface NamingResponse {\n naming: { [language: string]: { [key: string]: any } | undefined };\n}\n\n// tslint:disable-next-line:no-empty-interface\nexport interface StatsRequest {\n}\n\nexport interface StatsResponse {\n objectCount: number;\n}\n\nexport type KernelRequest =\n LoadRequest |\n CreateRequest |\n DelRequest |\n GetRequest |\n SetRequest |\n InvokeRequest |\n BeginRequest |\n EndRequest |\n CallbacksRequest |\n CompleteRequest |\n NamingRequest |\n StatsRequest;\n\nexport type KernelResponse =\n HelloResponse |\n LoadResponse |\n CreateResponse |\n DelResponse |\n GetResponse |\n SetResponse |\n InvokeResponse |\n BeginResponse |\n EndResponse |\n CallbacksResponse |\n CompleteResponse |\n NamingResponse |\n StatsResponse;\n\nexport interface OkayResponse {\n ok: any;\n}\n\nexport interface ErrorResponse {\n error: string;\n stack?: string;\n}\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst host_1 = require(\"./host\");\nconst in_out_1 = require(\"./in-out\");\nconst name = require('../package.json').name;\nconst version = require('../package.json').version;\nconst noStack = !!process.env.JSII_NOSTACK;\nconst debug = !!process.env.JSII_DEBUG;\nconst inout = new in_out_1.InputOutput();\nconst host = new host_1.KernelHost(inout, { debug, noStack });\n// say hello\ninout.write({ hello: `${name}@${version}` });\ninout.debug = debug; // we don't want \"hello\" emitted\nhost.run();\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst jsii_kernel_1 = require(\"jsii-kernel\");\nclass KernelHost {\n constructor(inout, opts = {}) {\n this.inout = inout;\n this.opts = opts;\n this.kernel = new jsii_kernel_1.Kernel(cb => this.callbackHandler(cb));\n this.kernel.traceEnabled = opts.debug ? 
true : false;\n }\n callbackHandler(callback) {\n // write a \"callback\" response, which is a special response that tells\n // the client that there's synchonous callback it needs to invoke and\n // bring back the result via a \"complete\" request.\n this.inout.write({ callback });\n const self = this;\n return completeCallback();\n function completeCallback() {\n const req = self.inout.read();\n if (!req) {\n throw new Error('Interrupted before callback returned');\n }\n // if this is a completion for the current callback, then we can\n // finally stop this nonsense and return the result.\n const completeReq = req;\n if ('complete' in completeReq && completeReq.complete.cbid === callback.cbid) {\n if (completeReq.complete.err) {\n throw new Error(completeReq.complete.err);\n }\n return completeReq.complete.result;\n }\n // otherwise, process the request normally, but continue to wait for\n // our callback to be completed. sync=true to enforce that `completeCallback`\n // will be called synchronously and return value will be chained back so we can \n // return it to the callback handler.\n return self.processRequest(req, completeCallback, /* sync */ true);\n }\n }\n run() {\n const req = this.inout.read();\n if (!req) {\n return; // done\n }\n this.processRequest(req, () => this.run());\n }\n /**\n * Processes the input request `req` and writes the output response to\n * stdout. This method invokes `next` when the request was fully processed.\n * This either happens synchronously or asynchronously depending on the api\n * (e.g. the \"end\" api will wait for an async promise to be fulfilled before\n * it writes the response)\n *\n * @param req The input request\n * @param next A callback to invoke to continue\n * @param sync If this is 'true', \"next\" must be called synchronously. This means\n * that we won't process any async activity (begin/complete). The kernel\n * doesn't allow any async operations during a sync callback, so this shouldn't\n * happen, so we assert in this case to find bugs.\n */\n processRequest(req, next, sync = false) {\n if ('callback' in req) {\n throw new Error('Unexpected `callback` result. This request should have been processed by a callback handler');\n }\n if (!('api' in req)) {\n throw new Error('Malformed request, \"api\" field is required');\n }\n const apiReq = req;\n const fn = this.findApi(apiReq.api);\n try {\n const ret = fn.call(this.kernel, req);\n // special case for 'begin' and 'complete' which are on an async\n // promise path. in order to allow the kernel to actually fulfill\n // the promise, and continue any async flows (which may potentially\n // start other promises), we respond only within a setImmediate\n // block, which is scheduled in the same micro-tasks queue as\n // promises. 
see the kernel test 'async overrides: two overrides'\n // for an example for this use case.\n if (apiReq.api === 'begin' || apiReq.api === 'complete') {\n checkIfAsyncIsAllowed();\n this.debug('processing pending promises before responding');\n setImmediate(() => {\n this.writeOkay(ret);\n next();\n });\n return;\n }\n // if this is an async method, return immediately and\n // call next only when the promise is fulfilled.\n if (this.isPromise(ret)) {\n checkIfAsyncIsAllowed();\n this.debug('waiting for promise to be fulfilled');\n const promise = ret;\n promise\n .then(ret => {\n this.debug('promise succeeded:', ret);\n this.writeOkay(ret);\n next();\n })\n .catch(e => {\n this.debug('promise failed:', e);\n this.writeError(e);\n next();\n });\n return;\n }\n this.writeOkay(ret);\n }\n catch (e) {\n this.writeError(e);\n }\n // indicate this request was processed (synchronously).\n return next();\n function checkIfAsyncIsAllowed() {\n if (sync) {\n throw new Error('Cannot handle async operations while waiting for a sync callback to return');\n }\n }\n }\n /**\n * Writes an \"ok\" result to stdout.\n */\n writeOkay(result) {\n const res = { ok: result };\n this.inout.write(res);\n }\n /**\n * Writes an \"error\" result to stdout.\n */\n writeError(error) {\n const res = { error: error.message, stack: undefined };\n if (!this.opts.noStack) {\n res.stack = error.stack;\n }\n this.inout.write(res);\n }\n /**\n * Returns true if the value is a promise.\n */\n isPromise(v) {\n return v && v.then && typeof (v.then) === 'function';\n }\n /**\n * Given a kernel api name, returns the function to invoke.\n */\n findApi(api) {\n const fn = this.kernel[api];\n if (typeof fn !== 'function') {\n throw new Error('Invalid kernel api call: ' + api);\n }\n return fn;\n }\n debug(...args) {\n if (!this.opts.debug)\n return;\n console.error(...args);\n }\n}\nexports.KernelHost = KernelHost;\n","export * from './kernel';\n\nimport * as api from './api';\nexport { api };","import * as fs from 'fs-extra';\nimport * as spec from 'jsii-spec';\nimport * as os from 'os';\nimport * as path from 'path';\nimport { SourceMapConsumer } from 'source-map';\nimport * as tar from 'tar';\nimport * as vm from 'vm';\nimport * as api from './api';\nimport { TOKEN_DATE, TOKEN_ENUM, TOKEN_REF } from './api';\n\n/**\n * Added to objects and contains the objid (the object reference).\n * Used to find the object id from an object.\n */\nconst OBJID_PROP = '$__jsii__objid__$';\nconst FQN_PROP = '$__jsii__fqn__$';\nconst PROXIES_PROP = '$__jsii__proxies__$';\nconst PROXY_REFERENT_PROP = '$__jsii__proxy_referent__$';\n\n/**\n * A special FQN that can be used to create empty javascript objects.\n */\nconst EMPTY_OBJECT_FQN = 'Object';\n\nexport class Kernel {\n /**\n * Set to true for verbose debugging.\n */\n public traceEnabled = false;\n\n private assemblies: { [name: string]: Assembly } = { };\n private objects: { [objid: string]: any } = { };\n private cbs: { [cbid: string]: Callback } = { };\n private waiting: { [cbid: string]: Callback } = { };\n private promises: { [prid: string]: AsyncInvocation } = { };\n private nextid = 10000; // incrementing counter for objid, cbid, promiseid\n private syncInProgress?: string; // forbids async calls (begin) while processing sync calls (get/set/invoke)\n private installDir?: string;\n\n private readonly sandbox: vm.Context;\n private readonly sourceMaps: { [assm: string]: SourceMapConsumer } = {};\n\n /**\n * Creates a jsii kernel object.\n *\n * @param callbackHandler This handler is invoked 
when a synchronous callback is called.\n * It's responsibility is to execute the callback and return it's\n * result (or throw an error).\n */\n constructor(public callbackHandler: (callback: api.Callback) => any) {\n // `setImmediate` is required for tests to pass (it is otherwise\n // impossible to wait for in-VM promises to complete)\n\n // `Buffer` is required when using simple-resource-bundler.\n\n // HACK: when we webpack jsii-runtime, all \"require\" statements get transpiled,\n // so modules can be resolved within the pack. However, here we actually want to\n // let loaded modules to use the native node \"require\" method.\n // I wonder if webpack has some pragma that allows opting-out at certain points\n // in the code.\n const moduleLoad = require('module').Module._load;\n const nodeRequire = (p: string) => moduleLoad(p, module, false);\n\n this.sandbox = vm.createContext({\n Buffer, // to use simple-resource-bundler\n setImmediate, // async tests\n require: nodeRequire // modules need to \"require\"\n });\n }\n\n public async load(req: api.LoadRequest): Promise {\n this._debug('load', req);\n\n if ('assembly' in req) {\n throw new Error('`assembly` field is deprecated for \"load\", use `name`, `version` and `tarball` instead');\n }\n\n if (!this.installDir) {\n this.installDir = await fs.mkdtemp(path.join(os.tmpdir(), 'jsii-kernel-'));\n await fs.mkdirp(path.join(this.installDir, 'node_modules'));\n this._debug('creating jsii-kernel modules workdir:', this.installDir);\n\n process.on('exit', () => {\n if (this.installDir) {\n this._debug('removing install dir', this.installDir);\n fs.removeSync(this.installDir); // can't use async version during exit\n }\n });\n }\n\n const pkgname = req.name;\n const pkgver = req.version;\n\n // check if we already have such a module\n const packageDir = path.join(this.installDir, 'node_modules', pkgname);\n if (await fs.pathExists(packageDir)) {\n // module exists, verify version\n const epkg = await fs.readJson(path.join(packageDir, 'package.json'));\n if (epkg.version !== pkgver) {\n throw new Error(`Multiple versions ${pkgver} and ${epkg.version} of the `\n + `package '${pkgname}' cannot be loaded together since this is unsupported by `\n + `some runtime environments`);\n }\n\n // same version, no-op\n this._debug('look up already-loaded assembly', pkgname);\n const assm = this.assemblies[pkgname];\n\n return {\n assembly: assm.metadata.name,\n types: Object.keys(assm.metadata.types || {}).length,\n };\n } else {\n // untar the archive to a staging directory, read the jsii spec from it\n // and then move it to the node_modules directory of the kernel.\n const staging = await fs.mkdtemp(path.join(os.tmpdir(), 'jsii-kernel-install-staging-'));\n try {\n await tar.extract({ strict: true, file: req.tarball, cwd: staging });\n\n // read .jsii metadata from the root of the package\n const jsiiMetadataFile = path.join(staging, 'package', spec.SPEC_FILE_NAME);\n if (!(await fs.pathExists(jsiiMetadataFile))) {\n throw new Error(`Package tarball ${req.tarball} must have a file named ${spec.SPEC_FILE_NAME} at the root`);\n }\n const assmSpec = await fs.readJson(jsiiMetadataFile) as spec.Assembly;\n\n // \"install\" to \"node_modules\" directory\n await fs.move(path.join(staging, 'package'), packageDir);\n\n // load the module and capture it's closure\n const closure = this._execute(`require(String.raw\\`${packageDir}\\`)`, packageDir);\n const assm = new Assembly(assmSpec, closure);\n this._addAssembly(assm);\n\n return {\n assembly: assmSpec.name,\n 
types: Object.keys(assmSpec.types || {}).length,\n };\n } finally {\n this._debug('removing staging directory:', staging);\n await fs.remove(staging);\n }\n }\n }\n\n public create(req: api.CreateRequest): api.CreateResponse {\n return this._create(req);\n }\n\n public del(req: api.DelRequest): api.DelResponse {\n const { objref } = req;\n\n this._debug('del', objref);\n const obj = this._findObject(objref); // make sure object exists\n delete this.objects[objref[TOKEN_REF]];\n\n if (obj[PROXY_REFERENT_PROP]) {\n // De-register the proxy if this was a proxy...\n delete obj[PROXY_REFERENT_PROP][PROXIES_PROP][obj[FQN_PROP]];\n }\n\n return { };\n }\n\n public sget(req: api.StaticGetRequest): api.GetResponse {\n const { fqn, property } = req;\n const symbol = `${fqn}.${property}`;\n this._debug('sget', symbol);\n const ti = this._typeInfoForProperty(fqn, property);\n\n if (!ti.static) {\n throw new Error(`property ${symbol} is not static`);\n }\n\n const prototype = this._findSymbol(fqn);\n\n const value = this._ensureSync(`property ${property}`, () =>\n this._wrapSandboxCode(() => prototype[property]));\n\n this._debug('value:', value);\n const ret = this._fromSandbox(value, ti.type);\n this._debug('ret', ret);\n return { value: ret };\n }\n\n public sset(req: api.StaticSetRequest): api.SetResponse {\n const { fqn, property, value } = req;\n const symbol = `${fqn}.${property}`;\n this._debug('sset', symbol);\n const ti = this._typeInfoForProperty(fqn, property);\n\n if (!ti.static) {\n throw new Error(`property ${symbol} is not static`);\n }\n\n if (ti.immutable) {\n throw new Error(`static property ${symbol} is readonly`);\n }\n\n const prototype = this._findSymbol(fqn);\n\n this._ensureSync(`property ${property}`, () =>\n this._wrapSandboxCode(() => prototype[property] = this._toSandbox(value)));\n\n return {};\n }\n\n public get(req: api.GetRequest): api.GetResponse {\n const { objref, property } = req;\n this._debug('get', objref, property);\n const obj = this._findObject(objref);\n const fqn = this._fqnForObject(obj);\n const ti = this._typeInfoForProperty(fqn, property);\n\n // if the property is overridden by the native code and \"get\" is called on the object, it\n // means that the native code is trying to access the \"super\" property. 
in order to enable\n // that, we actually keep a copy of the original property descriptor when we override,\n // so `findPropertyTarget` will return either the original property name (\"property\") or\n // the \"super\" property name (somehing like \"$jsii$super$$\").\n const propertyToGet = this._findPropertyTarget(obj, property);\n\n // make the actual \"get\", and block any async calls that might be performed\n // by jsii overrides.\n const value = this._ensureSync(`property '${objref[TOKEN_REF]}.${propertyToGet}'`,\n () => this._wrapSandboxCode(() => obj[propertyToGet]));\n this._debug('value:', value);\n const ret = this._fromSandbox(value, ti.type);\n this._debug('ret:', ret);\n return { value: ret };\n }\n\n public set(req: api.SetRequest): api.SetResponse {\n const { objref, property, value } = req;\n this._debug('set', objref, property, value);\n const obj = this._findObject(objref);\n\n const fqn = this._fqnForObject(obj);\n const propInfo = this._typeInfoForProperty(fqn, req.property);\n\n if (propInfo.immutable) {\n throw new Error(`Cannot set value of immutable property ${req.property} to ${req.value}`);\n }\n\n const propertyToSet = this._findPropertyTarget(obj, property);\n\n this._ensureSync(`property '${objref[TOKEN_REF]}.${propertyToSet}'`,\n () => this._wrapSandboxCode(() => obj[propertyToSet] = this._toSandbox(value)));\n\n return { };\n }\n\n public invoke(req: api.InvokeRequest): api.InvokeResponse {\n const { objref, method } = req;\n const args = req.args || [ ];\n\n this._debug('invoke', objref, method, args);\n const { ti, obj, fn } = this._findInvokeTarget(objref, method, args);\n\n // verify this is not an async method\n if (ti.returns && ti.returns.promise) {\n throw new Error(`${method} is an async method, use \"begin\" instead`);\n }\n\n const ret = this._ensureSync(`method '${objref[TOKEN_REF]}.${method}'`, () => {\n return this._wrapSandboxCode(() => fn.apply(obj, this._toSandboxValues(args)));\n });\n\n return { result: this._fromSandbox(ret, ti.returns) };\n }\n\n public sinvoke(req: api.StaticInvokeRequest): api.InvokeResponse {\n const { fqn, method } = req;\n const args = req.args || [ ];\n\n this._debug('sinvoke', fqn, method, args);\n\n const ti = this._typeInfoForMethod(fqn, method);\n\n if (!ti.static) {\n throw new Error(`${fqn}.${method} is not a static method`);\n }\n\n // verify this is not an async method\n if (ti.returns && ti.returns.promise) {\n throw new Error(`${method} is an async method, use \"begin\" instead`);\n }\n\n const prototype = this._findSymbol(fqn);\n const fn = prototype[method];\n\n const ret = this._ensureSync(`method '${fqn}.${method}'`, () => {\n return this._wrapSandboxCode(() => fn.apply(null, this._toSandboxValues(args)));\n });\n\n this._debug('method returned:', ret);\n return { result: this._fromSandbox(ret, ti.returns) };\n }\n\n public begin(req: api.BeginRequest): api.BeginResponse {\n const { objref, method } = req;\n const args = req.args || [ ];\n\n this._debug('begin', objref, method, args);\n\n if (this.syncInProgress) {\n // tslint:disable-next-line:max-line-length\n throw new Error(`Cannot invoke async method '${req.objref[TOKEN_REF]}.${req.method}' while sync ${this.syncInProgress} is being processed`);\n }\n\n const { ti, obj, fn } = this._findInvokeTarget(objref, method, args);\n\n // verify this is indeed an async method\n if (!ti.returns || !ti.returns.promise) {\n throw new Error(`Method ${method} is expected to be an async method`);\n }\n\n const promise = this._wrapSandboxCode(() => fn.apply(obj, 
this._toSandboxValues(args))) as Promise;\n\n // since we are planning to resolve this promise in a different scope\n // we need to handle rejections here [1]\n // [1]: https://stackoverflow.com/questions/40920179/should-i-refrain-from-handling-promise-rejection-asynchronously/40921505\n promise.catch(_ => undefined);\n\n const prid = this._makeprid();\n this.promises[prid] = {\n promise,\n method: ti\n };\n\n return { promiseid: prid };\n }\n\n public async end(req: api.EndRequest): Promise {\n const { promiseid } = req;\n\n this._debug('end', promiseid);\n\n const { promise, method } = this.promises[promiseid];\n if (!promise) {\n throw new Error(`Cannot find promise with ID: ${promiseid}`);\n }\n\n let result;\n try {\n result = await promise;\n this._debug('promise result:', result);\n } catch (e) {\n this._debug('promise error:', e);\n throw mapSource(e, this.sourceMaps);\n }\n\n return { result: this._fromSandbox(result, method.returns) };\n }\n\n public callbacks(_req?: api.CallbacksRequest): api.CallbacksResponse {\n this._debug('callbacks');\n const ret = Object.keys(this.cbs).map(cbid => {\n const cb = this.cbs[cbid];\n this.waiting[cbid] = cb; // move to waiting\n const callback: api.Callback = {\n cbid,\n cookie: cb.override.cookie,\n invoke: {\n objref: cb.objref,\n method: cb.override.method!,\n args: cb.args\n },\n };\n return callback;\n });\n\n // move all callbacks to the wait queue and clean the callback queue.\n this.cbs = { };\n return { callbacks: ret };\n }\n\n public complete(req: api.CompleteRequest): api.CompleteResponse {\n const { cbid, err, result } = req;\n\n this._debug('complete', cbid, err, result);\n\n if (!(cbid in this.waiting)) {\n throw new Error(`Callback ${cbid} not found`);\n }\n\n const cb = this.waiting[cbid];\n if (err) {\n this._debug('completed with error:', err);\n cb.fail(new Error(err));\n } else {\n const sandoxResult = this._toSandbox(result);\n this._debug('completed with result:', sandoxResult);\n cb.succeed(sandoxResult);\n }\n\n delete this.waiting[cbid];\n\n return { cbid };\n }\n\n /**\n * Returns the language-specific names for a jsii module.\n * @param assemblyName The name of the jsii module (i.e. jsii$jsii_calculator_lib$)\n */\n public naming(req: api.NamingRequest): api.NamingResponse {\n const assemblyName = req.assembly;\n\n this._debug('naming', assemblyName);\n\n const assembly = this._assemblyFor(assemblyName);\n const targets = assembly.metadata.targets;\n if (!targets) {\n throw new Error(`Unexpected - \"targets\" for ${assemblyName} is missing!`);\n }\n\n return { naming: targets };\n }\n\n public stats(_req?: api.StatsRequest): api.StatsResponse {\n return {\n objectCount: Object.keys(this.objects).length\n };\n }\n\n private _addAssembly(assm: Assembly) {\n this.assemblies[assm.metadata.name] = assm;\n\n // add the __jsii__.fqn property on every constructor. 
this allows\n // traversing between the javascript and jsii worlds given any object.\n for (const fqn of Object.keys(assm.metadata.types || {})) {\n const typedef = assm.metadata.types![fqn];\n switch (typedef.kind) {\n case spec.TypeKind.Interface:\n continue; // interfaces don't really exist\n case spec.TypeKind.Class:\n case spec.TypeKind.Enum:\n const constructor = this._findSymbol(fqn);\n constructor.__jsii__ = { fqn };\n }\n }\n }\n\n // find the javascript constructor function for a jsii FQN.\n private _findCtor(fqn: string, args: any[]) {\n if (fqn === EMPTY_OBJECT_FQN) {\n return Object;\n }\n\n const typeinfo = this._typeInfoForFqn(fqn);\n\n switch (typeinfo.kind) {\n case spec.TypeKind.Class:\n const classType = typeinfo as spec.ClassType;\n this._validateMethodArguments(classType.initializer, args);\n return this._findSymbol(fqn);\n\n case spec.TypeKind.Interface:\n return Object;\n\n default:\n throw new Error(`Unexpected FQN kind: ${fqn}`);\n }\n }\n\n // prefixed with _ to allow calling this method internally without\n // getting it recorded for testing.\n private _create(req: api.CreateRequest): api.CreateResponse {\n const { fqn, overrides } = req;\n\n const requestArgs = req.args || [];\n\n const ctor = this._findCtor(fqn, requestArgs);\n const obj = this._wrapSandboxCode(() => new ctor(...this._toSandboxValues(requestArgs)));\n const objref = this._createObjref(obj, fqn);\n\n // overrides: for each one of the override method names, installs a\n // method on the newly created object which represents the remote \"reverse proxy\".\n\n if (overrides) {\n this._debug('overrides', overrides);\n\n const overrideTypeErrorMessage = 'Override can either be \"method\" or \"property\"';\n const methods = new Set();\n const properties = new Set();\n\n for (const override of overrides) {\n if (override.method) {\n if (override.property) { throw new Error(overrideTypeErrorMessage); }\n if (methods.has(override.method)) { throw new Error(`Duplicate override for method '${override.method}'`); }\n\n methods.add(override.method);\n\n // check that the method being overridden actually exists\n let methodInfo;\n if (fqn !== EMPTY_OBJECT_FQN) {\n // error if we can find a property with this name\n if (this._tryTypeInfoForProperty(fqn, override.method)) {\n throw new Error(`Trying to override property '${override.method}' as a method`);\n }\n\n methodInfo = this._tryTypeInfoForMethod(fqn, override.method);\n }\n\n this._applyMethodOverride(obj, objref, override, methodInfo);\n } else if (override.property) {\n if (override.method) { throw new Error(overrideTypeErrorMessage); }\n if (properties.has(override.property)) { throw Error(`Duplicate override for property '${override.property}'`); }\n properties.add(override.property);\n\n let propInfo: spec.Property | undefined;\n if (fqn !== EMPTY_OBJECT_FQN) {\n // error if we can find a method with this name\n if (this._tryTypeInfoForMethod(fqn, override.property)) {\n throw new Error(`Trying to override method '${override.property}' as a property`);\n }\n\n propInfo = this._tryTypeInfoForProperty(fqn, override.property);\n }\n\n this._applyPropertyOverride(obj, objref, override, propInfo);\n } else {\n throw new Error(overrideTypeErrorMessage);\n }\n }\n }\n\n return objref;\n }\n\n private _getSuperPropertyName(name: string) {\n return `$jsii$super$${name}$`;\n }\n\n private _applyPropertyOverride(obj: any, objref: api.ObjRef, override: api.Override, propInfo?: spec.Property) {\n const self = this;\n const propertyName = override.property!;\n\n 
// if this is a private property (i.e. doesn't have `propInfo` the object has a key)\n if (!propInfo && propertyName in obj) {\n this._debug(`Skipping override of private property ${propertyName}`);\n return;\n }\n\n this._debug('apply override', propertyName);\n\n // save the old property under $jsii$super$$ so that property overrides\n // can still access it via `super.`.\n const prev = Object.getOwnPropertyDescriptor(obj, propertyName) || {\n value: undefined,\n writable: true,\n enumerable: true,\n configurable: true\n };\n\n const prevEnumerable = prev.enumerable;\n prev.enumerable = false;\n Object.defineProperty(obj, this._getSuperPropertyName(propertyName), prev);\n\n // we add callbacks for both 'get' and 'set', even if the property\n // is readonly. this is fine because if you try to set() a readonly\n // property, it will fail.\n Object.defineProperty(obj, propertyName, {\n enumerable: prevEnumerable,\n configurable: prev.configurable,\n get: () => {\n const result = self.callbackHandler({\n cookie: override.cookie,\n cbid: self._makecbid(),\n get: { objref, property: propertyName }\n });\n this._debug('callback returned', result);\n return this._toSandbox(result);\n },\n set: (value: any) => {\n self._debug('virtual set', objref, propertyName, { cookie: override.cookie });\n self.callbackHandler({\n cookie: override.cookie,\n cbid: self._makecbid(),\n set: { objref, property: propertyName, value: self._fromSandbox(value) }\n });\n }\n });\n }\n\n private _applyMethodOverride(obj: any, objref: api.ObjRef, override: api.Override, methodInfo?: spec.Method) {\n const self = this;\n const methodName = override.method!;\n\n // If this is a private method (doesn't have methodInfo, key resolves on the object), we\n // are going to skip the override.\n if (!methodInfo && obj[methodName]) {\n this._debug(`Skipping override of private method ${methodName}`);\n return;\n }\n\n // note that we are applying the override even if the method doesn't exist\n // on the type spec in order to allow native code to override methods from\n // interfaces.\n\n if (methodInfo && methodInfo.returns && methodInfo.returns.promise) {\n // async method override\n Object.defineProperty(obj, methodName, {\n enumerable: false,\n configurable: false,\n writable: false,\n value: (...methodArgs: any[]) => {\n self._debug('invoked async override', override);\n const args = self._toSandboxValues(methodArgs);\n return new Promise((succeed, fail) => {\n const cbid = self._makecbid();\n self._debug('adding callback to queue', cbid);\n self.cbs[cbid] = {\n objref,\n override,\n args,\n succeed,\n fail\n };\n });\n }\n });\n } else {\n // sync method override (method info is not required)\n Object.defineProperty(obj, methodName, {\n enumerable: false,\n configurable: false,\n writable: false,\n value: (...methodArgs: any[]) => {\n const result = self.callbackHandler({\n cookie: override.cookie,\n cbid: self._makecbid(),\n invoke: {\n objref,\n method: methodName,\n args: this._fromSandbox(methodArgs)\n }\n });\n return this._toSandbox(result);\n }\n });\n }\n }\n\n private _findInvokeTarget(objref: any, methodName: string, args: any[]) {\n const obj = this._findObject(objref);\n const fqn = this._fqnForObject(obj);\n const ti = this._typeInfoForMethod(fqn, methodName);\n this._validateMethodArguments(ti, args);\n\n // always first look up the method in the prototype. this practically bypasses\n // any methods overridden by derived classes (which are by definition native\n // methods). 
this serves to allow native call to invoke \"super.method()\" when\n // overriding the method.\n // if we didn't find the method on the prototype, it could be a literal object\n // that implements an interface, so we look if we have the method on the object\n // itself. if we do, we invoke it.\n let fn = obj.constructor.prototype[methodName];\n if (!fn) {\n fn = obj[methodName];\n if (!fn) {\n throw new Error(`Cannot find ${methodName} on object`);\n }\n }\n return { ti, obj, fn };\n }\n\n private _formatTypeRef(typeRef: spec.TypeReference): string {\n if (spec.isCollectionTypeReference(typeRef)) {\n return `${typeRef.collection.kind}<${this._formatTypeRef(typeRef.collection.elementtype)}>`;\n }\n\n if (spec.isNamedTypeReference(typeRef)) {\n return typeRef.fqn;\n }\n\n if (spec.isPrimitiveTypeReference(typeRef)) {\n return typeRef.primitive;\n }\n\n if (spec.isUnionTypeReference(typeRef)) {\n return typeRef.union.types.map(t => this._formatTypeRef(t)).join(' | ');\n }\n\n throw new Error(`Invalid type reference: ${JSON.stringify(typeRef)}`);\n }\n\n private _validateMethodArguments(method: spec.Method | undefined, args: any[]) {\n const params: spec.Parameter[] = (method && method.parameters) || [];\n\n // error if args > params\n if (args.length > params.length && !(method && method.variadic)) {\n throw new Error(`Too many arguments (method accepts ${params.length} parameters, got ${args.length} arguments)`);\n }\n\n for (let i = 0; i < params.length; ++i) {\n const param = params[i];\n const arg = args[i];\n\n if (param.variadic) {\n if (params.length <= i) { return; } // No vararg was provided\n for (let j = i ; j < params.length ; j++) {\n if (params[j] === undefined) {\n // tslint:disable-next-line:max-line-length\n throw new Error(`Unexpected 'undefined' value at index ${j - i} of variadic argument '${param.name}' of type '${this._formatTypeRef(param.type)}'`);\n }\n }\n } else if (!param.type.optional && arg === undefined) {\n // tslint:disable-next-line:max-line-length\n throw new Error(`Not enough arguments. 
Missing argument for the required parameter '${param.name}' of type '${this._formatTypeRef(param.type)}'`);\n }\n }\n }\n\n private _assemblyFor(assemblyName: string) {\n const assembly = this.assemblies[assemblyName];\n if (!assembly) {\n throw new Error(`Could not find assembly: ${assemblyName}`);\n }\n return assembly;\n }\n\n private _findSymbol(fqn: string) {\n const [ assemblyName, ...parts ] = fqn.split('.');\n const assembly = this._assemblyFor(assemblyName);\n\n let curr = assembly.closure;\n while (true) {\n const name = parts.shift();\n if (!name) {\n break;\n }\n\n curr = curr[name];\n }\n if (!curr) {\n throw new Error(`Could not find symbol ${fqn}`);\n }\n return curr;\n }\n\n private _createObjref(obj: any, fqn: string): api.ObjRef {\n const objid = this._mkobjid(fqn);\n Object.defineProperty(obj, OBJID_PROP, {\n value: objid,\n configurable: false,\n enumerable: false,\n writable: false\n });\n\n Object.defineProperty(obj, FQN_PROP, {\n value: fqn,\n configurable: false,\n enumerable: false,\n writable: false\n });\n\n this.objects[objid] = obj;\n return { [TOKEN_REF]: objid };\n }\n\n private _findObject(objref: api.ObjRef) {\n if (typeof(objref) !== 'object' || !(TOKEN_REF in objref)) {\n throw new Error(`Malformed object reference: ${JSON.stringify(objref)}`);\n }\n\n const objid = objref[TOKEN_REF];\n this._debug('findObject', objid);\n const obj = this.objects[objid];\n if (!obj) {\n throw new Error(`Object ${objid} not found`);\n }\n return obj;\n }\n\n private _typeInfoForFqn(fqn: string): spec.Type {\n const components = fqn.split('.');\n const moduleName = components[0];\n\n const assembly = this.assemblies[moduleName];\n if (!assembly) {\n throw new Error(`Module '${moduleName}' not found`);\n }\n\n const types = assembly.metadata.types || {};\n const fqnInfo = types[fqn];\n if (!fqnInfo) {\n throw new Error(`Type '${fqn}' not found`);\n }\n\n return fqnInfo;\n }\n\n private _typeInfoForMethod(fqn: string, methodName: string): spec.Method {\n const ti = this._tryTypeInfoForMethod(fqn, methodName);\n if (!ti) {\n throw new Error(`Class ${fqn} doesn't have a method '${methodName}'`);\n }\n return ti;\n }\n\n private _tryTypeInfoForMethod(fqn: string, methodName: string): spec.Method | undefined {\n const typeinfo = this._typeInfoForFqn(fqn);\n\n const methods = (typeinfo as (spec.ClassType | spec.InterfaceType)).methods || [];\n const bases = [\n (typeinfo as spec.ClassType).base,\n ...((typeinfo as spec.InterfaceType).interfaces || []) ];\n\n for (const m of methods) {\n if (m.name === methodName) {\n return m;\n }\n }\n\n // recursion to parent type (if exists)\n for (const base of bases) {\n if (!base) { continue; }\n\n const found = this._tryTypeInfoForMethod(base.fqn!, methodName);\n if (found) {\n return found;\n }\n }\n\n return undefined;\n }\n\n private _tryTypeInfoForProperty(fqn: string, property: string): spec.Property | undefined {\n if (!fqn) {\n throw new Error('missing \"fqn\"');\n }\n const typeInfo = this._typeInfoForFqn(fqn);\n\n let properties;\n let bases;\n\n if (spec.isClassType(typeInfo)) {\n const classTypeInfo = typeInfo as spec.ClassType;\n properties = classTypeInfo.properties;\n bases = classTypeInfo.base ? 
[ classTypeInfo.base.fqn ] : [];\n } else if (spec.isInterfaceType(typeInfo)) {\n const interfaceTypeInfo = typeInfo as spec.InterfaceType;\n properties = interfaceTypeInfo.properties;\n bases = (interfaceTypeInfo.interfaces || []).map(x => x.fqn);\n } else {\n throw new Error(`Type of kind ${typeInfo.kind} does not have properties`);\n }\n\n for (const p of properties || []) {\n if (p.name === property) {\n return p;\n }\n }\n\n // recurse to parent type (if exists)\n for (const baseFqn of bases) {\n const ret = this._tryTypeInfoForProperty(baseFqn, property);\n if (ret) {\n return ret;\n }\n }\n\n return undefined;\n }\n\n private _typeInfoForProperty(fqn: string, property: string): spec.Property {\n const typeInfo = this._tryTypeInfoForProperty(fqn, property);\n if (!typeInfo) {\n throw new Error(`Type ${fqn} doesn't have a property '${property}'`);\n }\n return typeInfo;\n }\n\n private _toSandbox(v: any): any {\n // undefined\n if (typeof v === 'undefined') {\n return undefined;\n }\n\n // null\n if (v === null) {\n return null;\n }\n\n // pointer\n if (typeof v === 'object' && TOKEN_REF in v) {\n return this._findObject(v);\n }\n\n // date\n if (typeof v === 'object' && TOKEN_DATE in v) {\n this._debug('Found date:', v);\n return new Date(v[TOKEN_DATE]);\n }\n\n // enums\n if (typeof v === 'object' && TOKEN_ENUM in v) {\n this._debug('Enum:', v);\n\n const value = v[TOKEN_ENUM] as string;\n const sep = value.lastIndexOf('/');\n if (sep === -1) {\n throw new Error(`Malformed enum value: ${v[TOKEN_ENUM]}`);\n }\n\n const typeName = value.substr(0, sep);\n const valueName = value.substr(sep + 1);\n\n const enumValue = this._findSymbol(typeName)[valueName];\n if (enumValue === undefined) {\n throw new Error(`No enum member named ${valueName} in ${typeName}`);\n }\n\n this._debug('resolved enum value:', enumValue);\n return enumValue;\n }\n\n // array\n if (Array.isArray(v)) {\n return v.map(x => this._toSandbox(x));\n }\n\n // map\n if (typeof v === 'object') {\n const out: any = { };\n for (const k of Object.keys(v)) {\n out[k] = this._toSandbox(v[k]);\n }\n return out;\n }\n\n // primitive\n return v;\n }\n\n private _fromSandbox(v: any, targetType?: spec.TypeReference): any {\n this._debug('fromSandbox', v, targetType);\n\n // undefined is returned as null: true\n if (typeof(v) === 'undefined') {\n return undefined;\n }\n\n // existing object\n const objid = v[OBJID_PROP];\n if (objid) {\n // object already has an objid, return it as a ref.\n this._debug('objref exists', objid);\n return { [TOKEN_REF]: objid };\n }\n\n // new object\n if (typeof(v) === 'object' && v.constructor.__jsii__) {\n // this is jsii object which was created inside the sandbox and still doesn't\n // have an object id, so we need to allocate one for it.\n this._debug('creating objref for', v);\n const fqn = this._fqnForObject(v);\n return this._createObjref(v, fqn);\n }\n\n // if the method/property returns an object literal and the return type\n // is a class, we create a new object based on the fqn and assign all keys.\n // so the client receives a real object.\n if (typeof(v) === 'object' && targetType && spec.isNamedTypeReference(targetType)) {\n this._debug('coalescing to', targetType);\n /*\n * We \"cache\" proxy instances in [PROXIES_PROP] so we can return an\n * identical object reference upon multiple accesses of the same\n * object literal under the same exposed type. 
This results in a\n * behavior that is more consistent with class instances.\n */\n const proxies: Proxies = v[PROXIES_PROP] = v[PROXIES_PROP] || {};\n if (!proxies[targetType.fqn]) {\n const handler = new KernelProxyHandler(v);\n const proxy = new Proxy(v, handler);\n // _createObjref will set the FQN_PROP & OBJID_PROP on the proxy.\n proxies[targetType.fqn] = { objRef: this._createObjref(proxy, targetType.fqn), handler };\n }\n return proxies[targetType.fqn].objRef;\n }\n\n // date (https://stackoverflow.com/a/643827/737957)\n if (typeof(v) === 'object' && Object.prototype.toString.call(v) === '[object Date]') {\n this._debug('date', v);\n return { [TOKEN_DATE]: v.toISOString() };\n }\n\n // array\n if (Array.isArray(v)) {\n this._debug('array', v);\n return v.map(x => this._fromSandbox(x));\n }\n\n if (targetType && spec.isNamedTypeReference(targetType)) {\n const propType = this._typeInfoForFqn(targetType.fqn);\n\n // enum\n if (propType.kind === spec.TypeKind.Enum) {\n this._debug('enum', v);\n const fqn = propType.fqn;\n\n const valueName = this._findSymbol(fqn)[v];\n\n return { [TOKEN_ENUM]: `${propType.fqn}/${valueName}` };\n }\n\n }\n\n // map\n if (typeof(v) === 'object') {\n this._debug('map', v);\n const out: any = { };\n for (const k of Object.keys(v)) {\n out[k] = this._fromSandbox(v[k]);\n }\n return out;\n }\n\n // primitive\n this._debug('primitive', v);\n return v;\n }\n\n private _toSandboxValues(args: any[]) {\n return args.map(v => this._toSandbox(v));\n }\n\n private _debug(...args: any[]) {\n if (this.traceEnabled) {\n // tslint:disable-next-line:no-console\n console.error.apply(console, [\n '[jsii-kernel]',\n args[0],\n ...args.slice(1)\n ]);\n }\n }\n\n /**\n * Ensures that `fn` is called and defends against beginning to invoke\n * async methods until fn finishes (successfully or not).\n */\n private _ensureSync(desc: string, fn: () => T): T {\n this.syncInProgress = desc;\n try {\n return fn();\n } catch (e) {\n throw e;\n } finally {\n delete this.syncInProgress;\n }\n }\n\n private _findPropertyTarget(obj: any, property: string) {\n const superProp = this._getSuperPropertyName(property);\n if (superProp in obj) {\n return superProp;\n } else {\n return property;\n }\n }\n\n //\n // type information\n //\n\n private _fqnForObject(obj: any) {\n if (FQN_PROP in obj) {\n return obj[FQN_PROP];\n }\n\n if (!obj.constructor.__jsii__) {\n throw new Error('No jsii type info for object');\n }\n\n return obj.constructor.__jsii__.fqn;\n }\n\n private _mkobjid(fqn: string) {\n return `${fqn}@${this.nextid++}`;\n }\n\n private _makecbid() {\n return `jsii::callback::${this.nextid++}`;\n }\n\n private _makeprid() {\n return `jsii::promise::${this.nextid++}`;\n }\n\n private _wrapSandboxCode(fn: () => T): T {\n try {\n return fn();\n } catch (err) {\n throw mapSource(err, this.sourceMaps);\n }\n }\n\n /**\n * Executes arbitrary code in a VM sandbox.\n *\n * @param code JavaScript code to be executed in the VM\n * @param sandbox a VM context to use for running the code\n * @param sourceMaps source maps to be used in case an exception is thrown\n * @param filename the file name to use for the executed code\n *\n * @returns the result of evaluating the code\n */\n private _execute(code: string, filename: string) {\n const script = new vm.Script(code, { filename });\n try {\n return script.runInContext(this.sandbox, { displayErrors: true });\n } catch (err) {\n throw mapSource(err, this.sourceMaps);\n }\n }\n}\n\ninterface Callback {\n objref: api.ObjRef;\n override: 
api.Override;\n args: any[];\n\n // completion callbacks\n succeed: (...args: any[]) => any;\n fail: (...args: any[]) => any;\n}\n\ninterface AsyncInvocation {\n method: spec.Method\n promise: Promise\n}\n\nclass Assembly {\n constructor(public readonly metadata: spec.Assembly,\n public readonly closure: any) {\n }\n}\n\n/**\n * Applies source maps to an error's stack trace and returns the mapped error,\n * and stitches stack trace elements to adapt the context to the current trace.\n *\n * @param err is the error to be mapped\n * @param sourceMaps the source maps to be used\n *\n * @returns the mapped error\n */\nfunction mapSource(err: Error, sourceMaps: { [assm: string]: SourceMapConsumer }): Error {\n if (!err.stack) { return err; }\n const oldFrames = err.stack.split(\"\\n\");\n const obj = { stack: '' };\n const previousLimit = Error.stackTraceLimit;\n try {\n Error.stackTraceLimit = err.stack.split(\"\\n\").length;\n Error.captureStackTrace(obj, mapSource);\n const realFrames = obj.stack.split('\\n').slice(1);\n const topFrame = realFrames[0].substring(0, realFrames[0].indexOf(' ('));\n err.stack = [\n ...oldFrames.slice(0, oldFrames.findIndex(frame => frame.startsWith(topFrame))).map(applyMaps),\n ...realFrames\n ].join(\"\\n\");\n return err;\n } finally {\n Error.stackTraceLimit = previousLimit;\n }\n\n function applyMaps(frame: string): string {\n const mappable = /^(\\s*at\\s+.+)\\(jsii\\/(.+)\\.js:(\\d+):(\\d+)\\)$/;\n const matches = mappable.exec(frame);\n if (!matches) { return frame; }\n const assm = matches[2];\n if (!(assm in sourceMaps)) { return frame; }\n const prefix = matches[1];\n const line = parseInt(matches[3], 10);\n const column = parseInt(matches[4], 10);\n const sourceMap = sourceMaps[assm];\n const pos = sourceMap.originalPositionFor({ line, column });\n if (pos.source != null && pos.line != null) {\n const source = pos.source.replace(/^webpack:\\/\\//, `${assm}`);\n return `${prefix}(${source}:${pos.line}:${pos.column || 0})`;\n }\n return frame;\n }\n}\n\ntype ObjectKey = string | number | symbol;\n/**\n * A Proxy handler class to support mutation of the returned object literals, as\n * they may \"embody\" several different interfaces. 
The handler is in particular\n * responsible to make sure the ``FQN_PROP`` and ``OBJID_PROP`` do not get set\n * on the ``referent`` object, for this would cause subsequent accesses to\n * possibly return incorrect object references.\n */\nclass KernelProxyHandler implements ProxyHandler {\n private readonly ownProperties: { [key: string]: any } = {};\n\n /**\n * @param referent the \"real\" value that will be returned.\n */\n constructor(public readonly referent: any) {\n /*\n * Proxy-properties must exist as non-configurable & writable on the\n * referent, otherwise the Proxy will not allow returning ``true`` in\n * response to ``defineProperty``.\n */\n for (const prop of [FQN_PROP, OBJID_PROP]) {\n Object.defineProperty(referent, prop, {\n configurable: false,\n enumerable: false,\n writable: true,\n value: undefined\n });\n }\n }\n\n public defineProperty(target: any, property: ObjectKey, attributes: PropertyDescriptor): boolean {\n switch (property) {\n case FQN_PROP:\n case OBJID_PROP:\n return Object.defineProperty(this.ownProperties, property, attributes);\n default:\n return Object.defineProperty(target, property, attributes);\n }\n }\n\n public deleteProperty(target: any, property: ObjectKey): boolean {\n switch (property) {\n case FQN_PROP:\n case OBJID_PROP:\n delete this.ownProperties[property];\n break;\n default:\n delete target[property];\n }\n return true;\n }\n\n public getOwnPropertyDescriptor(target: any, property: ObjectKey): PropertyDescriptor | undefined {\n switch (property) {\n case FQN_PROP:\n case OBJID_PROP:\n return Object.getOwnPropertyDescriptor(this.ownProperties, property);\n default:\n return Object.getOwnPropertyDescriptor(target, property);\n }\n }\n\n public get(target: any, property: ObjectKey): any {\n switch (property) {\n // Magical property for the proxy, so we can tell it's one...\n case PROXY_REFERENT_PROP:\n return this.referent;\n case FQN_PROP:\n case OBJID_PROP:\n return this.ownProperties[property];\n default:\n return target[property];\n }\n }\n\n public set(target: any, property: ObjectKey, value: any): boolean {\n switch (property) {\n case FQN_PROP:\n case OBJID_PROP:\n this.ownProperties[property] = value;\n break;\n default:\n target[property] = value;\n }\n return true;\n }\n\n public has(target: any, property: ObjectKey): boolean {\n switch (property) {\n case FQN_PROP:\n case OBJID_PROP:\n return property in this.ownProperties;\n default:\n return property in target;\n }\n }\n\n public ownKeys(target: any): ObjectKey[] {\n return Reflect.ownKeys(target).concat(Reflect.ownKeys(this.ownProperties));\n }\n}\n\ntype Proxies = { [fqn: string]: ProxyReference };\ninterface ProxyReference {\n objRef: api.ObjRef;\n handler: KernelProxyHandler;\n}\n","module.exports = function(module) {\n\tif (!module.webpackPolyfill) {\n\t\tmodule.deprecate = function() {};\n\t\tmodule.paths = [];\n\t\t// module.parent = undefined by default\n\t\tif (!module.children) module.children = [];\n\t\tObject.defineProperty(module, \"loaded\", {\n\t\t\tenumerable: true,\n\t\t\tget: function() {\n\t\t\t\treturn module.l;\n\t\t\t}\n\t\t});\n\t\tObject.defineProperty(module, \"id\", {\n\t\t\tenumerable: true,\n\t\t\tget: function() {\n\t\t\t\treturn module.i;\n\t\t\t}\n\t\t});\n\t\tmodule.webpackPolyfill = 1;\n\t}\n\treturn module;\n};\n","'use strict'\n\nconst assign = require('./util/assign')\n\nconst fs = {}\n\n// Export graceful-fs:\nassign(fs, require('./fs'))\n// Export extra methods:\nassign(fs, require('./copy'))\nassign(fs, 
require('./copy-sync'))\nassign(fs, require('./mkdirs'))\nassign(fs, require('./remove'))\nassign(fs, require('./json'))\nassign(fs, require('./move'))\nassign(fs, require('./move-sync'))\nassign(fs, require('./empty'))\nassign(fs, require('./ensure'))\nassign(fs, require('./output'))\nassign(fs, require('./path-exists'))\n\nmodule.exports = fs\n","'use strict'\n\n// simple mutable assign\nfunction assign () {\n const args = [].slice.call(arguments).filter(i => i)\n const dest = args.shift()\n args.forEach(src => {\n Object.keys(src).forEach(key => {\n dest[key] = src[key]\n })\n })\n\n return dest\n}\n\nmodule.exports = assign\n","var fs = require('./fs.js')\nvar constants = require('constants')\n\nvar origCwd = process.cwd\nvar cwd = null\n\nvar platform = process.env.GRACEFUL_FS_PLATFORM || process.platform\n\nprocess.cwd = function() {\n if (!cwd)\n cwd = origCwd.call(process)\n return cwd\n}\ntry {\n process.cwd()\n} catch (er) {}\n\nvar chdir = process.chdir\nprocess.chdir = function(d) {\n cwd = null\n chdir.call(process, d)\n}\n\nmodule.exports = patch\n\nfunction patch (fs) {\n // (re-)implement some things that are known busted or missing.\n\n // lchmod, broken prior to 0.6.2\n // back-port the fix here.\n if (constants.hasOwnProperty('O_SYMLINK') &&\n process.version.match(/^v0\\.6\\.[0-2]|^v0\\.5\\./)) {\n patchLchmod(fs)\n }\n\n // lutimes implementation, or no-op\n if (!fs.lutimes) {\n patchLutimes(fs)\n }\n\n // https://github.com/isaacs/node-graceful-fs/issues/4\n // Chown should not fail on einval or eperm if non-root.\n // It should not fail on enosys ever, as this just indicates\n // that a fs doesn't support the intended operation.\n\n fs.chown = chownFix(fs.chown)\n fs.fchown = chownFix(fs.fchown)\n fs.lchown = chownFix(fs.lchown)\n\n fs.chmod = chmodFix(fs.chmod)\n fs.fchmod = chmodFix(fs.fchmod)\n fs.lchmod = chmodFix(fs.lchmod)\n\n fs.chownSync = chownFixSync(fs.chownSync)\n fs.fchownSync = chownFixSync(fs.fchownSync)\n fs.lchownSync = chownFixSync(fs.lchownSync)\n\n fs.chmodSync = chmodFixSync(fs.chmodSync)\n fs.fchmodSync = chmodFixSync(fs.fchmodSync)\n fs.lchmodSync = chmodFixSync(fs.lchmodSync)\n\n fs.stat = statFix(fs.stat)\n fs.fstat = statFix(fs.fstat)\n fs.lstat = statFix(fs.lstat)\n\n fs.statSync = statFixSync(fs.statSync)\n fs.fstatSync = statFixSync(fs.fstatSync)\n fs.lstatSync = statFixSync(fs.lstatSync)\n\n // if lchmod/lchown do not exist, then make them no-ops\n if (!fs.lchmod) {\n fs.lchmod = function (path, mode, cb) {\n if (cb) process.nextTick(cb)\n }\n fs.lchmodSync = function () {}\n }\n if (!fs.lchown) {\n fs.lchown = function (path, uid, gid, cb) {\n if (cb) process.nextTick(cb)\n }\n fs.lchownSync = function () {}\n }\n\n // on Windows, A/V software can lock the directory, causing this\n // to fail with an EACCES or EPERM if the directory contains newly\n // created files. Try again on failure, for up to 60 seconds.\n\n // Set the timeout this long because some Windows Anti-Virus, such as Parity\n // bit9, may lock files for up to a minute, causing npm package install\n // failures. Also, take care to yield the scheduler. 
Windows scheduling gives\n // CPU to a busy looping process, which can cause the program causing the lock\n // contention to be starved of CPU by node, so the contention doesn't resolve.\n if (platform === \"win32\") {\n fs.rename = (function (fs$rename) { return function (from, to, cb) {\n var start = Date.now()\n var backoff = 0;\n fs$rename(from, to, function CB (er) {\n if (er\n && (er.code === \"EACCES\" || er.code === \"EPERM\")\n && Date.now() - start < 60000) {\n setTimeout(function() {\n fs.stat(to, function (stater, st) {\n if (stater && stater.code === \"ENOENT\")\n fs$rename(from, to, CB);\n else\n cb(er)\n })\n }, backoff)\n if (backoff < 100)\n backoff += 10;\n return;\n }\n if (cb) cb(er)\n })\n }})(fs.rename)\n }\n\n // if read() returns EAGAIN, then just try it again.\n fs.read = (function (fs$read) { return function (fd, buffer, offset, length, position, callback_) {\n var callback\n if (callback_ && typeof callback_ === 'function') {\n var eagCounter = 0\n callback = function (er, _, __) {\n if (er && er.code === 'EAGAIN' && eagCounter < 10) {\n eagCounter ++\n return fs$read.call(fs, fd, buffer, offset, length, position, callback)\n }\n callback_.apply(this, arguments)\n }\n }\n return fs$read.call(fs, fd, buffer, offset, length, position, callback)\n }})(fs.read)\n\n fs.readSync = (function (fs$readSync) { return function (fd, buffer, offset, length, position) {\n var eagCounter = 0\n while (true) {\n try {\n return fs$readSync.call(fs, fd, buffer, offset, length, position)\n } catch (er) {\n if (er.code === 'EAGAIN' && eagCounter < 10) {\n eagCounter ++\n continue\n }\n throw er\n }\n }\n }})(fs.readSync)\n}\n\nfunction patchLchmod (fs) {\n fs.lchmod = function (path, mode, callback) {\n fs.open( path\n , constants.O_WRONLY | constants.O_SYMLINK\n , mode\n , function (err, fd) {\n if (err) {\n if (callback) callback(err)\n return\n }\n // prefer to return the chmod error, if one occurs,\n // but still try to close, and report closing errors if they occur.\n fs.fchmod(fd, mode, function (err) {\n fs.close(fd, function(err2) {\n if (callback) callback(err || err2)\n })\n })\n })\n }\n\n fs.lchmodSync = function (path, mode) {\n var fd = fs.openSync(path, constants.O_WRONLY | constants.O_SYMLINK, mode)\n\n // prefer to return the chmod error, if one occurs,\n // but still try to close, and report closing errors if they occur.\n var threw = true\n var ret\n try {\n ret = fs.fchmodSync(fd, mode)\n threw = false\n } finally {\n if (threw) {\n try {\n fs.closeSync(fd)\n } catch (er) {}\n } else {\n fs.closeSync(fd)\n }\n }\n return ret\n }\n}\n\nfunction patchLutimes (fs) {\n if (constants.hasOwnProperty(\"O_SYMLINK\")) {\n fs.lutimes = function (path, at, mt, cb) {\n fs.open(path, constants.O_SYMLINK, function (er, fd) {\n if (er) {\n if (cb) cb(er)\n return\n }\n fs.futimes(fd, at, mt, function (er) {\n fs.close(fd, function (er2) {\n if (cb) cb(er || er2)\n })\n })\n })\n }\n\n fs.lutimesSync = function (path, at, mt) {\n var fd = fs.openSync(path, constants.O_SYMLINK)\n var ret\n var threw = true\n try {\n ret = fs.futimesSync(fd, at, mt)\n threw = false\n } finally {\n if (threw) {\n try {\n fs.closeSync(fd)\n } catch (er) {}\n } else {\n fs.closeSync(fd)\n }\n }\n return ret\n }\n\n } else {\n fs.lutimes = function (_a, _b, _c, cb) { if (cb) process.nextTick(cb) }\n fs.lutimesSync = function () {}\n }\n}\n\nfunction chmodFix (orig) {\n if (!orig) return orig\n return function (target, mode, cb) {\n return orig.call(fs, target, mode, function (er) {\n if (chownErOk(er)) 
er = null\n if (cb) cb.apply(this, arguments)\n })\n }\n}\n\nfunction chmodFixSync (orig) {\n if (!orig) return orig\n return function (target, mode) {\n try {\n return orig.call(fs, target, mode)\n } catch (er) {\n if (!chownErOk(er)) throw er\n }\n }\n}\n\n\nfunction chownFix (orig) {\n if (!orig) return orig\n return function (target, uid, gid, cb) {\n return orig.call(fs, target, uid, gid, function (er) {\n if (chownErOk(er)) er = null\n if (cb) cb.apply(this, arguments)\n })\n }\n}\n\nfunction chownFixSync (orig) {\n if (!orig) return orig\n return function (target, uid, gid) {\n try {\n return orig.call(fs, target, uid, gid)\n } catch (er) {\n if (!chownErOk(er)) throw er\n }\n }\n}\n\n\nfunction statFix (orig) {\n if (!orig) return orig\n // Older versions of Node erroneously returned signed integers for\n // uid + gid.\n return function (target, cb) {\n return orig.call(fs, target, function (er, stats) {\n if (!stats) return cb.apply(this, arguments)\n if (stats.uid < 0) stats.uid += 0x100000000\n if (stats.gid < 0) stats.gid += 0x100000000\n if (cb) cb.apply(this, arguments)\n })\n }\n}\n\nfunction statFixSync (orig) {\n if (!orig) return orig\n // Older versions of Node erroneously returned signed integers for\n // uid + gid.\n return function (target) {\n var stats = orig.call(fs, target)\n if (stats.uid < 0) stats.uid += 0x100000000\n if (stats.gid < 0) stats.gid += 0x100000000\n return stats;\n }\n}\n\n// ENOSYS means that the fs doesn't support the op. Just ignore\n// that, because it doesn't matter.\n//\n// if there's no getuid, or if getuid() is something other\n// than 0, and the error is EINVAL or EPERM, then just ignore\n// it.\n//\n// This specific case is a silent failure in cp, install, tar,\n// and most other unix tools that manage permissions.\n//\n// When running as root, or if other types of errors are\n// encountered, then it's strict.\nfunction chownErOk (er) {\n if (!er)\n return true\n\n if (er.code === \"ENOSYS\")\n return true\n\n var nonroot = !process.getuid || process.getuid() !== 0\n if (nonroot) {\n if (er.code === \"EINVAL\" || er.code === \"EPERM\")\n return true\n }\n\n return false\n}\n","module.exports = require(\"constants\");","var Stream = require('stream').Stream\n\nmodule.exports = legacy\n\nfunction legacy (fs) {\n return {\n ReadStream: ReadStream,\n WriteStream: WriteStream\n }\n\n function ReadStream (path, options) {\n if (!(this instanceof ReadStream)) return new ReadStream(path, options);\n\n Stream.call(this);\n\n var self = this;\n\n this.path = path;\n this.fd = null;\n this.readable = true;\n this.paused = false;\n\n this.flags = 'r';\n this.mode = 438; /*=0666*/\n this.bufferSize = 64 * 1024;\n\n options = options || {};\n\n // Mixin options into this\n var keys = Object.keys(options);\n for (var index = 0, length = keys.length; index < length; index++) {\n var key = keys[index];\n this[key] = options[key];\n }\n\n if (this.encoding) this.setEncoding(this.encoding);\n\n if (this.start !== undefined) {\n if ('number' !== typeof this.start) {\n throw TypeError('start must be a Number');\n }\n if (this.end === undefined) {\n this.end = Infinity;\n } else if ('number' !== typeof this.end) {\n throw TypeError('end must be a Number');\n }\n\n if (this.start > this.end) {\n throw new Error('start must be <= end');\n }\n\n this.pos = this.start;\n }\n\n if (this.fd !== null) {\n process.nextTick(function() {\n self._read();\n });\n return;\n }\n\n fs.open(this.path, this.flags, this.mode, function (err, fd) {\n if (err) {\n 
self.emit('error', err);\n self.readable = false;\n return;\n }\n\n self.fd = fd;\n self.emit('open', fd);\n self._read();\n })\n }\n\n function WriteStream (path, options) {\n if (!(this instanceof WriteStream)) return new WriteStream(path, options);\n\n Stream.call(this);\n\n this.path = path;\n this.fd = null;\n this.writable = true;\n\n this.flags = 'w';\n this.encoding = 'binary';\n this.mode = 438; /*=0666*/\n this.bytesWritten = 0;\n\n options = options || {};\n\n // Mixin options into this\n var keys = Object.keys(options);\n for (var index = 0, length = keys.length; index < length; index++) {\n var key = keys[index];\n this[key] = options[key];\n }\n\n if (this.start !== undefined) {\n if ('number' !== typeof this.start) {\n throw TypeError('start must be a Number');\n }\n if (this.start < 0) {\n throw new Error('start must be >= zero');\n }\n\n this.pos = this.start;\n }\n\n this.busy = false;\n this._queue = [];\n\n if (this.fd === null) {\n this._open = fs.open;\n this._queue.push([this._open, this.path, this.flags, this.mode, undefined]);\n this.flush();\n }\n }\n}\n","module.exports = require(\"stream\");","module.exports = require(\"util\");","const u = require('universalify').fromCallback\nmodule.exports = {\n copy: u(require('./copy'))\n}\n","'use strict'\n\nconst fs = require('graceful-fs')\nconst path = require('path')\nconst ncp = require('./ncp')\nconst mkdir = require('../mkdirs')\nconst pathExists = require('../path-exists').pathExists\n\nfunction copy (src, dest, options, callback) {\n if (typeof options === 'function' && !callback) {\n callback = options\n options = {}\n } else if (typeof options === 'function' || options instanceof RegExp) {\n options = {filter: options}\n }\n callback = callback || function () {}\n options = options || {}\n\n // Warn about using preserveTimestamps on 32-bit node:\n if (options.preserveTimestamps && process.arch === 'ia32') {\n console.warn(`fs-extra: Using the preserveTimestamps option in 32-bit node is not recommended;\\n\n see https://github.com/jprichardson/node-fs-extra/issues/269`)\n }\n\n // don't allow src and dest to be the same\n const basePath = process.cwd()\n const currentPath = path.resolve(basePath, src)\n const targetPath = path.resolve(basePath, dest)\n if (currentPath === targetPath) return callback(new Error('Source and destination must not be the same.'))\n\n fs.lstat(src, (err, stats) => {\n if (err) return callback(err)\n\n let dir = null\n if (stats.isDirectory()) {\n const parts = dest.split(path.sep)\n parts.pop()\n dir = parts.join(path.sep)\n } else {\n dir = path.dirname(dest)\n }\n\n pathExists(dir, (err, dirExists) => {\n if (err) return callback(err)\n if (dirExists) return ncp(src, dest, options, callback)\n mkdir.mkdirs(dir, err => {\n if (err) return callback(err)\n ncp(src, dest, options, callback)\n })\n })\n })\n}\n\nmodule.exports = copy\n","'use strict'\n\nconst fs = require('graceful-fs')\nconst os = require('os')\nconst path = require('path')\n\n// HFS, ext{2,3}, FAT do not, Node.js v0.10 does not\nfunction hasMillisResSync () {\n let tmpfile = path.join('millis-test-sync' + Date.now().toString() + Math.random().toString().slice(2))\n tmpfile = path.join(os.tmpdir(), tmpfile)\n\n // 550 millis past UNIX epoch\n const d = new Date(1435410243862)\n fs.writeFileSync(tmpfile, 'https://github.com/jprichardson/node-fs-extra/pull/141')\n const fd = fs.openSync(tmpfile, 'r+')\n fs.futimesSync(fd, d, d)\n fs.closeSync(fd)\n return fs.statSync(tmpfile).mtime > 1435410243000\n}\n\nfunction 
hasMillisRes (callback) {\n let tmpfile = path.join('millis-test' + Date.now().toString() + Math.random().toString().slice(2))\n tmpfile = path.join(os.tmpdir(), tmpfile)\n\n // 550 millis past UNIX epoch\n const d = new Date(1435410243862)\n fs.writeFile(tmpfile, 'https://github.com/jprichardson/node-fs-extra/pull/141', err => {\n if (err) return callback(err)\n fs.open(tmpfile, 'r+', (err, fd) => {\n if (err) return callback(err)\n fs.futimes(fd, d, d, err => {\n if (err) return callback(err)\n fs.close(fd, err => {\n if (err) return callback(err)\n fs.stat(tmpfile, (err, stats) => {\n if (err) return callback(err)\n callback(null, stats.mtime > 1435410243000)\n })\n })\n })\n })\n })\n}\n\nfunction timeRemoveMillis (timestamp) {\n if (typeof timestamp === 'number') {\n return Math.floor(timestamp / 1000) * 1000\n } else if (timestamp instanceof Date) {\n return new Date(Math.floor(timestamp.getTime() / 1000) * 1000)\n } else {\n throw new Error('fs-extra: timeRemoveMillis() unknown parameter type')\n }\n}\n\nfunction utimesMillis (path, atime, mtime, callback) {\n // if (!HAS_MILLIS_RES) return fs.utimes(path, atime, mtime, callback)\n fs.open(path, 'r+', (err, fd) => {\n if (err) return callback(err)\n fs.futimes(fd, atime, mtime, futimesErr => {\n fs.close(fd, closeErr => {\n if (callback) callback(futimesErr || closeErr)\n })\n })\n })\n}\n\nmodule.exports = {\n hasMillisRes,\n hasMillisResSync,\n timeRemoveMillis,\n utimesMillis\n}\n","'use strict'\n\nconst fs = require('graceful-fs')\nconst path = require('path')\nconst invalidWin32Path = require('./win32').invalidWin32Path\n\nconst o777 = parseInt('0777', 8)\n\nfunction mkdirs (p, opts, callback, made) {\n if (typeof opts === 'function') {\n callback = opts\n opts = {}\n } else if (!opts || typeof opts !== 'object') {\n opts = { mode: opts }\n }\n\n if (process.platform === 'win32' && invalidWin32Path(p)) {\n const errInval = new Error(p + ' contains invalid WIN32 path characters.')\n errInval.code = 'EINVAL'\n return callback(errInval)\n }\n\n let mode = opts.mode\n const xfs = opts.fs || fs\n\n if (mode === undefined) {\n mode = o777 & (~process.umask())\n }\n if (!made) made = null\n\n callback = callback || function () {}\n p = path.resolve(p)\n\n xfs.mkdir(p, mode, er => {\n if (!er) {\n made = made || p\n return callback(null, made)\n }\n switch (er.code) {\n case 'ENOENT':\n if (path.dirname(p) === p) return callback(er)\n mkdirs(path.dirname(p), opts, (er, made) => {\n if (er) callback(er, made)\n else mkdirs(p, opts, callback, made)\n })\n break\n\n // In the case of any other error, just see if there's a dir\n // there already. If so, then hooray! 
If not, then something\n // is borked.\n default:\n xfs.stat(p, (er2, stat) => {\n // if the stat fails, then that's super weird.\n // let the original error be the failure reason.\n if (er2 || !stat.isDirectory()) callback(er, made)\n else callback(null, made)\n })\n break\n }\n })\n}\n\nmodule.exports = mkdirs\n","'use strict'\n\nconst fs = require('graceful-fs')\nconst path = require('path')\nconst invalidWin32Path = require('./win32').invalidWin32Path\n\nconst o777 = parseInt('0777', 8)\n\nfunction mkdirsSync (p, opts, made) {\n if (!opts || typeof opts !== 'object') {\n opts = { mode: opts }\n }\n\n let mode = opts.mode\n const xfs = opts.fs || fs\n\n if (process.platform === 'win32' && invalidWin32Path(p)) {\n const errInval = new Error(p + ' contains invalid WIN32 path characters.')\n errInval.code = 'EINVAL'\n throw errInval\n }\n\n if (mode === undefined) {\n mode = o777 & (~process.umask())\n }\n if (!made) made = null\n\n p = path.resolve(p)\n\n try {\n xfs.mkdirSync(p, mode)\n made = made || p\n } catch (err0) {\n switch (err0.code) {\n case 'ENOENT':\n if (path.dirname(p) === p) throw err0\n made = mkdirsSync(path.dirname(p), opts, made)\n mkdirsSync(p, opts, made)\n break\n\n // In the case of any other error, just see if there's a dir\n // there already. If so, then hooray! If not, then something\n // is borked.\n default:\n let stat\n try {\n stat = xfs.statSync(p)\n } catch (err1) {\n throw err0\n }\n if (!stat.isDirectory()) throw err0\n break\n }\n }\n\n return made\n}\n\nmodule.exports = mkdirsSync\n","'use strict'\n\nconst fs = require('graceful-fs')\nconst path = require('path')\nconst copyFileSync = require('./copy-file-sync')\nconst mkdir = require('../mkdirs')\n\nfunction copySync (src, dest, options) {\n if (typeof options === 'function' || options instanceof RegExp) {\n options = {filter: options}\n }\n\n options = options || {}\n options.recursive = !!options.recursive\n\n // default to true for now\n options.clobber = 'clobber' in options ? !!options.clobber : true\n // overwrite falls back to clobber\n options.overwrite = 'overwrite' in options ? !!options.overwrite : options.clobber\n options.dereference = 'dereference' in options ? !!options.dereference : false\n options.preserveTimestamps = 'preserveTimestamps' in options ? !!options.preserveTimestamps : false\n\n options.filter = options.filter || function () { return true }\n\n // Warn about using preserveTimestamps on 32-bit node:\n if (options.preserveTimestamps && process.arch === 'ia32') {\n console.warn(`fs-extra: Using the preserveTimestamps option in 32-bit node is not recommended;\\n\n see https://github.com/jprichardson/node-fs-extra/issues/269`)\n }\n\n const stats = (options.recursive && !options.dereference) ? 
fs.lstatSync(src) : fs.statSync(src)\n const destFolder = path.dirname(dest)\n const destFolderExists = fs.existsSync(destFolder)\n let performCopy = false\n\n if (options.filter instanceof RegExp) {\n console.warn('Warning: fs-extra: Passing a RegExp filter is deprecated, use a function')\n performCopy = options.filter.test(src)\n } else if (typeof options.filter === 'function') performCopy = options.filter(src, dest)\n\n if (stats.isFile() && performCopy) {\n if (!destFolderExists) mkdir.mkdirsSync(destFolder)\n copyFileSync(src, dest, {\n overwrite: options.overwrite,\n errorOnExist: options.errorOnExist,\n preserveTimestamps: options.preserveTimestamps\n })\n } else if (stats.isDirectory() && performCopy) {\n if (!fs.existsSync(dest)) mkdir.mkdirsSync(dest)\n const contents = fs.readdirSync(src)\n contents.forEach(content => {\n const opts = options\n opts.recursive = true\n copySync(path.join(src, content), path.join(dest, content), opts)\n })\n } else if (options.recursive && stats.isSymbolicLink() && performCopy) {\n const srcPath = fs.readlinkSync(src)\n fs.symlinkSync(srcPath, dest)\n }\n}\n\nmodule.exports = copySync\n","'use strict'\n\nconst fs = require('graceful-fs')\n\nconst BUF_LENGTH = 64 * 1024\nconst _buff = require('../util/buffer')(BUF_LENGTH)\n\nfunction copyFileSync (srcFile, destFile, options) {\n const overwrite = options.overwrite\n const errorOnExist = options.errorOnExist\n const preserveTimestamps = options.preserveTimestamps\n\n if (fs.existsSync(destFile)) {\n if (overwrite) {\n fs.unlinkSync(destFile)\n } else if (errorOnExist) {\n throw new Error(`${destFile} already exists`)\n } else return\n }\n\n const fdr = fs.openSync(srcFile, 'r')\n const stat = fs.fstatSync(fdr)\n const fdw = fs.openSync(destFile, 'w', stat.mode)\n let bytesRead = 1\n let pos = 0\n\n while (bytesRead > 0) {\n bytesRead = fs.readSync(fdr, _buff, 0, BUF_LENGTH, pos)\n fs.writeSync(fdw, _buff, 0, bytesRead)\n pos += bytesRead\n }\n\n if (preserveTimestamps) {\n fs.futimesSync(fdw, stat.atime, stat.mtime)\n }\n\n fs.closeSync(fdr)\n fs.closeSync(fdw)\n}\n\nmodule.exports = copyFileSync\n","'use strict'\n\nconst fs = require('graceful-fs')\nconst path = require('path')\nconst assert = require('assert')\n\nconst isWindows = (process.platform === 'win32')\n\nfunction defaults (options) {\n const methods = [\n 'unlink',\n 'chmod',\n 'stat',\n 'lstat',\n 'rmdir',\n 'readdir'\n ]\n methods.forEach(m => {\n options[m] = options[m] || fs[m]\n m = m + 'Sync'\n options[m] = options[m] || fs[m]\n })\n\n options.maxBusyTries = options.maxBusyTries || 3\n}\n\nfunction rimraf (p, options, cb) {\n let busyTries = 0\n\n if (typeof options === 'function') {\n cb = options\n options = {}\n }\n\n assert(p, 'rimraf: missing path')\n assert.equal(typeof p, 'string', 'rimraf: path should be a string')\n assert.equal(typeof cb, 'function', 'rimraf: callback function required')\n assert(options, 'rimraf: invalid options argument provided')\n assert.equal(typeof options, 'object', 'rimraf: options should be object')\n\n defaults(options)\n\n rimraf_(p, options, function CB (er) {\n if (er) {\n if ((er.code === 'EBUSY' || er.code === 'ENOTEMPTY' || er.code === 'EPERM') &&\n busyTries < options.maxBusyTries) {\n busyTries++\n let time = busyTries * 100\n // try again, with the same exact callback as this one.\n return setTimeout(() => rimraf_(p, options, CB), time)\n }\n\n // already gone\n if (er.code === 'ENOENT') er = null\n }\n\n cb(er)\n })\n}\n\n// Two possible strategies.\n// 1. Assume it's a file. 
unlink it, then do the dir stuff on EPERM or EISDIR\n// 2. Assume it's a directory. readdir, then do the file stuff on ENOTDIR\n//\n// Both result in an extra syscall when you guess wrong. However, there\n// are likely far more normal files in the world than directories. This\n// is based on the assumption that a the average number of files per\n// directory is >= 1.\n//\n// If anyone ever complains about this, then I guess the strategy could\n// be made configurable somehow. But until then, YAGNI.\nfunction rimraf_ (p, options, cb) {\n assert(p)\n assert(options)\n assert(typeof cb === 'function')\n\n // sunos lets the root user unlink directories, which is... weird.\n // so we have to lstat here and make sure it's not a dir.\n options.lstat(p, (er, st) => {\n if (er && er.code === 'ENOENT') {\n return cb(null)\n }\n\n // Windows can EPERM on stat. Life is suffering.\n if (er && er.code === 'EPERM' && isWindows) {\n return fixWinEPERM(p, options, er, cb)\n }\n\n if (st && st.isDirectory()) {\n return rmdir(p, options, er, cb)\n }\n\n options.unlink(p, er => {\n if (er) {\n if (er.code === 'ENOENT') {\n return cb(null)\n }\n if (er.code === 'EPERM') {\n return (isWindows)\n ? fixWinEPERM(p, options, er, cb)\n : rmdir(p, options, er, cb)\n }\n if (er.code === 'EISDIR') {\n return rmdir(p, options, er, cb)\n }\n }\n return cb(er)\n })\n })\n}\n\nfunction fixWinEPERM (p, options, er, cb) {\n assert(p)\n assert(options)\n assert(typeof cb === 'function')\n if (er) {\n assert(er instanceof Error)\n }\n\n options.chmod(p, 0o666, er2 => {\n if (er2) {\n cb(er2.code === 'ENOENT' ? null : er)\n } else {\n options.stat(p, (er3, stats) => {\n if (er3) {\n cb(er3.code === 'ENOENT' ? null : er)\n } else if (stats.isDirectory()) {\n rmdir(p, options, er, cb)\n } else {\n options.unlink(p, cb)\n }\n })\n }\n })\n}\n\nfunction fixWinEPERMSync (p, options, er) {\n let stats\n\n assert(p)\n assert(options)\n if (er) {\n assert(er instanceof Error)\n }\n\n try {\n options.chmodSync(p, 0o666)\n } catch (er2) {\n if (er2.code === 'ENOENT') {\n return\n } else {\n throw er\n }\n }\n\n try {\n stats = options.statSync(p)\n } catch (er3) {\n if (er3.code === 'ENOENT') {\n return\n } else {\n throw er\n }\n }\n\n if (stats.isDirectory()) {\n rmdirSync(p, options, er)\n } else {\n options.unlinkSync(p)\n }\n}\n\nfunction rmdir (p, options, originalEr, cb) {\n assert(p)\n assert(options)\n if (originalEr) {\n assert(originalEr instanceof Error)\n }\n assert(typeof cb === 'function')\n\n // try to rmdir first, and only readdir on ENOTEMPTY or EEXIST (SunOS)\n // if we guessed wrong, and it's not a directory, then\n // raise the original error.\n options.rmdir(p, er => {\n if (er && (er.code === 'ENOTEMPTY' || er.code === 'EEXIST' || er.code === 'EPERM')) {\n rmkids(p, options, cb)\n } else if (er && er.code === 'ENOTDIR') {\n cb(originalEr)\n } else {\n cb(er)\n }\n })\n}\n\nfunction rmkids (p, options, cb) {\n assert(p)\n assert(options)\n assert(typeof cb === 'function')\n\n options.readdir(p, (er, files) => {\n if (er) return cb(er)\n\n let n = files.length\n let errState\n\n if (n === 0) return options.rmdir(p, cb)\n\n files.forEach(f => {\n rimraf(path.join(p, f), options, er => {\n if (errState) {\n return\n }\n if (er) return cb(errState = er)\n if (--n === 0) {\n options.rmdir(p, cb)\n }\n })\n })\n })\n}\n\n// this looks simpler, and is strictly *faster*, but will\n// tie up the JavaScript thread and fail on excessively\n// deep directory trees.\nfunction rimrafSync (p, options) {\n let st\n\n options = 
options || {}\n defaults(options)\n\n assert(p, 'rimraf: missing path')\n assert.equal(typeof p, 'string', 'rimraf: path should be a string')\n assert(options, 'rimraf: missing options')\n assert.equal(typeof options, 'object', 'rimraf: options should be object')\n\n try {\n st = options.lstatSync(p)\n } catch (er) {\n if (er.code === 'ENOENT') {\n return\n }\n\n // Windows can EPERM on stat. Life is suffering.\n if (er.code === 'EPERM' && isWindows) {\n fixWinEPERMSync(p, options, er)\n }\n }\n\n try {\n // sunos lets the root user unlink directories, which is... weird.\n if (st && st.isDirectory()) {\n rmdirSync(p, options, null)\n } else {\n options.unlinkSync(p)\n }\n } catch (er) {\n if (er.code === 'ENOENT') {\n return\n } else if (er.code === 'EPERM') {\n return isWindows ? fixWinEPERMSync(p, options, er) : rmdirSync(p, options, er)\n } else if (er.code !== 'EISDIR') {\n throw er\n }\n rmdirSync(p, options, er)\n }\n}\n\nfunction rmdirSync (p, options, originalEr) {\n assert(p)\n assert(options)\n if (originalEr) {\n assert(originalEr instanceof Error)\n }\n\n try {\n options.rmdirSync(p)\n } catch (er) {\n if (er.code === 'ENOTDIR') {\n throw originalEr\n } else if (er.code === 'ENOTEMPTY' || er.code === 'EEXIST' || er.code === 'EPERM') {\n rmkidsSync(p, options)\n } else if (er.code !== 'ENOENT') {\n throw er\n }\n }\n}\n\nfunction rmkidsSync (p, options) {\n assert(p)\n assert(options)\n options.readdirSync(p).forEach(f => rimrafSync(path.join(p, f), options))\n\n // We only end up here once we got ENOTEMPTY at least once, and\n // at this point, we are guaranteed to have removed all the kids.\n // So, we know that it won't be ENOENT or ENOTDIR or anything else.\n // try really hard to delete stuff on windows, because it has a\n // PROFOUNDLY annoying habit of not closing handles promptly when\n // files are deleted, resulting in spurious ENOTEMPTY errors.\n const retries = isWindows ? 100 : 1\n let i = 0\n do {\n let threw = true\n try {\n const ret = options.rmdirSync(p, options)\n threw = false\n return ret\n } finally {\n if (++i < retries && threw) continue // eslint-disable-line\n }\n } while (true)\n}\n\nmodule.exports = rimraf\nrimraf.sync = rimrafSync\n","'use strict'\n\nconst u = require('universalify').fromCallback\nconst jsonFile = require('./jsonfile')\n\njsonFile.outputJson = u(require('./output-json'))\njsonFile.outputJsonSync = require('./output-json-sync')\n// aliases\njsonFile.outputJSON = jsonFile.outputJson\njsonFile.outputJSONSync = jsonFile.outputJsonSync\njsonFile.writeJSON = jsonFile.writeJson\njsonFile.writeJSONSync = jsonFile.writeJsonSync\njsonFile.readJSON = jsonFile.readJson\njsonFile.readJSONSync = jsonFile.readJsonSync\n\nmodule.exports = jsonFile\n","var _fs\ntry {\n _fs = require('graceful-fs')\n} catch (_) {\n _fs = require('fs')\n}\n\nfunction readFile (file, options, callback) {\n if (callback == null) {\n callback = options\n options = {}\n }\n\n if (typeof options === 'string') {\n options = {encoding: options}\n }\n\n options = options || {}\n var fs = options.fs || _fs\n\n var shouldThrow = true\n if ('throws' in options) {\n shouldThrow = options.throws\n }\n\n fs.readFile(file, options, function (err, data) {\n if (err) return callback(err)\n\n data = stripBom(data)\n\n var obj\n try {\n obj = JSON.parse(data, options ? 
options.reviver : null)\n } catch (err2) {\n if (shouldThrow) {\n err2.message = file + ': ' + err2.message\n return callback(err2)\n } else {\n return callback(null, null)\n }\n }\n\n callback(null, obj)\n })\n}\n\nfunction readFileSync (file, options) {\n options = options || {}\n if (typeof options === 'string') {\n options = {encoding: options}\n }\n\n var fs = options.fs || _fs\n\n var shouldThrow = true\n if ('throws' in options) {\n shouldThrow = options.throws\n }\n\n try {\n var content = fs.readFileSync(file, options)\n content = stripBom(content)\n return JSON.parse(content, options.reviver)\n } catch (err) {\n if (shouldThrow) {\n err.message = file + ': ' + err.message\n throw err\n } else {\n return null\n }\n }\n}\n\nfunction stringify (obj, options) {\n var spaces\n var EOL = '\\n'\n if (typeof options === 'object' && options !== null) {\n if (options.spaces) {\n spaces = options.spaces\n }\n if (options.EOL) {\n EOL = options.EOL\n }\n }\n\n var str = JSON.stringify(obj, options ? options.replacer : null, spaces)\n\n return str.replace(/\\n/g, EOL) + EOL\n}\n\nfunction writeFile (file, obj, options, callback) {\n if (callback == null) {\n callback = options\n options = {}\n }\n options = options || {}\n var fs = options.fs || _fs\n\n var str = ''\n try {\n str = stringify(obj, options)\n } catch (err) {\n // Need to return whether a callback was passed or not\n if (callback) callback(err, null)\n return\n }\n\n fs.writeFile(file, str, options, callback)\n}\n\nfunction writeFileSync (file, obj, options) {\n options = options || {}\n var fs = options.fs || _fs\n\n var str = stringify(obj, options)\n // not sure if fs.writeFileSync returns anything, but just in case\n return fs.writeFileSync(file, str, options)\n}\n\nfunction stripBom (content) {\n // we do this because JSON.parse would convert it to a utf8 string if encoding wasn't specified\n if (Buffer.isBuffer(content)) content = content.toString('utf8')\n content = content.replace(/^\\uFEFF/, '')\n return content\n}\n\nvar jsonfile = {\n readFile: readFile,\n readFileSync: readFileSync,\n writeFile: writeFile,\n writeFileSync: writeFileSync\n}\n\nmodule.exports = jsonfile\n","'use strict'\n\nconst path = require('path')\nconst mkdir = require('../mkdirs')\nconst pathExists = require('../path-exists').pathExists\nconst jsonFile = require('./jsonfile')\n\nfunction outputJson (file, data, options, callback) {\n if (typeof options === 'function') {\n callback = options\n options = {}\n }\n\n const dir = path.dirname(file)\n\n pathExists(dir, (err, itDoes) => {\n if (err) return callback(err)\n if (itDoes) return jsonFile.writeJson(file, data, options, callback)\n\n mkdir.mkdirs(dir, err => {\n if (err) return callback(err)\n jsonFile.writeJson(file, data, options, callback)\n })\n })\n}\n\nmodule.exports = outputJson\n","'use strict'\n\nconst fs = require('graceful-fs')\nconst path = require('path')\nconst mkdir = require('../mkdirs')\nconst jsonFile = require('./jsonfile')\n\nfunction outputJsonSync (file, data, options) {\n const dir = path.dirname(file)\n\n if (!fs.existsSync(dir)) {\n mkdir.mkdirsSync(dir)\n }\n\n jsonFile.writeJsonSync(file, data, options)\n}\n\nmodule.exports = outputJsonSync\n","'use strict'\n\n// most of this code was written by Andrew Kelley\n// licensed under the BSD license: see\n// https://github.com/andrewrk/node-mv/blob/master/package.json\n\n// this needs a cleanup\n\nconst u = require('universalify').fromCallback\nconst fs = require('graceful-fs')\nconst ncp = require('../copy/ncp')\nconst 
path = require('path')\nconst remove = require('../remove').remove\nconst mkdirp = require('../mkdirs').mkdirs\n\nfunction move (src, dest, options, callback) {\n if (typeof options === 'function') {\n callback = options\n options = {}\n }\n\n const overwrite = options.overwrite || options.clobber || false\n\n isSrcSubdir(src, dest, (err, itIs) => {\n if (err) return callback(err)\n if (itIs) return callback(new Error(`Cannot move '${src}' to a subdirectory of itself, '${dest}'.`))\n mkdirp(path.dirname(dest), err => {\n if (err) return callback(err)\n doRename()\n })\n })\n\n function doRename () {\n if (path.resolve(src) === path.resolve(dest)) {\n fs.access(src, callback)\n } else if (overwrite) {\n fs.rename(src, dest, err => {\n if (!err) return callback()\n\n if (err.code === 'ENOTEMPTY' || err.code === 'EEXIST') {\n remove(dest, err => {\n if (err) return callback(err)\n options.overwrite = false // just overwriteed it, no need to do it again\n move(src, dest, options, callback)\n })\n return\n }\n\n // weird Windows shit\n if (err.code === 'EPERM') {\n setTimeout(() => {\n remove(dest, err => {\n if (err) return callback(err)\n options.overwrite = false\n move(src, dest, options, callback)\n })\n }, 200)\n return\n }\n\n if (err.code !== 'EXDEV') return callback(err)\n moveAcrossDevice(src, dest, overwrite, callback)\n })\n } else {\n fs.link(src, dest, err => {\n if (err) {\n if (err.code === 'EXDEV' || err.code === 'EISDIR' || err.code === 'EPERM' || err.code === 'ENOTSUP') {\n return moveAcrossDevice(src, dest, overwrite, callback)\n }\n return callback(err)\n }\n return fs.unlink(src, callback)\n })\n }\n }\n}\n\nfunction moveAcrossDevice (src, dest, overwrite, callback) {\n fs.stat(src, (err, stat) => {\n if (err) return callback(err)\n\n if (stat.isDirectory()) {\n moveDirAcrossDevice(src, dest, overwrite, callback)\n } else {\n moveFileAcrossDevice(src, dest, overwrite, callback)\n }\n })\n}\n\nfunction moveFileAcrossDevice (src, dest, overwrite, callback) {\n const flags = overwrite ? 
'w' : 'wx'\n const ins = fs.createReadStream(src)\n const outs = fs.createWriteStream(dest, { flags })\n\n ins.on('error', err => {\n ins.destroy()\n outs.destroy()\n outs.removeListener('close', onClose)\n\n // may want to create a directory but `out` line above\n // creates an empty file for us: See #108\n // don't care about error here\n fs.unlink(dest, () => {\n // note: `err` here is from the input stream errror\n if (err.code === 'EISDIR' || err.code === 'EPERM') {\n moveDirAcrossDevice(src, dest, overwrite, callback)\n } else {\n callback(err)\n }\n })\n })\n\n outs.on('error', err => {\n ins.destroy()\n outs.destroy()\n outs.removeListener('close', onClose)\n callback(err)\n })\n\n outs.once('close', onClose)\n ins.pipe(outs)\n\n function onClose () {\n fs.unlink(src, callback)\n }\n}\n\nfunction moveDirAcrossDevice (src, dest, overwrite, callback) {\n const options = {\n overwrite: false\n }\n\n if (overwrite) {\n remove(dest, err => {\n if (err) return callback(err)\n startNcp()\n })\n } else {\n startNcp()\n }\n\n function startNcp () {\n ncp(src, dest, options, err => {\n if (err) return callback(err)\n remove(src, callback)\n })\n }\n}\n\n// return true if dest is a subdir of src, otherwise false.\n// extract dest base dir and check if that is the same as src basename\nfunction isSrcSubdir (src, dest, cb) {\n fs.stat(src, (err, st) => {\n if (err) return cb(err)\n if (st.isDirectory()) {\n const baseDir = dest.split(path.dirname(src) + path.sep)[1]\n if (baseDir) {\n const destBasename = baseDir.split(path.sep)[0]\n if (destBasename) return cb(null, src !== dest && dest.indexOf(src) > -1 && destBasename === path.basename(src))\n return cb(null, false)\n }\n return cb(null, false)\n }\n return cb(null, false)\n })\n}\n\nmodule.exports = {\n move: u(move)\n}\n","'use strict'\n\nconst fs = require('graceful-fs')\nconst path = require('path')\nconst copySync = require('../copy-sync').copySync\nconst removeSync = require('../remove').removeSync\nconst mkdirpSync = require('../mkdirs').mkdirsSync\nconst buffer = require('../util/buffer')\n\nfunction moveSync (src, dest, options) {\n options = options || {}\n const overwrite = options.overwrite || options.clobber || false\n\n src = path.resolve(src)\n dest = path.resolve(dest)\n\n if (src === dest) return fs.accessSync(src)\n\n if (isSrcSubdir(src, dest)) throw new Error(`Cannot move '${src}' into itself '${dest}'.`)\n\n mkdirpSync(path.dirname(dest))\n tryRenameSync()\n\n function tryRenameSync () {\n if (overwrite) {\n try {\n return fs.renameSync(src, dest)\n } catch (err) {\n if (err.code === 'ENOTEMPTY' || err.code === 'EEXIST' || err.code === 'EPERM') {\n removeSync(dest)\n options.overwrite = false // just overwriteed it, no need to do it again\n return moveSync(src, dest, options)\n }\n\n if (err.code !== 'EXDEV') throw err\n return moveSyncAcrossDevice(src, dest, overwrite)\n }\n } else {\n try {\n fs.linkSync(src, dest)\n return fs.unlinkSync(src)\n } catch (err) {\n if (err.code === 'EXDEV' || err.code === 'EISDIR' || err.code === 'EPERM' || err.code === 'ENOTSUP') {\n return moveSyncAcrossDevice(src, dest, overwrite)\n }\n throw err\n }\n }\n }\n}\n\nfunction moveSyncAcrossDevice (src, dest, overwrite) {\n const stat = fs.statSync(src)\n\n if (stat.isDirectory()) {\n return moveDirSyncAcrossDevice(src, dest, overwrite)\n } else {\n return moveFileSyncAcrossDevice(src, dest, overwrite)\n }\n}\n\nfunction moveFileSyncAcrossDevice (src, dest, overwrite) {\n const BUF_LENGTH = 64 * 1024\n const _buff = buffer(BUF_LENGTH)\n\n 
const flags = overwrite ? 'w' : 'wx'\n\n const fdr = fs.openSync(src, 'r')\n const stat = fs.fstatSync(fdr)\n const fdw = fs.openSync(dest, flags, stat.mode)\n let bytesRead = 1\n let pos = 0\n\n while (bytesRead > 0) {\n bytesRead = fs.readSync(fdr, _buff, 0, BUF_LENGTH, pos)\n fs.writeSync(fdw, _buff, 0, bytesRead)\n pos += bytesRead\n }\n\n fs.closeSync(fdr)\n fs.closeSync(fdw)\n return fs.unlinkSync(src)\n}\n\nfunction moveDirSyncAcrossDevice (src, dest, overwrite) {\n const options = {\n overwrite: false\n }\n\n if (overwrite) {\n removeSync(dest)\n tryCopySync()\n } else {\n tryCopySync()\n }\n\n function tryCopySync () {\n copySync(src, dest, options)\n return removeSync(src)\n }\n}\n\n// return true if dest is a subdir of src, otherwise false.\n// extract dest base dir and check if that is the same as src basename\nfunction isSrcSubdir (src, dest) {\n try {\n return fs.statSync(src).isDirectory() &&\n src !== dest &&\n dest.indexOf(src) > -1 &&\n dest.split(path.dirname(src) + path.sep)[1].split(path.sep)[0] === path.basename(src)\n } catch (e) {\n return false\n }\n}\n\nmodule.exports = {\n moveSync\n}\n","'use strict'\n\nconst u = require('universalify').fromCallback\nconst fs = require('fs')\nconst path = require('path')\nconst mkdir = require('../mkdirs')\nconst remove = require('../remove')\n\nconst emptyDir = u(function emptyDir (dir, callback) {\n callback = callback || function () {}\n fs.readdir(dir, (err, items) => {\n if (err) return mkdir.mkdirs(dir, callback)\n\n items = items.map(item => path.join(dir, item))\n\n deleteItem()\n\n function deleteItem () {\n const item = items.pop()\n if (!item) return callback()\n remove.remove(item, err => {\n if (err) return callback(err)\n deleteItem()\n })\n }\n })\n})\n\nfunction emptyDirSync (dir) {\n let items\n try {\n items = fs.readdirSync(dir)\n } catch (err) {\n return mkdir.mkdirsSync(dir)\n }\n\n items.forEach(item => {\n item = path.join(dir, item)\n remove.removeSync(item)\n })\n}\n\nmodule.exports = {\n emptyDirSync,\n emptydirSync: emptyDirSync,\n emptyDir,\n emptydir: emptyDir\n}\n","'use strict'\n\nconst file = require('./file')\nconst link = require('./link')\nconst symlink = require('./symlink')\n\nmodule.exports = {\n // file\n createFile: file.createFile,\n createFileSync: file.createFileSync,\n ensureFile: file.createFile,\n ensureFileSync: file.createFileSync,\n // link\n createLink: link.createLink,\n createLinkSync: link.createLinkSync,\n ensureLink: link.createLink,\n ensureLinkSync: link.createLinkSync,\n // symlink\n createSymlink: symlink.createSymlink,\n createSymlinkSync: symlink.createSymlinkSync,\n ensureSymlink: symlink.createSymlink,\n ensureSymlinkSync: symlink.createSymlinkSync\n}\n","'use strict'\n\nconst u = require('universalify').fromCallback\nconst path = require('path')\nconst fs = require('graceful-fs')\nconst mkdir = require('../mkdirs')\nconst pathExists = require('../path-exists').pathExists\n\nfunction createFile (file, callback) {\n function makeFile () {\n fs.writeFile(file, '', err => {\n if (err) return callback(err)\n callback()\n })\n }\n\n fs.stat(file, (err, stats) => { // eslint-disable-line handle-callback-err\n if (!err && stats.isFile()) return callback()\n const dir = path.dirname(file)\n pathExists(dir, (err, dirExists) => {\n if (err) return callback(err)\n if (dirExists) return makeFile()\n mkdir.mkdirs(dir, err => {\n if (err) return callback(err)\n makeFile()\n })\n })\n })\n}\n\nfunction createFileSync (file) {\n let stats\n try {\n stats = fs.statSync(file)\n } 
catch (e) {}\n if (stats && stats.isFile()) return\n\n const dir = path.dirname(file)\n if (!fs.existsSync(dir)) {\n mkdir.mkdirsSync(dir)\n }\n\n fs.writeFileSync(file, '')\n}\n\nmodule.exports = {\n createFile: u(createFile),\n createFileSync\n}\n","'use strict'\n\nconst u = require('universalify').fromCallback\nconst path = require('path')\nconst fs = require('graceful-fs')\nconst mkdir = require('../mkdirs')\nconst pathExists = require('../path-exists').pathExists\n\nfunction createLink (srcpath, dstpath, callback) {\n function makeLink (srcpath, dstpath) {\n fs.link(srcpath, dstpath, err => {\n if (err) return callback(err)\n callback(null)\n })\n }\n\n pathExists(dstpath, (err, destinationExists) => {\n if (err) return callback(err)\n if (destinationExists) return callback(null)\n fs.lstat(srcpath, (err, stat) => {\n if (err) {\n err.message = err.message.replace('lstat', 'ensureLink')\n return callback(err)\n }\n\n const dir = path.dirname(dstpath)\n pathExists(dir, (err, dirExists) => {\n if (err) return callback(err)\n if (dirExists) return makeLink(srcpath, dstpath)\n mkdir.mkdirs(dir, err => {\n if (err) return callback(err)\n makeLink(srcpath, dstpath)\n })\n })\n })\n })\n}\n\nfunction createLinkSync (srcpath, dstpath, callback) {\n const destinationExists = fs.existsSync(dstpath)\n if (destinationExists) return undefined\n\n try {\n fs.lstatSync(srcpath)\n } catch (err) {\n err.message = err.message.replace('lstat', 'ensureLink')\n throw err\n }\n\n const dir = path.dirname(dstpath)\n const dirExists = fs.existsSync(dir)\n if (dirExists) return fs.linkSync(srcpath, dstpath)\n mkdir.mkdirsSync(dir)\n\n return fs.linkSync(srcpath, dstpath)\n}\n\nmodule.exports = {\n createLink: u(createLink),\n createLinkSync\n}\n","'use strict'\n\nconst u = require('universalify').fromCallback\nconst path = require('path')\nconst fs = require('graceful-fs')\nconst _mkdirs = require('../mkdirs')\nconst mkdirs = _mkdirs.mkdirs\nconst mkdirsSync = _mkdirs.mkdirsSync\n\nconst _symlinkPaths = require('./symlink-paths')\nconst symlinkPaths = _symlinkPaths.symlinkPaths\nconst symlinkPathsSync = _symlinkPaths.symlinkPathsSync\n\nconst _symlinkType = require('./symlink-type')\nconst symlinkType = _symlinkType.symlinkType\nconst symlinkTypeSync = _symlinkType.symlinkTypeSync\n\nconst pathExists = require('../path-exists').pathExists\n\nfunction createSymlink (srcpath, dstpath, type, callback) {\n callback = (typeof type === 'function') ? type : callback\n type = (typeof type === 'function') ? false : type\n\n pathExists(dstpath, (err, destinationExists) => {\n if (err) return callback(err)\n if (destinationExists) return callback(null)\n symlinkPaths(srcpath, dstpath, (err, relative) => {\n if (err) return callback(err)\n srcpath = relative.toDst\n symlinkType(relative.toCwd, type, (err, type) => {\n if (err) return callback(err)\n const dir = path.dirname(dstpath)\n pathExists(dir, (err, dirExists) => {\n if (err) return callback(err)\n if (dirExists) return fs.symlink(srcpath, dstpath, type, callback)\n mkdirs(dir, err => {\n if (err) return callback(err)\n fs.symlink(srcpath, dstpath, type, callback)\n })\n })\n })\n })\n })\n}\n\nfunction createSymlinkSync (srcpath, dstpath, type, callback) {\n callback = (typeof type === 'function') ? type : callback\n type = (typeof type === 'function') ? 
false : type\n\n const destinationExists = fs.existsSync(dstpath)\n if (destinationExists) return undefined\n\n const relative = symlinkPathsSync(srcpath, dstpath)\n srcpath = relative.toDst\n type = symlinkTypeSync(relative.toCwd, type)\n const dir = path.dirname(dstpath)\n const exists = fs.existsSync(dir)\n if (exists) return fs.symlinkSync(srcpath, dstpath, type)\n mkdirsSync(dir)\n return fs.symlinkSync(srcpath, dstpath, type)\n}\n\nmodule.exports = {\n createSymlink: u(createSymlink),\n createSymlinkSync\n}\n","'use strict'\n\nconst path = require('path')\nconst fs = require('graceful-fs')\nconst pathExists = require('../path-exists').pathExists\n\n/**\n * Function that returns two types of paths, one relative to symlink, and one\n * relative to the current working directory. Checks if path is absolute or\n * relative. If the path is relative, this function checks if the path is\n * relative to symlink or relative to current working directory. This is an\n * initiative to find a smarter `srcpath` to supply when building symlinks.\n * This allows you to determine which path to use out of one of three possible\n * types of source paths. The first is an absolute path. This is detected by\n * `path.isAbsolute()`. When an absolute path is provided, it is checked to\n * see if it exists. If it does it's used, if not an error is returned\n * (callback)/ thrown (sync). The other two options for `srcpath` are a\n * relative url. By default Node's `fs.symlink` works by creating a symlink\n * using `dstpath` and expects the `srcpath` to be relative to the newly\n * created symlink. If you provide a `srcpath` that does not exist on the file\n * system it results in a broken symlink. To minimize this, the function\n * checks to see if the 'relative to symlink' source file exists, and if it\n * does it will use it. 
If it does not, it checks if there's a file that\n * exists that is relative to the current working directory, if does its used.\n * This preserves the expectations of the original fs.symlink spec and adds\n * the ability to pass in `relative to current working direcotry` paths.\n */\n\nfunction symlinkPaths (srcpath, dstpath, callback) {\n if (path.isAbsolute(srcpath)) {\n return fs.lstat(srcpath, (err, stat) => {\n if (err) {\n err.message = err.message.replace('lstat', 'ensureSymlink')\n return callback(err)\n }\n return callback(null, {\n 'toCwd': srcpath,\n 'toDst': srcpath\n })\n })\n } else {\n const dstdir = path.dirname(dstpath)\n const relativeToDst = path.join(dstdir, srcpath)\n return pathExists(relativeToDst, (err, exists) => {\n if (err) return callback(err)\n if (exists) {\n return callback(null, {\n 'toCwd': relativeToDst,\n 'toDst': srcpath\n })\n } else {\n return fs.lstat(srcpath, (err, stat) => {\n if (err) {\n err.message = err.message.replace('lstat', 'ensureSymlink')\n return callback(err)\n }\n return callback(null, {\n 'toCwd': srcpath,\n 'toDst': path.relative(dstdir, srcpath)\n })\n })\n }\n })\n }\n}\n\nfunction symlinkPathsSync (srcpath, dstpath) {\n let exists\n if (path.isAbsolute(srcpath)) {\n exists = fs.existsSync(srcpath)\n if (!exists) throw new Error('absolute srcpath does not exist')\n return {\n 'toCwd': srcpath,\n 'toDst': srcpath\n }\n } else {\n const dstdir = path.dirname(dstpath)\n const relativeToDst = path.join(dstdir, srcpath)\n exists = fs.existsSync(relativeToDst)\n if (exists) {\n return {\n 'toCwd': relativeToDst,\n 'toDst': srcpath\n }\n } else {\n exists = fs.existsSync(srcpath)\n if (!exists) throw new Error('relative srcpath does not exist')\n return {\n 'toCwd': srcpath,\n 'toDst': path.relative(dstdir, srcpath)\n }\n }\n }\n}\n\nmodule.exports = {\n symlinkPaths,\n symlinkPathsSync\n}\n","'use strict'\n\nconst fs = require('graceful-fs')\n\nfunction symlinkType (srcpath, type, callback) {\n callback = (typeof type === 'function') ? type : callback\n type = (typeof type === 'function') ? false : type\n if (type) return callback(null, type)\n fs.lstat(srcpath, (err, stats) => {\n if (err) return callback(null, 'file')\n type = (stats && stats.isDirectory()) ? 'dir' : 'file'\n callback(null, type)\n })\n}\n\nfunction symlinkTypeSync (srcpath, type) {\n let stats\n\n if (type) return type\n try {\n stats = fs.lstatSync(srcpath)\n } catch (e) {\n return 'file'\n }\n return (stats && stats.isDirectory()) ? 
'dir' : 'file'\n}\n\nmodule.exports = {\n symlinkType,\n symlinkTypeSync\n}\n","'use strict'\n\nconst u = require('universalify').fromCallback\nconst fs = require('graceful-fs')\nconst path = require('path')\nconst mkdir = require('../mkdirs')\nconst pathExists = require('../path-exists').pathExists\n\nfunction outputFile (file, data, encoding, callback) {\n if (typeof encoding === 'function') {\n callback = encoding\n encoding = 'utf8'\n }\n\n const dir = path.dirname(file)\n pathExists(dir, (err, itDoes) => {\n if (err) return callback(err)\n if (itDoes) return fs.writeFile(file, data, encoding, callback)\n\n mkdir.mkdirs(dir, err => {\n if (err) return callback(err)\n\n fs.writeFile(file, data, encoding, callback)\n })\n })\n}\n\nfunction outputFileSync (file, data, encoding) {\n const dir = path.dirname(file)\n if (fs.existsSync(dir)) {\n return fs.writeFileSync.apply(fs, arguments)\n }\n mkdir.mkdirsSync(dir)\n fs.writeFileSync.apply(fs, arguments)\n}\n\nmodule.exports = {\n outputFile: u(outputFile),\n outputFileSync\n}\n","\"use strict\";\nfunction __export(m) {\n for (var p in m) if (!exports.hasOwnProperty(p)) exports[p] = m[p];\n}\nObject.defineProperty(exports, \"__esModule\", { value: true });\n__export(require(\"./name-tree\"));\n__export(require(\"./spec\"));\n__export(require(\"./validate-assembly\"));\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\n/**\n * A tree of all names in a module. A node represent a type (terminal)\n * and may represent another node in the namespace (at the same time).\n * Therefore, a key of '_' represents a terminal and references the fqn\n * of the type.\n *\n * For example, say we have the following types:\n * - aws.ec2.Host\n * - aws.ec2.Instance\n * - aws.ec2.Instance.Subtype\n *\n * the the name tree will look like this:\n *\n * module: {\n * children: {\n * aws: {\n * children {\n * ec2: {\n * children: {\n * Host: {\n * fqn: 'aws.ec2.Host',\n * children: {}\n * },\n * Instance: {\n * fqn: 'aws.ec2.Host',\n * children: {\n * Subtype: {\n * fqn: 'aws.ec2.Host.Subtype',\n * children: {}\n * }\n * }\n * }\n * }\n * }\n * }\n * }\n * }\n * }\n */\nclass NameTree {\n /* NameTree.of(assembly) should be used. */\n constructor() {\n this._children = {};\n }\n static of(assm) {\n const nameTree = new NameTree();\n for (const type of Object.values(assm.types || {})) {\n nameTree.register(type.fqn);\n }\n return nameTree;\n }\n /** The children of this node, by name. */\n get children() {\n return this._children;\n }\n /** The fully qualified name of the type at this node, if there is one. 
*/\n get fqn() {\n return this._fqn;\n }\n /**\n * Adds a type to this ``NameTree``.\n *\n * @param type the type to be added.\n * @param path the path at which to add the node under this tree.\n */\n register(fqn, path = fqn.split('.')) {\n if (path.length === 0) {\n this._fqn = fqn;\n }\n else {\n const [head, ...rest] = path;\n if (!this._children[head]) {\n this._children[head] = new NameTree();\n }\n this._children[head].register(fqn, rest);\n }\n return this;\n }\n}\nexports.NameTree = NameTree;\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.SPEC_FILE_NAME = '.jsii';\n/**\n * Versions of the JSII Assembly Specification.\n */\nvar SchemaVersion;\n(function (SchemaVersion) {\n SchemaVersion[\"V1_0\"] = \"jsii/1.0\";\n})(SchemaVersion = exports.SchemaVersion || (exports.SchemaVersion = {}));\n/**\n * Kinds of collections.\n */\nvar CollectionKind;\n(function (CollectionKind) {\n CollectionKind[\"Array\"] = \"array\";\n CollectionKind[\"Map\"] = \"map\";\n})(CollectionKind = exports.CollectionKind || (exports.CollectionKind = {}));\n/**\n * Kinds of primitive types.\n */\nvar PrimitiveType;\n(function (PrimitiveType) {\n PrimitiveType[\"Date\"] = \"date\";\n PrimitiveType[\"String\"] = \"string\";\n PrimitiveType[\"Number\"] = \"number\";\n PrimitiveType[\"Boolean\"] = \"boolean\";\n /**\n * A JSON object\n */\n PrimitiveType[\"Json\"] = \"json\";\n /**\n * Value with \"any\" or \"unknown\" type (aka Object)\n */\n PrimitiveType[\"Any\"] = \"any\";\n})(PrimitiveType = exports.PrimitiveType || (exports.PrimitiveType = {}));\nfunction isNamedTypeReference(ref) {\n return ref != null && !!ref.fqn;\n}\nexports.isNamedTypeReference = isNamedTypeReference;\nfunction isPrimitiveTypeReference(ref) {\n return ref != null && !!ref.primitive;\n}\nexports.isPrimitiveTypeReference = isPrimitiveTypeReference;\nfunction isCollectionTypeReference(ref) {\n return ref != null && !!ref.collection;\n}\nexports.isCollectionTypeReference = isCollectionTypeReference;\nfunction isUnionTypeReference(ref) {\n return ref != null && !!ref.union;\n}\nexports.isUnionTypeReference = isUnionTypeReference;\n/**\n * Kinds of types.\n */\nvar TypeKind;\n(function (TypeKind) {\n TypeKind[\"Class\"] = \"class\";\n TypeKind[\"Enum\"] = \"enum\";\n TypeKind[\"Interface\"] = \"interface\";\n})(TypeKind = exports.TypeKind || (exports.TypeKind = {}));\nfunction isClassType(type) {\n return type != null && type.kind === TypeKind.Class;\n}\nexports.isClassType = isClassType;\nfunction isInterfaceType(type) {\n return type != null && type.kind === TypeKind.Interface;\n}\nexports.isInterfaceType = isInterfaceType;\nfunction isEnumType(type) {\n return type != null && type.kind === TypeKind.Enum;\n}\nexports.isEnumType = isEnumType;\n/**\n * Return whether this type is a class or interface type\n */\nfunction isClassOrInterfaceType(type) {\n return isClassType(type) || isInterfaceType(type);\n}\nexports.isClassOrInterfaceType = isClassOrInterfaceType;\n/**\n * Return a string representation of the given type reference\n */\nfunction describeTypeReference(a) {\n if (a === undefined) {\n return '(none)';\n }\n const optionalMarker = a.optional ? '?' 
: '';\n if (isNamedTypeReference(a)) {\n return `${a.fqn}${optionalMarker}`;\n }\n if (isPrimitiveTypeReference(a)) {\n return `${a.primitive}${optionalMarker}`;\n }\n if (isCollectionTypeReference(a)) {\n return `${a.collection.kind}<${describeTypeReference(a.collection.elementtype)}>${optionalMarker}`;\n }\n if (isUnionTypeReference(a)) {\n const unionType = a.union.types.map(describeTypeReference).join(' | ');\n if (a.optional) {\n return `(${unionType})${optionalMarker}`;\n }\n else {\n return unionType;\n }\n }\n throw new Error('Unrecognized type reference');\n}\nexports.describeTypeReference = describeTypeReference;\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst jsonschema = require(\"jsonschema\");\n// tslint:disable-next-line:no-var-requires\nexports.schema = require('../schema/jsii-spec.schema.json');\nfunction validateAssembly(obj) {\n const validator = new jsonschema.Validator();\n const result = validator.validate(obj, exports.schema);\n if (result.valid) {\n return obj;\n }\n throw new Error(`Invalid assembly:\\n${result}`);\n}\nexports.validateAssembly = validateAssembly;\n","'use strict';\n\nvar Validator = module.exports.Validator = require('./validator');\n\nmodule.exports.ValidatorResult = require('./helpers').ValidatorResult;\nmodule.exports.ValidationError = require('./helpers').ValidationError;\nmodule.exports.SchemaError = require('./helpers').SchemaError;\nmodule.exports.SchemaScanResult = require('./scan').SchemaScanResult;\nmodule.exports.scan = require('./scan').scan;\n\nmodule.exports.validate = function (instance, schema, options) {\n var v = new Validator();\n return v.validate(instance, schema, options);\n};\n","'use strict';\n\nvar urilib = require('url');\n\nvar attribute = require('./attribute');\nvar helpers = require('./helpers');\nvar scanSchema = require('./scan').scan;\nvar ValidatorResult = helpers.ValidatorResult;\nvar SchemaError = helpers.SchemaError;\nvar SchemaContext = helpers.SchemaContext;\n//var anonymousBase = 'vnd.jsonschema:///';\nvar anonymousBase = '/';\n\n/**\n * Creates a new Validator object\n * @name Validator\n * @constructor\n */\nvar Validator = function Validator () {\n // Allow a validator instance to override global custom formats or to have their\n // own custom formats.\n this.customFormats = Object.create(Validator.prototype.customFormats);\n this.schemas = {};\n this.unresolvedRefs = [];\n\n // Use Object.create to make this extensible without Validator instances stepping on each other's toes.\n this.types = Object.create(types);\n this.attributes = Object.create(attribute.validators);\n};\n\n// Allow formats to be registered globally.\nValidator.prototype.customFormats = {};\n\n// Hint at the presence of a property\nValidator.prototype.schemas = null;\nValidator.prototype.types = null;\nValidator.prototype.attributes = null;\nValidator.prototype.unresolvedRefs = null;\n\n/**\n * Adds a schema with a certain urn to the Validator instance.\n * @param schema\n * @param urn\n * @return {Object}\n */\nValidator.prototype.addSchema = function addSchema (schema, base) {\n var self = this;\n if (!schema) {\n return null;\n }\n var scan = scanSchema(base||anonymousBase, schema);\n var ourUri = base || schema.id;\n for(var uri in scan.id){\n this.schemas[uri] = scan.id[uri];\n }\n for(var uri in scan.ref){\n this.unresolvedRefs.push(uri);\n }\n this.unresolvedRefs = this.unresolvedRefs.filter(function(uri){\n return typeof self.schemas[uri]==='undefined';\n });\n return 
this.schemas[ourUri];\n};\n\nValidator.prototype.addSubSchemaArray = function addSubSchemaArray(baseuri, schemas) {\n if(!(schemas instanceof Array)) return;\n for(var i=0; i\", schema);\n }\n var subschema = helpers.objectGetPath(ctx.schemas[document], fragment.substr(1));\n if(subschema===undefined){\n throw new SchemaError(\"no such schema \" + fragment + \" located in <\" + document + \">\", schema);\n }\n return {subschema: subschema, switchSchema: switchSchema};\n};\n\n/**\n * Tests whether the instance if of a certain type.\n * @private\n * @param instance\n * @param schema\n * @param options\n * @param ctx\n * @param type\n * @return {boolean}\n */\nValidator.prototype.testType = function validateType (instance, schema, options, ctx, type) {\n if (typeof this.types[type] == 'function') {\n return this.types[type].call(this, instance);\n }\n if (type && typeof type == 'object') {\n var res = this.validateSchema(instance, type, options, ctx);\n return res === undefined || !(res && res.errors.length);\n }\n // Undefined or properties not on the list are acceptable, same as not being defined\n return true;\n};\n\nvar types = Validator.prototype.types = {};\ntypes.string = function testString (instance) {\n return typeof instance == 'string';\n};\ntypes.number = function testNumber (instance) {\n // isFinite returns false for NaN, Infinity, and -Infinity\n return typeof instance == 'number' && isFinite(instance);\n};\ntypes.integer = function testInteger (instance) {\n return (typeof instance == 'number') && instance % 1 === 0;\n};\ntypes.boolean = function testBoolean (instance) {\n return typeof instance == 'boolean';\n};\ntypes.array = function testArray (instance) {\n return Array.isArray(instance);\n};\ntypes['null'] = function testNull (instance) {\n return instance === null;\n};\ntypes.date = function testDate (instance) {\n return instance instanceof Date;\n};\ntypes.any = function testAny (instance) {\n return true;\n};\ntypes.object = function testObject (instance) {\n // TODO: fix this - see #15\n return instance && (typeof instance) === 'object' && !(instance instanceof Array) && !(instance instanceof Date);\n};\n\nmodule.exports = Validator;\n","'use strict';\n\nvar helpers = require('./helpers');\n\n/** @type ValidatorResult */\nvar ValidatorResult = helpers.ValidatorResult;\n/** @type SchemaError */\nvar SchemaError = helpers.SchemaError;\n\nvar attribute = {};\n\nattribute.ignoreProperties = {\n // informative properties\n 'id': true,\n 'default': true,\n 'description': true,\n 'title': true,\n // arguments to other properties\n 'exclusiveMinimum': true,\n 'exclusiveMaximum': true,\n 'additionalItems': true,\n // special-handled properties\n '$schema': true,\n '$ref': true,\n 'extends': true\n};\n\n/**\n * @name validators\n */\nvar validators = attribute.validators = {};\n\n/**\n * Validates whether the instance if of a certain type\n * @param instance\n * @param schema\n * @param options\n * @param ctx\n * @return {ValidatorResult|null}\n */\nvalidators.type = function validateType (instance, schema, options, ctx) {\n // Ignore undefined instances\n if (instance === undefined) {\n return null;\n }\n var result = new ValidatorResult(instance, schema, options, ctx);\n var types = Array.isArray(schema.type) ? 
schema.type : [schema.type];\n if (!types.some(this.testType.bind(this, instance, schema, options, ctx))) {\n var list = types.map(function (v) {\n return v.id && ('<' + v.id + '>') || (v+'');\n });\n result.addError({\n name: 'type',\n argument: list,\n message: \"is not of a type(s) \" + list,\n });\n }\n return result;\n};\n\nfunction testSchemaNoThrow(instance, options, ctx, callback, schema){\n var throwError = options.throwError;\n options.throwError = false;\n var res = this.validateSchema(instance, schema, options, ctx);\n options.throwError = throwError;\n\n if (! res.valid && callback instanceof Function) {\n callback(res);\n }\n return res.valid;\n}\n\n/**\n * Validates whether the instance matches some of the given schemas\n * @param instance\n * @param schema\n * @param options\n * @param ctx\n * @return {ValidatorResult|null}\n */\nvalidators.anyOf = function validateAnyOf (instance, schema, options, ctx) {\n // Ignore undefined instances\n if (instance === undefined) {\n return null;\n }\n var result = new ValidatorResult(instance, schema, options, ctx);\n var inner = new ValidatorResult(instance, schema, options, ctx);\n if (!Array.isArray(schema.anyOf)){\n throw new SchemaError(\"anyOf must be an array\");\n }\n if (!schema.anyOf.some(\n testSchemaNoThrow.bind(\n this, instance, options, ctx, function(res){inner.importErrors(res);}\n ))) {\n var list = schema.anyOf.map(function (v, i) {\n return (v.id && ('<' + v.id + '>')) || (v.title && JSON.stringify(v.title)) || (v['$ref'] && ('<' + v['$ref'] + '>')) || '[subschema '+i+']';\n });\n if (options.nestedErrors) {\n result.importErrors(inner);\n }\n result.addError({\n name: 'anyOf',\n argument: list,\n message: \"is not any of \" + list.join(','),\n });\n }\n return result;\n};\n\n/**\n * Validates whether the instance matches every given schema\n * @param instance\n * @param schema\n * @param options\n * @param ctx\n * @return {String|null}\n */\nvalidators.allOf = function validateAllOf (instance, schema, options, ctx) {\n // Ignore undefined instances\n if (instance === undefined) {\n return null;\n }\n if (!Array.isArray(schema.allOf)){\n throw new SchemaError(\"allOf must be an array\");\n }\n var result = new ValidatorResult(instance, schema, options, ctx);\n var self = this;\n schema.allOf.forEach(function(v, i){\n var valid = self.validateSchema(instance, v, options, ctx);\n if(!valid.valid){\n var msg = (v.id && ('<' + v.id + '>')) || (v.title && JSON.stringify(v.title)) || (v['$ref'] && ('<' + v['$ref'] + '>')) || '[subschema '+i+']';\n result.addError({\n name: 'allOf',\n argument: { id: msg, length: valid.errors.length, valid: valid },\n message: 'does not match allOf schema ' + msg + ' with ' + valid.errors.length + ' error[s]:',\n });\n result.importErrors(valid);\n }\n });\n return result;\n};\n\n/**\n * Validates whether the instance matches exactly one of the given schemas\n * @param instance\n * @param schema\n * @param options\n * @param ctx\n * @return {String|null}\n */\nvalidators.oneOf = function validateOneOf (instance, schema, options, ctx) {\n // Ignore undefined instances\n if (instance === undefined) {\n return null;\n }\n if (!Array.isArray(schema.oneOf)){\n throw new SchemaError(\"oneOf must be an array\");\n }\n var result = new ValidatorResult(instance, schema, options, ctx);\n var inner = new ValidatorResult(instance, schema, options, ctx);\n var count = schema.oneOf.filter(\n testSchemaNoThrow.bind(\n this, instance, options, ctx, function(res) {inner.importErrors(res);}\n ) ).length;\n 
var list = schema.oneOf.map(function (v, i) {\n return (v.id && ('<' + v.id + '>')) || (v.title && JSON.stringify(v.title)) || (v['$ref'] && ('<' + v['$ref'] + '>')) || '[subschema '+i+']';\n });\n if (count!==1) {\n if (options.nestedErrors) {\n result.importErrors(inner);\n }\n result.addError({\n name: 'oneOf',\n argument: list,\n message: \"is not exactly one from \" + list.join(','),\n });\n }\n return result;\n};\n\n/**\n * Validates properties\n * @param instance\n * @param schema\n * @param options\n * @param ctx\n * @return {String|null|ValidatorResult}\n */\nvalidators.properties = function validateProperties (instance, schema, options, ctx) {\n if(!this.types.object(instance)) return;\n var result = new ValidatorResult(instance, schema, options, ctx);\n var properties = schema.properties || {};\n for (var property in properties) {\n if (typeof options.preValidateProperty == 'function') {\n options.preValidateProperty(instance, property, properties[property], options, ctx);\n }\n\n var prop = Object.hasOwnProperty.call(instance, property) ? instance[property] : undefined;\n var res = this.validateSchema(prop, properties[property], options, ctx.makeChild(properties[property], property));\n if(res.instance !== result.instance[property]) result.instance[property] = res.instance;\n result.importErrors(res);\n }\n return result;\n};\n\n/**\n * Test a specific property within in instance against the additionalProperties schema attribute\n * This ignores properties with definitions in the properties schema attribute, but no other attributes.\n * If too many more types of property-existance tests pop up they may need their own class of tests (like `type` has)\n * @private\n * @return {boolean}\n */\nfunction testAdditionalProperty (instance, schema, options, ctx, property, result) {\n if(!this.types.object(instance)) return;\n if (schema.properties && schema.properties[property] !== undefined) {\n return;\n }\n if (schema.additionalProperties === false) {\n result.addError({\n name: 'additionalProperties',\n argument: property,\n message: \"additionalProperty \" + JSON.stringify(property) + \" exists in instance when not allowed\",\n });\n } else {\n var additionalProperties = schema.additionalProperties || {};\n\n if (typeof options.preValidateProperty == 'function') {\n options.preValidateProperty(instance, property, additionalProperties, options, ctx);\n }\n\n var res = this.validateSchema(instance[property], additionalProperties, options, ctx.makeChild(additionalProperties, property));\n if(res.instance !== result.instance[property]) result.instance[property] = res.instance;\n result.importErrors(res);\n }\n}\n\n/**\n * Validates patternProperties\n * @param instance\n * @param schema\n * @param options\n * @param ctx\n * @return {String|null|ValidatorResult}\n */\nvalidators.patternProperties = function validatePatternProperties (instance, schema, options, ctx) {\n if(!this.types.object(instance)) return;\n var result = new ValidatorResult(instance, schema, options, ctx);\n var patternProperties = schema.patternProperties || {};\n\n for (var property in instance) {\n var test = true;\n for (var pattern in patternProperties) {\n var expr = new RegExp(pattern);\n if (!expr.test(property)) {\n continue;\n }\n test = false;\n\n if (typeof options.preValidateProperty == 'function') {\n options.preValidateProperty(instance, property, patternProperties[pattern], options, ctx);\n }\n\n var res = this.validateSchema(instance[property], patternProperties[pattern], options, 
ctx.makeChild(patternProperties[pattern], property));\n if(res.instance !== result.instance[property]) result.instance[property] = res.instance;\n result.importErrors(res);\n }\n if (test) {\n testAdditionalProperty.call(this, instance, schema, options, ctx, property, result);\n }\n }\n\n return result;\n};\n\n/**\n * Validates additionalProperties\n * @param instance\n * @param schema\n * @param options\n * @param ctx\n * @return {String|null|ValidatorResult}\n */\nvalidators.additionalProperties = function validateAdditionalProperties (instance, schema, options, ctx) {\n if(!this.types.object(instance)) return;\n // if patternProperties is defined then we'll test when that one is called instead\n if (schema.patternProperties) {\n return null;\n }\n var result = new ValidatorResult(instance, schema, options, ctx);\n for (var property in instance) {\n testAdditionalProperty.call(this, instance, schema, options, ctx, property, result);\n }\n return result;\n};\n\n/**\n * Validates whether the instance value is at least of a certain length, when the instance value is a string.\n * @param instance\n * @param schema\n * @return {String|null}\n */\nvalidators.minProperties = function validateMinProperties (instance, schema, options, ctx) {\n if (!this.types.object(instance)) return;\n var result = new ValidatorResult(instance, schema, options, ctx);\n var keys = Object.keys(instance);\n if (!(keys.length >= schema.minProperties)) {\n result.addError({\n name: 'minProperties',\n argument: schema.minProperties,\n message: \"does not meet minimum property length of \" + schema.minProperties,\n })\n }\n return result;\n};\n\n/**\n * Validates whether the instance value is at most of a certain length, when the instance value is a string.\n * @param instance\n * @param schema\n * @return {String|null}\n */\nvalidators.maxProperties = function validateMaxProperties (instance, schema, options, ctx) {\n if (!this.types.object(instance)) return;\n var result = new ValidatorResult(instance, schema, options, ctx);\n var keys = Object.keys(instance);\n if (!(keys.length <= schema.maxProperties)) {\n result.addError({\n name: 'maxProperties',\n argument: schema.maxProperties,\n message: \"does not meet maximum property length of \" + schema.maxProperties,\n });\n }\n return result;\n};\n\n/**\n * Validates items when instance is an array\n * @param instance\n * @param schema\n * @param options\n * @param ctx\n * @return {String|null|ValidatorResult}\n */\nvalidators.items = function validateItems (instance, schema, options, ctx) {\n var self = this;\n if (!this.types.array(instance)) return;\n if (!schema.items) return;\n var result = new ValidatorResult(instance, schema, options, ctx);\n instance.every(function (value, i) {\n var items = Array.isArray(schema.items) ? 
(schema.items[i] || schema.additionalItems) : schema.items;\n if (items === undefined) {\n return true;\n }\n if (items === false) {\n result.addError({\n name: 'items',\n message: \"additionalItems not permitted\",\n });\n return false;\n }\n var res = self.validateSchema(value, items, options, ctx.makeChild(items, i));\n if(res.instance !== result.instance[i]) result.instance[i] = res.instance;\n result.importErrors(res);\n return true;\n });\n return result;\n};\n\n/**\n * Validates minimum and exclusiveMinimum when the type of the instance value is a number.\n * @param instance\n * @param schema\n * @return {String|null}\n */\nvalidators.minimum = function validateMinimum (instance, schema, options, ctx) {\n if (!this.types.number(instance)) return;\n var result = new ValidatorResult(instance, schema, options, ctx);\n var valid = true;\n if (schema.exclusiveMinimum && schema.exclusiveMinimum === true) {\n valid = instance > schema.minimum;\n } else {\n valid = instance >= schema.minimum;\n }\n if (!valid) {\n result.addError({\n name: 'minimum',\n argument: schema.minimum,\n message: \"must have a minimum value of \" + schema.minimum,\n });\n }\n return result;\n};\n\n/**\n * Validates maximum and exclusiveMaximum when the type of the instance value is a number.\n * @param instance\n * @param schema\n * @return {String|null}\n */\nvalidators.maximum = function validateMaximum (instance, schema, options, ctx) {\n if (!this.types.number(instance)) return;\n var result = new ValidatorResult(instance, schema, options, ctx);\n var valid;\n if (schema.exclusiveMaximum && schema.exclusiveMaximum === true) {\n valid = instance < schema.maximum;\n } else {\n valid = instance <= schema.maximum;\n }\n if (!valid) {\n result.addError({\n name: 'maximum',\n argument: schema.maximum,\n message: \"must have a maximum value of \" + schema.maximum,\n });\n }\n return result;\n};\n\n/**\n * Perform validation for multipleOf and divisibleBy, which are essentially the same.\n * @param instance\n * @param schema\n * @param validationType\n * @param errorMessage\n * @returns {String|null}\n */\nvar validateMultipleOfOrDivisbleBy = function validateMultipleOfOrDivisbleBy (instance, schema, options, ctx, validationType, errorMessage) {\n if (!this.types.number(instance)) return;\n\n var validationArgument = schema[validationType];\n if (validationArgument == 0) {\n throw new SchemaError(validationType + \" cannot be zero\");\n }\n\n var result = new ValidatorResult(instance, schema, options, ctx);\n\n var instanceDecimals = helpers.getDecimalPlaces(instance);\n var divisorDecimals = helpers.getDecimalPlaces(validationArgument);\n\n var maxDecimals = Math.max(instanceDecimals , divisorDecimals);\n var multiplier = Math.pow(10, maxDecimals);\n\n if (Math.round(instance * multiplier) % Math.round(validationArgument * multiplier) !== 0) {\n result.addError({\n name: validationType,\n argument: validationArgument,\n message: errorMessage + JSON.stringify(validationArgument)\n });\n }\n\n return result;\n};\n\n/**\n * Validates divisibleBy when the type of the instance value is a number.\n * @param instance\n * @param schema\n * @return {String|null}\n */\nvalidators.multipleOf = function validateMultipleOf (instance, schema, options, ctx) {\n return validateMultipleOfOrDivisbleBy.call(this, instance, schema, options, ctx, \"multipleOf\", \"is not a multiple of (divisible by) \");\n};\n\n/**\n * Validates multipleOf when the type of the instance value is a number.\n * @param instance\n * @param schema\n * @return 
{String|null}\n */\nvalidators.divisibleBy = function validateDivisibleBy (instance, schema, options, ctx) {\n return validateMultipleOfOrDivisbleBy.call(this, instance, schema, options, ctx, \"divisibleBy\", \"is not divisible by (multiple of) \");\n};\n\n/**\n * Validates whether the instance value is present.\n * @param instance\n * @param schema\n * @return {String|null}\n */\nvalidators.required = function validateRequired (instance, schema, options, ctx) {\n var result = new ValidatorResult(instance, schema, options, ctx);\n if (instance === undefined && schema.required === true) {\n // A boolean form is implemented for reverse-compatability with schemas written against older drafts\n result.addError({\n name: 'required',\n message: \"is required\"\n });\n } else if (this.types.object(instance) && Array.isArray(schema.required)) {\n schema.required.forEach(function(n){\n if(instance[n]===undefined){\n result.addError({\n name: 'required',\n argument: n,\n message: \"requires property \" + JSON.stringify(n),\n });\n }\n });\n }\n return result;\n};\n\n/**\n * Validates whether the instance value matches the regular expression, when the instance value is a string.\n * @param instance\n * @param schema\n * @return {String|null}\n */\nvalidators.pattern = function validatePattern (instance, schema, options, ctx) {\n if (!this.types.string(instance)) return;\n var result = new ValidatorResult(instance, schema, options, ctx);\n if (!instance.match(schema.pattern)) {\n result.addError({\n name: 'pattern',\n argument: schema.pattern,\n message: \"does not match pattern \" + JSON.stringify(schema.pattern),\n });\n }\n return result;\n};\n\n/**\n * Validates whether the instance value is of a certain defined format or a custom\n * format.\n * The following formats are supported for string types:\n * - date-time\n * - date\n * - time\n * - ip-address\n * - ipv6\n * - uri\n * - color\n * - host-name\n * - alpha\n * - alpha-numeric\n * - utc-millisec\n * @param instance\n * @param schema\n * @param [options]\n * @param [ctx]\n * @return {String|null}\n */\nvalidators.format = function validateFormat (instance, schema, options, ctx) {\n if (instance===undefined) return;\n var result = new ValidatorResult(instance, schema, options, ctx);\n if (!result.disableFormat && !helpers.isFormat(instance, schema.format, this)) {\n result.addError({\n name: 'format',\n argument: schema.format,\n message: \"does not conform to the \" + JSON.stringify(schema.format) + \" format\",\n });\n }\n return result;\n};\n\n/**\n * Validates whether the instance value is at least of a certain length, when the instance value is a string.\n * @param instance\n * @param schema\n * @return {String|null}\n */\nvalidators.minLength = function validateMinLength (instance, schema, options, ctx) {\n if (!this.types.string(instance)) return;\n var result = new ValidatorResult(instance, schema, options, ctx);\n var hsp = instance.match(/[\\uDC00-\\uDFFF]/g);\n var length = instance.length - (hsp ? 
hsp.length : 0);\n if (!(length >= schema.minLength)) {\n result.addError({\n name: 'minLength',\n argument: schema.minLength,\n message: \"does not meet minimum length of \" + schema.minLength,\n });\n }\n return result;\n};\n\n/**\n * Validates whether the instance value is at most of a certain length, when the instance value is a string.\n * @param instance\n * @param schema\n * @return {String|null}\n */\nvalidators.maxLength = function validateMaxLength (instance, schema, options, ctx) {\n if (!this.types.string(instance)) return;\n var result = new ValidatorResult(instance, schema, options, ctx);\n // TODO if this was already computed in \"minLength\", use that value instead of re-computing\n var hsp = instance.match(/[\\uDC00-\\uDFFF]/g);\n var length = instance.length - (hsp ? hsp.length : 0);\n if (!(length <= schema.maxLength)) {\n result.addError({\n name: 'maxLength',\n argument: schema.maxLength,\n message: \"does not meet maximum length of \" + schema.maxLength,\n });\n }\n return result;\n};\n\n/**\n * Validates whether instance contains at least a minimum number of items, when the instance is an Array.\n * @param instance\n * @param schema\n * @return {String|null}\n */\nvalidators.minItems = function validateMinItems (instance, schema, options, ctx) {\n if (!this.types.array(instance)) return;\n var result = new ValidatorResult(instance, schema, options, ctx);\n if (!(instance.length >= schema.minItems)) {\n result.addError({\n name: 'minItems',\n argument: schema.minItems,\n message: \"does not meet minimum length of \" + schema.minItems,\n });\n }\n return result;\n};\n\n/**\n * Validates whether instance contains no more than a maximum number of items, when the instance is an Array.\n * @param instance\n * @param schema\n * @return {String|null}\n */\nvalidators.maxItems = function validateMaxItems (instance, schema, options, ctx) {\n if (!this.types.array(instance)) return;\n var result = new ValidatorResult(instance, schema, options, ctx);\n if (!(instance.length <= schema.maxItems)) {\n result.addError({\n name: 'maxItems',\n argument: schema.maxItems,\n message: \"does not meet maximum length of \" + schema.maxItems,\n });\n }\n return result;\n};\n\n/**\n * Validates that every item in an instance array is unique, when instance is an array\n * @param instance\n * @param schema\n * @param options\n * @param ctx\n * @return {String|null|ValidatorResult}\n */\nvalidators.uniqueItems = function validateUniqueItems (instance, schema, options, ctx) {\n if (!this.types.array(instance)) return;\n var result = new ValidatorResult(instance, schema, options, ctx);\n function testArrays (v, i, a) {\n for (var j = i + 1; j < a.length; j++) if (helpers.deepCompareStrict(v, a[j])) {\n return false;\n }\n return true;\n }\n if (!instance.every(testArrays)) {\n result.addError({\n name: 'uniqueItems',\n message: \"contains duplicate item\",\n });\n }\n return result;\n};\n\n/**\n * Deep compares arrays for duplicates\n * @param v\n * @param i\n * @param a\n * @private\n * @return {boolean}\n */\nfunction testArrays (v, i, a) {\n var j, len = a.length;\n for (j = i + 1, len; j < len; j++) {\n if (helpers.deepCompareStrict(v, a[j])) {\n return false;\n }\n }\n return true;\n}\n\n/**\n * Validates whether there are no duplicates, when the instance is an Array.\n * @param instance\n * @return {String|null}\n */\nvalidators.uniqueItems = function validateUniqueItems (instance, schema, options, ctx) {\n if (!this.types.array(instance)) return;\n var result = new ValidatorResult(instance, 
schema, options, ctx);\n if (!instance.every(testArrays)) {\n result.addError({\n name: 'uniqueItems',\n message: \"contains duplicate item\",\n });\n }\n return result;\n};\n\n/**\n * Validate for the presence of dependency properties, if the instance is an object.\n * @param instance\n * @param schema\n * @param options\n * @param ctx\n * @return {null|ValidatorResult}\n */\nvalidators.dependencies = function validateDependencies (instance, schema, options, ctx) {\n if (!this.types.object(instance)) return;\n var result = new ValidatorResult(instance, schema, options, ctx);\n for (var property in schema.dependencies) {\n if (instance[property] === undefined) {\n continue;\n }\n var dep = schema.dependencies[property];\n var childContext = ctx.makeChild(dep, property);\n if (typeof dep == 'string') {\n dep = [dep];\n }\n if (Array.isArray(dep)) {\n dep.forEach(function (prop) {\n if (instance[prop] === undefined) {\n result.addError({\n // FIXME there's two different \"dependencies\" errors here with slightly different outputs\n // Can we make these the same? Or should we create different error types?\n name: 'dependencies',\n argument: childContext.propertyPath,\n message: \"property \" + prop + \" not found, required by \" + childContext.propertyPath,\n });\n }\n });\n } else {\n var res = this.validateSchema(instance, dep, options, childContext);\n if(result.instance !== res.instance) result.instance = res.instance;\n if (res && res.errors.length) {\n result.addError({\n name: 'dependencies',\n argument: childContext.propertyPath,\n message: \"does not meet dependency required by \" + childContext.propertyPath,\n });\n result.importErrors(res);\n }\n }\n }\n return result;\n};\n\n/**\n * Validates whether the instance value is one of the enumerated values.\n *\n * @param instance\n * @param schema\n * @return {ValidatorResult|null}\n */\nvalidators['enum'] = function validateEnum (instance, schema, options, ctx) {\n if (instance === undefined) {\n return null;\n }\n if (!Array.isArray(schema['enum'])) {\n throw new SchemaError(\"enum expects an array\", schema);\n }\n var result = new ValidatorResult(instance, schema, options, ctx);\n if (!schema['enum'].some(helpers.deepCompareStrict.bind(null, instance))) {\n result.addError({\n name: 'enum',\n argument: schema['enum'],\n message: \"is not one of enum values: \" + schema['enum'].map(String).join(','),\n });\n }\n return result;\n};\n\n/**\n * Validates whether the instance exactly matches a given value\n *\n * @param instance\n * @param schema\n * @return {ValidatorResult|null}\n */\nvalidators['const'] = function validateEnum (instance, schema, options, ctx) {\n if (instance === undefined) {\n return null;\n }\n var result = new ValidatorResult(instance, schema, options, ctx);\n if (!helpers.deepCompareStrict(schema['const'], instance)) {\n result.addError({\n name: 'const',\n argument: schema['const'],\n message: \"does not exactly match expected constant: \" + schema['const'],\n });\n }\n return result;\n};\n\n/**\n * Validates whether the instance if of a prohibited type.\n * @param instance\n * @param schema\n * @param options\n * @param ctx\n * @return {null|ValidatorResult}\n */\nvalidators.not = validators.disallow = function validateNot (instance, schema, options, ctx) {\n var self = this;\n if(instance===undefined) return null;\n var result = new ValidatorResult(instance, schema, options, ctx);\n var notTypes = schema.not || schema.disallow;\n if(!notTypes) return null;\n if(!Array.isArray(notTypes)) notTypes=[notTypes];\n 
notTypes.forEach(function (type) {\n if (self.testType(instance, schema, options, ctx, type)) {\n var schemaId = type && type.id && ('<' + type.id + '>') || type;\n result.addError({\n name: 'not',\n argument: schemaId,\n message: \"is of prohibited type \" + schemaId,\n });\n }\n });\n return result;\n};\n\nmodule.exports = attribute;\n","'use strict'\n\n// high-level commands\nexports.c = exports.create = require('./lib/create.js')\nexports.r = exports.replace = require('./lib/replace.js')\nexports.t = exports.list = require('./lib/list.js')\nexports.u = exports.update = require('./lib/update.js')\nexports.x = exports.extract = require('./lib/extract.js')\n\n// classes\nexports.Pack = require('./lib/pack.js')\nexports.Unpack = require('./lib/unpack.js')\nexports.Parse = require('./lib/parse.js')\nexports.ReadEntry = require('./lib/read-entry.js')\nexports.WriteEntry = require('./lib/write-entry.js')\nexports.Header = require('./lib/header.js')\nexports.Pax = require('./lib/pax.js')\nexports.types = require('./lib/types.js')\n","'use strict'\n\n// tar -c\nconst hlo = require('./high-level-opt.js')\n\nconst Pack = require('./pack.js')\nconst fs = require('fs')\nconst fsm = require('fs-minipass')\nconst t = require('./list.js')\nconst path = require('path')\n\nconst c = module.exports = (opt_, files, cb) => {\n if (typeof files === 'function')\n cb = files\n\n if (Array.isArray(opt_))\n files = opt_, opt_ = {}\n\n if (!files || !Array.isArray(files) || !files.length)\n throw new TypeError('no files or directories specified')\n\n files = Array.from(files)\n\n const opt = hlo(opt_)\n\n if (opt.sync && typeof cb === 'function')\n throw new TypeError('callback not supported for sync tar functions')\n\n if (!opt.file && typeof cb === 'function')\n throw new TypeError('callback only supported with file option')\n\n return opt.file && opt.sync ? createFileSync(opt, files)\n : opt.file ? createFile(opt, files, cb)\n : opt.sync ? createSync(opt, files)\n : create(opt, files)\n}\n\nconst createFileSync = (opt, files) => {\n const p = new Pack.Sync(opt)\n const stream = new fsm.WriteStreamSync(opt.file, {\n mode: opt.mode || 0o666\n })\n p.pipe(stream)\n addFilesSync(p, files)\n}\n\nconst createFile = (opt, files, cb) => {\n const p = new Pack(opt)\n const stream = new fsm.WriteStream(opt.file, {\n mode: opt.mode || 0o666\n })\n p.pipe(stream)\n\n const promise = new Promise((res, rej) => {\n stream.on('error', rej)\n stream.on('close', res)\n p.on('error', rej)\n })\n\n addFilesAsync(p, files)\n\n return cb ? 
promise.then(cb, cb) : promise\n}\n\nconst addFilesSync = (p, files) => {\n files.forEach(file => {\n if (file.charAt(0) === '@')\n t({\n file: path.resolve(p.cwd, file.substr(1)),\n sync: true,\n noResume: true,\n onentry: entry => p.add(entry)\n })\n else\n p.add(file)\n })\n p.end()\n}\n\nconst addFilesAsync = (p, files) => {\n while (files.length) {\n const file = files.shift()\n if (file.charAt(0) === '@')\n return t({\n file: path.resolve(p.cwd, file.substr(1)),\n noResume: true,\n onentry: entry => p.add(entry)\n }).then(_ => addFilesAsync(p, files))\n else\n p.add(file)\n }\n p.end()\n}\n\nconst createSync = (opt, files) => {\n const p = new Pack.Sync(opt)\n addFilesSync(p, files)\n return p\n}\n\nconst create = (opt, files) => {\n const p = new Pack(opt)\n addFilesAsync(p, files)\n return p\n}\n","'use strict'\nvar Yallist = require('./yallist.js')\n\nYallist.prototype[Symbol.iterator] = function* () {\n for (let walker = this.head; walker; walker = walker.next) {\n yield walker.value\n }\n}\n","module.exports = require(\"string_decoder\");","module.exports = Object.freeze({\n Z_NO_FLUSH: 0,\n Z_PARTIAL_FLUSH: 1,\n Z_SYNC_FLUSH: 2,\n Z_FULL_FLUSH: 3,\n Z_FINISH: 4,\n Z_BLOCK: 5,\n Z_OK: 0,\n Z_STREAM_END: 1,\n Z_NEED_DICT: 2,\n Z_ERRNO: -1,\n Z_STREAM_ERROR: -2,\n Z_DATA_ERROR: -3,\n Z_MEM_ERROR: -4,\n Z_BUF_ERROR: -5,\n Z_VERSION_ERROR: -6,\n Z_NO_COMPRESSION: 0,\n Z_BEST_SPEED: 1,\n Z_BEST_COMPRESSION: 9,\n Z_DEFAULT_COMPRESSION: -1,\n Z_FILTERED: 1,\n Z_HUFFMAN_ONLY: 2,\n Z_RLE: 3,\n Z_FIXED: 4,\n Z_DEFAULT_STRATEGY: 0,\n ZLIB_VERNUM: 4736,\n DEFLATE: 1,\n INFLATE: 2,\n GZIP: 3,\n GUNZIP: 4,\n DEFLATERAW: 5,\n INFLATERAW: 6,\n UNZIP: 7,\n Z_MIN_WINDOWBITS: 8,\n Z_MAX_WINDOWBITS: 15,\n Z_DEFAULT_WINDOWBITS: 15,\n Z_MIN_CHUNK: 64,\n Z_MAX_CHUNK: Infinity,\n Z_DEFAULT_CHUNK: 16384,\n Z_MIN_MEMLEVEL: 1,\n Z_MAX_MEMLEVEL: 9,\n Z_DEFAULT_MEMLEVEL: 8,\n Z_MIN_LEVEL: -1,\n Z_MAX_LEVEL: 9,\n Z_DEFAULT_LEVEL: -1\n})\n","'use strict'\n// Tar can encode large and negative numbers using a leading byte of\n// 0xff for negative, and 0x80 for positive. The trailing byte in the\n// section will always be 0x20, or in some implementations 0x00.\n// this module encodes and decodes these things.\n\nconst encode = exports.encode = (num, buf) => {\n buf[buf.length - 1] = 0x20\n if (num < 0)\n encodeNegative(num, buf)\n else\n encodePositive(num, buf)\n return buf\n}\n\nconst encodePositive = (num, buf) => {\n buf[0] = 0x80\n for (var i = buf.length - 2; i > 0; i--) {\n if (num === 0)\n buf[i] = 0\n else {\n buf[i] = num % 0x100\n num = Math.floor(num / 0x100)\n }\n }\n}\n\nconst encodeNegative = (num, buf) => {\n buf[0] = 0xff\n var flipped = false\n num = num * -1\n for (var i = buf.length - 2; i > 0; i--) {\n var byte\n if (num === 0)\n byte = 0\n else {\n byte = num % 0x100\n num = Math.floor(num / 0x100)\n }\n if (flipped)\n buf[i] = onesComp(byte)\n else if (byte === 0)\n buf[i] = 0\n else {\n flipped = true\n buf[i] = twosComp(byte)\n }\n }\n}\n\nconst parse = exports.parse = (buf) => {\n var post = buf[buf.length - 1]\n var pre = buf[0]\n return pre === 0x80 ? 
pos(buf.slice(1, buf.length - 1))\n : twos(buf.slice(1, buf.length - 1))\n}\n\nconst twos = (buf) => {\n var len = buf.length\n var sum = 0\n var flipped = false\n for (var i = len - 1; i > -1; i--) {\n var byte = buf[i]\n var f\n if (flipped)\n f = onesComp(byte)\n else if (byte === 0)\n f = byte\n else {\n flipped = true\n f = twosComp(byte)\n }\n if (f !== 0)\n sum += f * Math.pow(256, len - i - 1)\n }\n return sum * -1\n}\n\nconst pos = (buf) => {\n var len = buf.length\n var sum = 0\n for (var i = len - 1; i > -1; i--) {\n var byte = buf[i]\n if (byte !== 0)\n sum += byte * Math.pow(256, len - i - 1)\n }\n return sum\n}\n\nconst onesComp = byte => (0xff ^ byte) & 0xff\n\nconst twosComp = byte => ((0xff ^ byte) + 1) & 0xff\n","'use strict'\nmodule.exports = (mode, isDir) => {\n mode &= 0o7777\n // if dirs are readable, then they should be listable\n if (isDir) {\n if (mode & 0o400)\n mode |= 0o100\n if (mode & 0o40)\n mode |= 0o10\n if (mode & 0o4)\n mode |= 0o1\n }\n return mode\n}\n","'use strict'\n\n// tar -u\n\nconst hlo = require('./high-level-opt.js')\nconst r = require('./replace.js')\n// just call tar.r with the filter and mtimeCache\n\nconst u = module.exports = (opt_, files, cb) => {\n const opt = hlo(opt_)\n\n if (!opt.file)\n throw new TypeError('file is required')\n\n if (opt.gzip)\n throw new TypeError('cannot append to compressed archives')\n\n if (!files || !Array.isArray(files) || !files.length)\n throw new TypeError('no files or directories specified')\n\n files = Array.from(files)\n\n mtimeFilter(opt)\n return r(opt, files, cb)\n}\n\nconst mtimeFilter = opt => {\n const filter = opt.filter\n\n if (!opt.mtimeCache)\n opt.mtimeCache = new Map()\n\n opt.filter = filter ? (path, stat) =>\n filter(path, stat) && !(opt.mtimeCache.get(path) > stat.mtime)\n : (path, stat) => !(opt.mtimeCache.get(path) > stat.mtime)\n}\n","'use strict'\n\n// tar -x\nconst hlo = require('./high-level-opt.js')\nconst Unpack = require('./unpack.js')\nconst fs = require('fs')\nconst fsm = require('fs-minipass')\nconst path = require('path')\n\nconst x = module.exports = (opt_, files, cb) => {\n if (typeof opt_ === 'function')\n cb = opt_, files = null, opt_ = {}\n else if (Array.isArray(opt_))\n files = opt_, opt_ = {}\n\n if (typeof files === 'function')\n cb = files, files = null\n\n if (!files)\n files = []\n else\n files = Array.from(files)\n\n const opt = hlo(opt_)\n\n if (opt.sync && typeof cb === 'function')\n throw new TypeError('callback not supported for sync tar functions')\n\n if (!opt.file && typeof cb === 'function')\n throw new TypeError('callback only supported with file option')\n\n if (files.length)\n filesFilter(opt, files)\n\n return opt.file && opt.sync ? extractFileSync(opt)\n : opt.file ? extractFile(opt, cb)\n : opt.sync ? extractSync(opt)\n : extract(opt)\n}\n\n// construct a filter that limits the file entries listed\n// include child entries if a dir is included\nconst filesFilter = (opt, files) => {\n const map = new Map(files.map(f => [f.replace(/\\/+$/, ''), true]))\n const filter = opt.filter\n\n const mapHas = (file, r) => {\n const root = r || path.parse(file).root || '.'\n const ret = file === root ? false\n : map.has(file) ? map.get(file)\n : mapHas(path.dirname(file), root)\n\n map.set(file, ret)\n return ret\n }\n\n opt.filter = filter\n ? 
(file, entry) => filter(file, entry) && mapHas(file.replace(/\\/+$/, ''))\n : file => mapHas(file.replace(/\\/+$/, ''))\n}\n\nconst extractFileSync = opt => {\n const u = new Unpack.Sync(opt)\n\n const file = opt.file\n let threw = true\n let fd\n const stat = fs.statSync(file)\n // This trades a zero-byte read() syscall for a stat\n // However, it will usually result in less memory allocation\n const readSize = opt.maxReadSize || 16*1024*1024\n const stream = new fsm.ReadStreamSync(file, {\n readSize: readSize,\n size: stat.size\n })\n stream.pipe(u)\n}\n\nconst extractFile = (opt, cb) => {\n const u = new Unpack(opt)\n const readSize = opt.maxReadSize || 16*1024*1024\n\n const file = opt.file\n const p = new Promise((resolve, reject) => {\n u.on('error', reject)\n u.on('close', resolve)\n\n // This trades a zero-byte read() syscall for a stat\n // However, it will usually result in less memory allocation\n fs.stat(file, (er, stat) => {\n if (er)\n reject(er)\n else {\n const stream = new fsm.ReadStream(file, {\n readSize: readSize,\n size: stat.size\n })\n stream.on('error', reject)\n stream.pipe(u)\n }\n })\n })\n return cb ? p.then(cb, cb) : p\n}\n\nconst extractSync = opt => {\n return new Unpack.Sync(opt)\n}\n\nconst extract = opt => {\n return new Unpack(opt)\n}\n","'use strict'\n// wrapper around mkdirp for tar's needs.\n\n// TODO: This should probably be a class, not functionally\n// passing around state in a gazillion args.\n\nconst mkdirp = require('mkdirp')\nconst fs = require('fs')\nconst path = require('path')\nconst chownr = require('chownr')\n\nclass SymlinkError extends Error {\n constructor (symlink, path) {\n super('Cannot extract through symbolic link')\n this.path = path\n this.symlink = symlink\n }\n\n get name () {\n return 'SylinkError'\n }\n}\n\nclass CwdError extends Error {\n constructor (path, code) {\n super(code + ': Cannot cd into \\'' + path + '\\'')\n this.path = path\n this.code = code\n }\n\n get name () {\n return 'CwdError'\n }\n}\n\nconst mkdir = module.exports = (dir, opt, cb) => {\n // if there's any overlap between mask and mode,\n // then we'll need an explicit chmod\n const umask = opt.umask\n const mode = opt.mode | 0o0700\n const needChmod = (mode & umask) !== 0\n\n const uid = opt.uid\n const gid = opt.gid\n const doChown = typeof uid === 'number' &&\n typeof gid === 'number' &&\n ( uid !== opt.processUid || gid !== opt.processGid )\n\n const preserve = opt.preserve\n const unlink = opt.unlink\n const cache = opt.cache\n const cwd = opt.cwd\n\n const done = (er, created) => {\n if (er)\n cb(er)\n else {\n cache.set(dir, true)\n if (created && doChown)\n chownr(created, uid, gid, er => done(er))\n else if (needChmod)\n fs.chmod(dir, mode, cb)\n else\n cb()\n }\n }\n\n if (cache && cache.get(dir) === true)\n return done()\n\n if (dir === cwd)\n return fs.lstat(dir, (er, st) => {\n if (er || !st.isDirectory())\n er = new CwdError(dir, er && er.code || 'ENOTDIR')\n done(er)\n })\n\n if (preserve)\n return mkdirp(dir, mode, done)\n\n const sub = path.relative(cwd, dir)\n const parts = sub.split(/\\/|\\\\/)\n mkdir_(cwd, parts, mode, cache, unlink, cwd, null, done)\n}\n\nconst mkdir_ = (base, parts, mode, cache, unlink, cwd, created, cb) => {\n if (!parts.length)\n return cb(null, created)\n const p = parts.shift()\n const part = base + '/' + p\n if (cache.get(part))\n return mkdir_(part, parts, mode, cache, unlink, cwd, created, cb)\n fs.mkdir(part, mode, onmkdir(part, parts, mode, cache, unlink, cwd, created, cb))\n}\n\nconst onmkdir = (part, 
parts, mode, cache, unlink, cwd, created, cb) => er => {\n if (er) {\n if (er.path && path.dirname(er.path) === cwd &&\n (er.code === 'ENOTDIR' || er.code === 'ENOENT'))\n return cb(new CwdError(cwd, er.code))\n\n fs.lstat(part, (statEr, st) => {\n if (statEr)\n cb(statEr)\n else if (st.isDirectory())\n mkdir_(part, parts, mode, cache, unlink, cwd, created, cb)\n else if (unlink)\n fs.unlink(part, er => {\n if (er)\n return cb(er)\n fs.mkdir(part, mode, onmkdir(part, parts, mode, cache, unlink, cwd, created, cb))\n })\n else if (st.isSymbolicLink())\n return cb(new SymlinkError(part, part + '/' + parts.join('/')))\n else\n cb(er)\n })\n } else {\n created = created || part\n mkdir_(part, parts, mode, cache, unlink, cwd, created, cb)\n }\n}\n\nconst mkdirSync = module.exports.sync = (dir, opt) => {\n // if there's any overlap between mask and mode,\n // then we'll need an explicit chmod\n const umask = opt.umask\n const mode = opt.mode | 0o0700\n const needChmod = (mode & umask) !== 0\n\n const uid = opt.uid\n const gid = opt.gid\n const doChown = typeof uid === 'number' &&\n typeof gid === 'number' &&\n ( uid !== opt.processUid || gid !== opt.processGid )\n\n const preserve = opt.preserve\n const unlink = opt.unlink\n const cache = opt.cache\n const cwd = opt.cwd\n\n const done = (created) => {\n cache.set(dir, true)\n if (created && doChown)\n chownr.sync(created, uid, gid)\n if (needChmod)\n fs.chmodSync(dir, mode)\n }\n\n if (cache && cache.get(dir) === true)\n return done()\n\n if (dir === cwd) {\n let ok = false\n let code = 'ENOTDIR'\n try {\n ok = fs.lstatSync(dir).isDirectory()\n } catch (er) {\n code = er.code\n } finally {\n if (!ok)\n throw new CwdError(dir, code)\n }\n done()\n return\n }\n\n if (preserve)\n return done(mkdirp.sync(dir, mode))\n\n const sub = path.relative(cwd, dir)\n const parts = sub.split(/\\/|\\\\/)\n let created = null\n for (let p = parts.shift(), part = cwd;\n p && (part += '/' + p);\n p = parts.shift()) {\n\n if (cache.get(part))\n continue\n\n try {\n fs.mkdirSync(part, mode)\n created = created || part\n cache.set(part, true)\n } catch (er) {\n if (er.path && path.dirname(er.path) === cwd &&\n (er.code === 'ENOTDIR' || er.code === 'ENOENT'))\n return new CwdError(cwd, er.code)\n\n const st = fs.lstatSync(part)\n if (st.isDirectory()) {\n cache.set(part, true)\n continue\n } else if (unlink) {\n fs.unlinkSync(part)\n fs.mkdirSync(part, mode)\n created = created || part\n cache.set(part, true)\n continue\n } else if (st.isSymbolicLink())\n return new SymlinkError(part, part + '/' + parts.join('/'))\n }\n }\n\n return done(created)\n}\n","var path = require('path');\nvar fs = require('fs');\nvar _0777 = parseInt('0777', 8);\n\nmodule.exports = mkdirP.mkdirp = mkdirP.mkdirP = mkdirP;\n\nfunction mkdirP (p, opts, f, made) {\n if (typeof opts === 'function') {\n f = opts;\n opts = {};\n }\n else if (!opts || typeof opts !== 'object') {\n opts = { mode: opts };\n }\n \n var mode = opts.mode;\n var xfs = opts.fs || fs;\n \n if (mode === undefined) {\n mode = _0777 & (~process.umask());\n }\n if (!made) made = null;\n \n var cb = f || function () {};\n p = path.resolve(p);\n \n xfs.mkdir(p, mode, function (er) {\n if (!er) {\n made = made || p;\n return cb(null, made);\n }\n switch (er.code) {\n case 'ENOENT':\n mkdirP(path.dirname(p), opts, function (er, made) {\n if (er) cb(er, made);\n else mkdirP(p, opts, cb, made);\n });\n break;\n\n // In the case of any other error, just see if there's a dir\n // there already. If so, then hooray! 
If not, then something\n // is borked.\n default:\n xfs.stat(p, function (er2, stat) {\n // if the stat fails, then that's super weird.\n // let the original error be the failure reason.\n if (er2 || !stat.isDirectory()) cb(er, made)\n else cb(null, made);\n });\n break;\n }\n });\n}\n\nmkdirP.sync = function sync (p, opts, made) {\n if (!opts || typeof opts !== 'object') {\n opts = { mode: opts };\n }\n \n var mode = opts.mode;\n var xfs = opts.fs || fs;\n \n if (mode === undefined) {\n mode = _0777 & (~process.umask());\n }\n if (!made) made = null;\n\n p = path.resolve(p);\n\n try {\n xfs.mkdirSync(p, mode);\n made = made || p;\n }\n catch (err0) {\n switch (err0.code) {\n case 'ENOENT' :\n made = sync(path.dirname(p), opts, made);\n sync(p, opts, made);\n break;\n\n // In the case of any other error, just see if there's a dir\n // there already. If so, then hooray! If not, then something\n // is borked.\n default:\n var stat;\n try {\n stat = xfs.statSync(p);\n }\n catch (err1) {\n throw err0;\n }\n if (!stat.isDirectory()) throw err0;\n break;\n }\n }\n\n return made;\n};\n","module.exports = chownr\nchownr.sync = chownrSync\n\nvar fs = require(\"fs\")\n, path = require(\"path\")\n\nfunction chownr (p, uid, gid, cb) {\n fs.readdir(p, function (er, children) {\n // any error other than ENOTDIR means it's not readable, or\n // doesn't exist. give up.\n if (er && er.code !== \"ENOTDIR\") return cb(er)\n if (er || !children.length) return fs.chown(p, uid, gid, cb)\n\n var len = children.length\n , errState = null\n children.forEach(function (child) {\n var pathChild = path.resolve(p, child);\n fs.lstat(pathChild, function(er, stats) {\n if (er)\n return cb(er)\n if (!stats.isSymbolicLink())\n chownr(pathChild, uid, gid, then)\n else\n then()\n })\n })\n function then (er) {\n if (errState) return\n if (er) return cb(errState = er)\n if (-- len === 0) return fs.chown(p, uid, gid, cb)\n }\n })\n}\n\nfunction chownrSync (p, uid, gid) {\n var children\n try {\n children = fs.readdirSync(p)\n } catch (er) {\n if (er && er.code === \"ENOTDIR\") return fs.chownSync(p, uid, gid)\n throw er\n }\n if (!children.length) return fs.chownSync(p, uid, gid)\n\n children.forEach(function (child) {\n var pathChild = path.resolve(p, child)\n var stats = fs.lstatSync(pathChild)\n if (!stats.isSymbolicLink())\n chownrSync(pathChild, uid, gid)\n })\n return fs.chownSync(p, uid, gid)\n}\n","module.exports = require(\"crypto\");","module.exports = require(\"vm\");","module.exports = require(\"module\");","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst sync_stdio_1 = require(\"./sync-stdio\");\nclass InputOutput {\n constructor() {\n this.debug = false;\n this.stdio = new sync_stdio_1.SyncStdio();\n }\n write(obj) {\n const output = JSON.stringify(obj);\n this.stdio.writeLine(output);\n if (this.debug) {\n this.stdio.writeErrorLine('< ' + output);\n }\n }\n read() {\n let reqLine = this.stdio.readLine();\n if (!reqLine) {\n return undefined;\n }\n // skip recorded responses\n if (reqLine.indexOf('< ') === 0) {\n return this.read();\n }\n // stip \"> \" from recorded requests\n if (reqLine.indexOf('> ') === 0) {\n reqLine = reqLine.substr(2);\n }\n const input = JSON.parse(reqLine);\n if (this.debug) {\n this.stdio.writeErrorLine('> ' + JSON.stringify(input));\n }\n return input;\n }\n}\nexports.InputOutput = InputOutput;\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst fs = require(\"fs\");\nconst STDIN_FD = 0;\nconst STDOUT_FD 
= 1;\nconst STDERR_FD = 2;\nconst INPUT_BUFFER_SIZE = 1024 * 1024; // not related to max line length\nclass SyncStdio {\n constructor() {\n this.inputQueue = new Array();\n this.currentLine = '';\n }\n writeErrorLine(line) {\n this.writeBuffer(Buffer.from(`${line}\\n`), STDERR_FD);\n }\n writeLine(line) {\n this.writeBuffer(Buffer.from(`${line}\\n`), STDOUT_FD);\n }\n readLine() {\n if (this.inputQueue.length > 0) {\n return this.inputQueue.shift();\n }\n const buff = Buffer.alloc(INPUT_BUFFER_SIZE);\n const read = fs.readSync(STDIN_FD, buff, 0, buff.length, null);\n if (read === 0) {\n return undefined;\n }\n const str = buff.slice(0, read).toString();\n for (let i = 0; i < str.length; ++i) {\n const ch = str[i];\n if (ch === '\\n') {\n this.inputQueue.push(this.currentLine);\n this.currentLine = '';\n }\n else {\n this.currentLine += ch;\n }\n }\n const next = this.inputQueue.shift();\n if (next == null) {\n return this.readLine();\n }\n return next;\n }\n writeBuffer(buffer, fd) {\n let offset = 0;\n while (offset < buffer.length) {\n try {\n offset += fs.writeSync(fd, buffer, offset);\n }\n catch (e) {\n if (e.code !== 'EAGAIN') {\n throw e;\n }\n }\n }\n }\n}\nexports.SyncStdio = SyncStdio;\n"],"sourceRoot":""} \ No newline at end of file diff --git a/packages/jsii-dotnet-runtime/src/Amazon.JSII.Runtime/key.snk b/packages/jsii-dotnet-runtime/src/Amazon.JSII.Runtime/key.snk new file mode 100644 index 0000000000..709adb3d6a Binary files /dev/null and b/packages/jsii-dotnet-runtime/src/Amazon.JSII.Runtime/key.snk differ diff --git a/packages/jsii-pacmak/test/expected.jsii-calc-lib/dotnet/Amazon.JSII.Tests.CalculatorPackageId.LibPackageId/.jsii b/packages/jsii-pacmak/test/expected.jsii-calc-lib/dotnet/Amazon.JSII.Tests.CalculatorPackageId.LibPackageId/.jsii index e351e0ce08..dc6ac23805 100644 --- a/packages/jsii-pacmak/test/expected.jsii-calc-lib/dotnet/Amazon.JSII.Tests.CalculatorPackageId.LibPackageId/.jsii +++ b/packages/jsii-pacmak/test/expected.jsii-calc-lib/dotnet/Amazon.JSII.Tests.CalculatorPackageId.LibPackageId/.jsii @@ -92,6 +92,25 @@ ], "name": "EnumFromScopedModule" }, + "@scope/jsii-calc-lib.IDoublable": { + "assembly": "@scope/jsii-calc-lib", + "docs": { + "comment": "The general contract for a concrete number." 
+ }, + "fqn": "@scope/jsii-calc-lib.IDoublable", + "kind": "interface", + "name": "IDoublable", + "properties": [ + { + "abstract": true, + "immutable": true, + "name": "doubleValue", + "type": { + "primitive": "number" + } + } + ] + }, "@scope/jsii-calc-lib.IFriendly": { "assembly": "@scope/jsii-calc-lib", "docs": { @@ -184,6 +203,11 @@ } ] }, + "interfaces": [ + { + "fqn": "@scope/jsii-calc-lib.IDoublable" + } + ], "kind": "class", "name": "Number", "properties": [ @@ -193,6 +217,9 @@ }, "immutable": true, "name": "doubleValue", + "overrides": { + "fqn": "@scope/jsii-calc-lib.IDoublable" + }, "type": { "primitive": "number" } @@ -324,5 +351,5 @@ } }, "version": "0.7.8", - "fingerprint": "16sTfW7oHGAWfPOj50gWvXsI1REjbNbpk7VUpG1JVVI=" + "fingerprint": "HzcyHys0b9gFmP4dogeIJmGE6GVtrSo/P0S54Vd/X8U=" } diff --git a/packages/jsii-pacmak/test/expected.jsii-calc-lib/dotnet/Amazon.JSII.Tests.CalculatorPackageId.LibPackageId/Amazon/JSII/Tests/CalculatorNamespace/LibNamespace/IDoublableProxy.cs b/packages/jsii-pacmak/test/expected.jsii-calc-lib/dotnet/Amazon.JSII.Tests.CalculatorPackageId.LibPackageId/Amazon/JSII/Tests/CalculatorNamespace/LibNamespace/IDoublableProxy.cs new file mode 100644 index 0000000000..2d5c772875 --- /dev/null +++ b/packages/jsii-pacmak/test/expected.jsii-calc-lib/dotnet/Amazon.JSII.Tests.CalculatorPackageId.LibPackageId/Amazon/JSII/Tests/CalculatorNamespace/LibNamespace/IDoublableProxy.cs @@ -0,0 +1,19 @@ +using Amazon.JSII.Runtime.Deputy; + +namespace Amazon.JSII.Tests.CalculatorNamespace.LibNamespace +{ + /// The general contract for a concrete number. + [JsiiTypeProxy(typeof(IIDoublable), "@scope/jsii-calc-lib.IDoublable")] + internal sealed class IDoublableProxy : DeputyBase, IIDoublable + { + private IDoublableProxy(ByRefValue reference): base(reference) + { + } + + [JsiiProperty("doubleValue", "{\"primitive\":\"number\"}")] + public double DoubleValue + { + get => GetInstanceProperty<double>(); + } + } +} \ No newline at end of file diff --git a/packages/jsii-pacmak/test/expected.jsii-calc-lib/dotnet/Amazon.JSII.Tests.CalculatorPackageId.LibPackageId/Amazon/JSII/Tests/CalculatorNamespace/LibNamespace/IIDoublable.cs b/packages/jsii-pacmak/test/expected.jsii-calc-lib/dotnet/Amazon.JSII.Tests.CalculatorPackageId.LibPackageId/Amazon/JSII/Tests/CalculatorNamespace/LibNamespace/IIDoublable.cs new file mode 100644 index 0000000000..d7260984a8 --- /dev/null +++ b/packages/jsii-pacmak/test/expected.jsii-calc-lib/dotnet/Amazon.JSII.Tests.CalculatorPackageId.LibPackageId/Amazon/JSII/Tests/CalculatorNamespace/LibNamespace/IIDoublable.cs @@ -0,0 +1,15 @@ +using Amazon.JSII.Runtime.Deputy; + +namespace Amazon.JSII.Tests.CalculatorNamespace.LibNamespace +{ + /// The general contract for a concrete number. 
+ [JsiiInterface(typeof(IIDoublable), "@scope/jsii-calc-lib.IDoublable")] + public interface IIDoublable + { + [JsiiProperty("doubleValue", "{\"primitive\":\"number\"}")] + double DoubleValue + { + get; + } + } +} \ No newline at end of file diff --git a/packages/jsii-pacmak/test/expected.jsii-calc-lib/dotnet/Amazon.JSII.Tests.CalculatorPackageId.LibPackageId/Amazon/JSII/Tests/CalculatorNamespace/LibNamespace/Number.cs b/packages/jsii-pacmak/test/expected.jsii-calc-lib/dotnet/Amazon.JSII.Tests.CalculatorPackageId.LibPackageId/Amazon/JSII/Tests/CalculatorNamespace/LibNamespace/Number.cs index f86323d0cc..e19ce0ad51 100644 --- a/packages/jsii-pacmak/test/expected.jsii-calc-lib/dotnet/Amazon.JSII.Tests.CalculatorPackageId.LibPackageId/Amazon/JSII/Tests/CalculatorNamespace/LibNamespace/Number.cs +++ b/packages/jsii-pacmak/test/expected.jsii-calc-lib/dotnet/Amazon.JSII.Tests.CalculatorPackageId.LibPackageId/Amazon/JSII/Tests/CalculatorNamespace/LibNamespace/Number.cs @@ -4,7 +4,7 @@ namespace Amazon.JSII.Tests.CalculatorNamespace.LibNamespace { /// Represents a concrete number. [JsiiClass(typeof(Number), "@scope/jsii-calc-lib.Number", "[{\"name\":\"value\",\"type\":{\"primitive\":\"number\"}}]")] - public class Number : Value_ + public class Number : Value_, IIDoublable { public Number(double value): base(new DeputyProps(new object[]{value})) { diff --git a/packages/jsii-pacmak/test/expected.jsii-calc-lib/java/src/main/java/software/amazon/jsii/tests/calculator/lib/$Module.java b/packages/jsii-pacmak/test/expected.jsii-calc-lib/java/src/main/java/software/amazon/jsii/tests/calculator/lib/$Module.java index 5b1453d5d0..52c6de750a 100644 --- a/packages/jsii-pacmak/test/expected.jsii-calc-lib/java/src/main/java/software/amazon/jsii/tests/calculator/lib/$Module.java +++ b/packages/jsii-pacmak/test/expected.jsii-calc-lib/java/src/main/java/software/amazon/jsii/tests/calculator/lib/$Module.java @@ -19,6 +19,7 @@ public List<Class<? extends JsiiModule>> getDependencies() { protected Class<?> resolveClass(final String fqn) throws ClassNotFoundException { switch (fqn) { case "@scope/jsii-calc-lib.EnumFromScopedModule": return software.amazon.jsii.tests.calculator.lib.EnumFromScopedModule.class; + case "@scope/jsii-calc-lib.IDoublable": return software.amazon.jsii.tests.calculator.lib.IDoublable.class; case "@scope/jsii-calc-lib.IFriendly": return software.amazon.jsii.tests.calculator.lib.IFriendly.class; case "@scope/jsii-calc-lib.MyFirstStruct": return software.amazon.jsii.tests.calculator.lib.MyFirstStruct.class; case "@scope/jsii-calc-lib.Number": return software.amazon.jsii.tests.calculator.lib.Number.class; diff --git a/packages/jsii-pacmak/test/expected.jsii-calc-lib/java/src/main/java/software/amazon/jsii/tests/calculator/lib/IDoublable.java b/packages/jsii-pacmak/test/expected.jsii-calc-lib/java/src/main/java/software/amazon/jsii/tests/calculator/lib/IDoublable.java new file mode 100644 index 0000000000..a7cf2325fe --- /dev/null +++ b/packages/jsii-pacmak/test/expected.jsii-calc-lib/java/src/main/java/software/amazon/jsii/tests/calculator/lib/IDoublable.java @@ -0,0 +1,23 @@ +package software.amazon.jsii.tests.calculator.lib; + +/** + * The general contract for a concrete number. + */ +@javax.annotation.Generated(value = "jsii-pacmak") +public interface IDoublable extends software.amazon.jsii.JsiiSerializable { + java.lang.Number getDoubleValue(); + + /** + * A proxy class which represents a concrete javascript instance of this type. 
+ */ + final static class Jsii$Proxy extends software.amazon.jsii.JsiiObject implements software.amazon.jsii.tests.calculator.lib.IDoublable { + protected Jsii$Proxy(final software.amazon.jsii.JsiiObject.InitializationMode mode) { + super(mode); + } + + @Override + public java.lang.Number getDoubleValue() { + return this.jsiiGet("doubleValue", java.lang.Number.class); + } + } +} diff --git a/packages/jsii-pacmak/test/expected.jsii-calc-lib/java/src/main/java/software/amazon/jsii/tests/calculator/lib/Number.java b/packages/jsii-pacmak/test/expected.jsii-calc-lib/java/src/main/java/software/amazon/jsii/tests/calculator/lib/Number.java index a14b6fe024..0f023695d2 100644 --- a/packages/jsii-pacmak/test/expected.jsii-calc-lib/java/src/main/java/software/amazon/jsii/tests/calculator/lib/Number.java +++ b/packages/jsii-pacmak/test/expected.jsii-calc-lib/java/src/main/java/software/amazon/jsii/tests/calculator/lib/Number.java @@ -5,7 +5,7 @@ */ @javax.annotation.Generated(value = "jsii-pacmak") @software.amazon.jsii.Jsii(module = software.amazon.jsii.tests.calculator.lib.$Module.class, fqn = "@scope/jsii-calc-lib.Number") -public class Number extends software.amazon.jsii.tests.calculator.lib.Value { +public class Number extends software.amazon.jsii.tests.calculator.lib.Value implements software.amazon.jsii.tests.calculator.lib.IDoublable { protected Number(final software.amazon.jsii.JsiiObject.InitializationMode mode) { super(mode); } @@ -21,6 +21,7 @@ public Number(final java.lang.Number value) { /** * The number multiplied by 2. */ + @Override public java.lang.Number getDoubleValue() { return this.jsiiGet("doubleValue", java.lang.Number.class); } diff --git a/packages/jsii-pacmak/test/expected.jsii-calc-lib/sphinx/_scope_jsii-calc-lib.rst b/packages/jsii-pacmak/test/expected.jsii-calc-lib/sphinx/_scope_jsii-calc-lib.rst index 9cac7a500a..0121d0e144 100644 --- a/packages/jsii-pacmak/test/expected.jsii-calc-lib/sphinx/_scope_jsii-calc-lib.rst +++ b/packages/jsii-pacmak/test/expected.jsii-calc-lib/sphinx/_scope_jsii-calc-lib.rst @@ -158,6 +158,43 @@ EnumFromScopedModule (enum) .. py:data:: Value2 +IDoublable (interface) +^^^^^^^^^^^^^^^^^^^^^^ + +.. py:class:: IDoublable + + **Language-specific names:** + + .. tabs:: + + .. code-tab:: c# + + using Amazon.JSII.Tests.CalculatorNamespace.LibNamespace; + + .. code-tab:: java + + import software.amazon.jsii.tests.calculator.lib.IDoublable; + + .. code-tab:: javascript + + // IDoublable is an interface + + .. code-tab:: typescript + + import { IDoublable } from '@scope/jsii-calc-lib'; + + + + The general contract for a concrete number. + + + + + .. py:attribute:: doubleValue + + :type: number *(readonly)* *(abstract)* + + IFriendly (interface) ^^^^^^^^^^^^^^^^^^^^^ @@ -283,11 +320,14 @@ Number :extends: :py:class:`~@scope/jsii-calc-lib.Value`\ + :implements: :py:class:`~@scope/jsii-calc-lib.IDoublable`\ :param value: The number. :type value: number .. py:attribute:: doubleValue + *Implements* :py:meth:`@scope/jsii-calc-lib.IDoublable.doubleValue` + The number multiplied by 2. 
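For orientation (this note and sketch are not part of the diff): the hunks above introduce the IDoublable interface in @scope/jsii-calc-lib and make Number implement it. Below is a minimal, hypothetical TypeScript sketch of the contract from a consumer's point of view, assuming only what the diff shows; the Doubler class and its base field are made-up names used purely for illustration.

    import { IDoublable, Number } from '@scope/jsii-calc-lib';

    // Anything exposing a readonly numeric doubleValue satisfies IDoublable.
    class Doubler implements IDoublable {
      public constructor(private readonly base: number) {}

      public get doubleValue(): number {
        return this.base * 2;
      }
    }

    // The library's Number class now satisfies the same contract,
    // since its doubleValue is the wrapped value multiplied by 2.
    const fromClass: IDoublable = new Doubler(10);   // doubleValue === 20
    const fromNumber: IDoublable = new Number(10);   // doubleValue === 20

The IReturnsNumber changes in jsii-calc further down rely on exactly this shape: obtainNumber() is now typed as returning an IDoublable instead of a primitive number.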
diff --git a/packages/jsii-pacmak/test/expected.jsii-calc/dotnet/Amazon.JSII.Tests.CalculatorPackageId/.jsii b/packages/jsii-pacmak/test/expected.jsii-calc/dotnet/Amazon.JSII.Tests.CalculatorPackageId/.jsii index 2aa5115a4b..c6ffb76b77 100644 --- a/packages/jsii-pacmak/test/expected.jsii-calc/dotnet/Amazon.JSII.Tests.CalculatorPackageId/.jsii +++ b/packages/jsii-pacmak/test/expected.jsii-calc/dotnet/Amazon.JSII.Tests.CalculatorPackageId/.jsii @@ -436,7 +436,7 @@ "type": { "collection": { "elementtype": { - "primitive": "number" + "fqn": "@scope/jsii-calc-lib.Number" }, "kind": "map" } @@ -486,6 +486,9 @@ }, { "primitive": "number" + }, + { + "fqn": "@scope/jsii-calc-lib.Number" } ] } @@ -507,6 +510,9 @@ }, { "fqn": "jsii-calc.Multiply" + }, + { + "fqn": "@scope/jsii-calc-lib.Number" } ] } @@ -1559,7 +1565,7 @@ "abstract": true, "name": "obtainNumber", "returns": { - "primitive": "number" + "fqn": "@scope/jsii-calc-lib.IDoublable" } } ], @@ -1570,7 +1576,7 @@ "immutable": true, "name": "numberProp", "type": { - "primitive": "number" + "fqn": "@scope/jsii-calc-lib.Number" } } ] @@ -3401,5 +3407,5 @@ } }, "version": "0.7.8", - "fingerprint": "fhzPkiQLwsWAnEdA5+YEotaWom2Av1au0q2FzpexXaQ=" + "fingerprint": "jHSXTzCSZbwYMvLKpeZB6SE8hNgYgt9/2JF1ihM41SI=" } diff --git a/packages/jsii-pacmak/test/expected.jsii-calc/dotnet/Amazon.JSII.Tests.CalculatorPackageId/Amazon/JSII/Tests/CalculatorNamespace/AllTypes.cs b/packages/jsii-pacmak/test/expected.jsii-calc/dotnet/Amazon.JSII.Tests.CalculatorPackageId/Amazon/JSII/Tests/CalculatorNamespace/AllTypes.cs index b4a4896b31..489e96e84c 100644 --- a/packages/jsii-pacmak/test/expected.jsii-calc/dotnet/Amazon.JSII.Tests.CalculatorPackageId/Amazon/JSII/Tests/CalculatorNamespace/AllTypes.cs +++ b/packages/jsii-pacmak/test/expected.jsii-calc/dotnet/Amazon.JSII.Tests.CalculatorPackageId/Amazon/JSII/Tests/CalculatorNamespace/AllTypes.cs @@ -1,5 +1,6 @@ using Amazon.JSII.Runtime.Deputy; using Amazon.JSII.Tests.CalculatorNamespace.composition; +using Amazon.JSII.Tests.CalculatorNamespace.LibNamespace; using Newtonsoft.Json.Linq; using System; using System.Collections.Generic; @@ -80,10 +81,10 @@ public virtual JObject JsonProperty set => SetInstanceProperty(value); } - [JsiiProperty("mapProperty", "{\"collection\":{\"kind\":\"map\",\"elementtype\":{\"primitive\":\"number\"}}}")] - public virtual IDictionary<string, double> MapProperty + [JsiiProperty("mapProperty", "{\"collection\":{\"kind\":\"map\",\"elementtype\":{\"fqn\":\"@scope/jsii-calc-lib.Number\"}}}")] + public virtual IDictionary<string, Number> MapProperty { - get => GetInstanceProperty<IDictionary<string, double>>(); + get => GetInstanceProperty<IDictionary<string, Number>>(); set => SetInstanceProperty(value); } @@ -108,14 +109,14 @@ public virtual object[] UnionArrayProperty set => SetInstanceProperty(value); } - [JsiiProperty("unionMapProperty", "{\"collection\":{\"kind\":\"map\",\"elementtype\":{\"union\":{\"types\":[{\"primitive\":\"string\"},{\"primitive\":\"number\"}]}}}}")] + [JsiiProperty("unionMapProperty", "{\"collection\":{\"kind\":\"map\",\"elementtype\":{\"union\":{\"types\":[{\"primitive\":\"string\"},{\"primitive\":\"number\"},{\"fqn\":\"@scope/jsii-calc-lib.Number\"}]}}}}")] public virtual IDictionary<string, object> UnionMapProperty { get => GetInstanceProperty<IDictionary<string, object>>(); set => SetInstanceProperty(value); } - [JsiiProperty("unionProperty", "{\"union\":{\"types\":[{\"primitive\":\"string\"},{\"primitive\":\"number\"},{\"fqn\":\"jsii-calc.Multiply\"}]}}")] + [JsiiProperty("unionProperty", 
"{\"union\":{\"types\":[{\"primitive\":\"string\"},{\"primitive\":\"number\"},{\"fqn\":\"jsii-calc.Multiply\"},{\"fqn\":\"@scope/jsii-calc-lib.Number\"}]}}")] public virtual object UnionProperty { get => GetInstanceProperty<object>(); diff --git a/packages/jsii-pacmak/test/expected.jsii-calc/dotnet/Amazon.JSII.Tests.CalculatorPackageId/Amazon/JSII/Tests/CalculatorNamespace/IIReturnsNumber.cs b/packages/jsii-pacmak/test/expected.jsii-calc/dotnet/Amazon.JSII.Tests.CalculatorPackageId/Amazon/JSII/Tests/CalculatorNamespace/IIReturnsNumber.cs index 4c3ab80cc1..e62565eb16 100644 --- a/packages/jsii-pacmak/test/expected.jsii-calc/dotnet/Amazon.JSII.Tests.CalculatorPackageId/Amazon/JSII/Tests/CalculatorNamespace/IIReturnsNumber.cs +++ b/packages/jsii-pacmak/test/expected.jsii-calc/dotnet/Amazon.JSII.Tests.CalculatorPackageId/Amazon/JSII/Tests/CalculatorNamespace/IIReturnsNumber.cs @@ -1,17 +1,18 @@ using Amazon.JSII.Runtime.Deputy; +using Amazon.JSII.Tests.CalculatorNamespace.LibNamespace; namespace Amazon.JSII.Tests.CalculatorNamespace { [JsiiInterface(typeof(IIReturnsNumber), "jsii-calc.IReturnsNumber")] public interface IIReturnsNumber { - [JsiiProperty("numberProp", "{\"primitive\":\"number\"}")] - double NumberProp + [JsiiProperty("numberProp", "{\"fqn\":\"@scope/jsii-calc-lib.Number\"}")] + Number NumberProp { get; } - [JsiiMethod("obtainNumber", "{\"primitive\":\"number\"}", "[]")] - double ObtainNumber(); + [JsiiMethod("obtainNumber", "{\"fqn\":\"@scope/jsii-calc-lib.IDoublable\"}", "[]")] + IIDoublable ObtainNumber(); } } \ No newline at end of file diff --git a/packages/jsii-pacmak/test/expected.jsii-calc/dotnet/Amazon.JSII.Tests.CalculatorPackageId/Amazon/JSII/Tests/CalculatorNamespace/IReturnsNumberProxy.cs b/packages/jsii-pacmak/test/expected.jsii-calc/dotnet/Amazon.JSII.Tests.CalculatorPackageId/Amazon/JSII/Tests/CalculatorNamespace/IReturnsNumberProxy.cs index e77ef70356..bc689a1bc8 100644 --- a/packages/jsii-pacmak/test/expected.jsii-calc/dotnet/Amazon.JSII.Tests.CalculatorPackageId/Amazon/JSII/Tests/CalculatorNamespace/IReturnsNumberProxy.cs +++ b/packages/jsii-pacmak/test/expected.jsii-calc/dotnet/Amazon.JSII.Tests.CalculatorPackageId/Amazon/JSII/Tests/CalculatorNamespace/IReturnsNumberProxy.cs @@ -1,4 +1,5 @@ using Amazon.JSII.Runtime.Deputy; +using Amazon.JSII.Tests.CalculatorNamespace.LibNamespace; namespace Amazon.JSII.Tests.CalculatorNamespace { @@ -9,16 +10,16 @@ private IReturnsNumberProxy(ByRefValue reference): base(reference) { } - [JsiiProperty("numberProp", "{\"primitive\":\"number\"}")] - public double NumberProp + [JsiiProperty("numberProp", "{\"fqn\":\"@scope/jsii-calc-lib.Number\"}")] + public Number NumberProp { - get => GetInstanceProperty<double>(); + get => GetInstanceProperty<Number>(); } - [JsiiMethod("obtainNumber", "{\"primitive\":\"number\"}", "[]")] - public double ObtainNumber() + [JsiiMethod("obtainNumber", "{\"fqn\":\"@scope/jsii-calc-lib.IDoublable\"}", "[]")] + public IIDoublable ObtainNumber() { - return InvokeInstanceMethod<double>(new object[]{}); + return InvokeInstanceMethod<IIDoublable>(new object[]{}); } } } \ No newline at end of file diff --git a/packages/jsii-pacmak/test/expected.jsii-calc/java/src/main/java/software/amazon/jsii/tests/calculator/AllTypes.java b/packages/jsii-pacmak/test/expected.jsii-calc/java/src/main/java/software/amazon/jsii/tests/calculator/AllTypes.java index b57eb209d9..c6e710a699 100644 --- a/packages/jsii-pacmak/test/expected.jsii-calc/java/src/main/java/software/amazon/jsii/tests/calculator/AllTypes.java +++ 
b/packages/jsii-pacmak/test/expected.jsii-calc/java/src/main/java/software/amazon/jsii/tests/calculator/AllTypes.java @@ -79,11 +79,11 @@ public void setJsonProperty(final com.fasterxml.jackson.databind.node.ObjectNode this.jsiiSet("jsonProperty", java.util.Objects.requireNonNull(value, "jsonProperty is required")); } - public java.util.Map<java.lang.String, java.lang.Number> getMapProperty() { + public java.util.Map<java.lang.String, software.amazon.jsii.tests.calculator.lib.Number> getMapProperty() { return this.jsiiGet("mapProperty", java.util.Map.class); } - public void setMapProperty(final java.util.Map<java.lang.String, java.lang.Number> value) { + public void setMapProperty(final java.util.Map<java.lang.String, software.amazon.jsii.tests.calculator.lib.Number> value) { this.jsiiSet("mapProperty", java.util.Objects.requireNonNull(value, "mapProperty is required")); } @@ -135,6 +135,10 @@ public void setUnionProperty(final software.amazon.jsii.tests.calculator.Multipl this.jsiiSet("unionProperty", java.util.Objects.requireNonNull(value, "unionProperty is required")); } + public void setUnionProperty(final software.amazon.jsii.tests.calculator.lib.Number value) { + this.jsiiSet("unionProperty", java.util.Objects.requireNonNull(value, "unionProperty is required")); + } + public java.util.List<java.lang.Object> getUnknownArrayProperty() { return this.jsiiGet("unknownArrayProperty", java.util.List.class); } diff --git a/packages/jsii-pacmak/test/expected.jsii-calc/java/src/main/java/software/amazon/jsii/tests/calculator/IReturnsNumber.java b/packages/jsii-pacmak/test/expected.jsii-calc/java/src/main/java/software/amazon/jsii/tests/calculator/IReturnsNumber.java index 352f2ef930..1b81b6f07b 100644 --- a/packages/jsii-pacmak/test/expected.jsii-calc/java/src/main/java/software/amazon/jsii/tests/calculator/IReturnsNumber.java +++ b/packages/jsii-pacmak/test/expected.jsii-calc/java/src/main/java/software/amazon/jsii/tests/calculator/IReturnsNumber.java @@ -2,8 +2,8 @@ @javax.annotation.Generated(value = "jsii-pacmak") public interface IReturnsNumber extends software.amazon.jsii.JsiiSerializable { - java.lang.Number getNumberProp(); - java.lang.Number obtainNumber(); + software.amazon.jsii.tests.calculator.lib.Number getNumberProp(); + software.amazon.jsii.tests.calculator.lib.IDoublable obtainNumber(); /** * A proxy class which represents a concrete javascript instance of this type. @@ -14,13 +14,13 @@ final static class Jsii$Proxy extends software.amazon.jsii.JsiiObject implements } @Override - public java.lang.Number getNumberProp() { - return this.jsiiGet("numberProp", java.lang.Number.class); + public software.amazon.jsii.tests.calculator.lib.Number getNumberProp() { + return this.jsiiGet("numberProp", software.amazon.jsii.tests.calculator.lib.Number.class); } @Override - public java.lang.Number obtainNumber() { - return this.jsiiCall("obtainNumber", java.lang.Number.class); + public software.amazon.jsii.tests.calculator.lib.IDoublable obtainNumber() { + return this.jsiiCall("obtainNumber", software.amazon.jsii.tests.calculator.lib.IDoublable.class); } } } diff --git a/packages/jsii-pacmak/test/expected.jsii-calc/sphinx/jsii-calc.rst b/packages/jsii-pacmak/test/expected.jsii-calc/sphinx/jsii-calc.rst index cbc903a8c6..f7aa575961 100644 --- a/packages/jsii-pacmak/test/expected.jsii-calc/sphinx/jsii-calc.rst +++ b/packages/jsii-pacmak/test/expected.jsii-calc/sphinx/jsii-calc.rst @@ -434,7 +434,7 @@ AllTypes .. py:attribute:: mapProperty - :type: string => number + :type: string => :py:class:`@scope/jsii-calc-lib.Number`\ .. py:attribute:: numberProperty @@ -454,12 +454,12 @@ AllTypes .. 
py:attribute:: unionMapProperty - :type: string => (string or number) + :type: string => (string or number or :py:class:`@scope/jsii-calc-lib.Number`\ ) .. py:attribute:: unionProperty - :type: string or number or :py:class:`~jsii-calc.Multiply`\ + :type: string or number or :py:class:`~jsii-calc.Multiply`\ or :py:class:`@scope/jsii-calc-lib.Number`\ .. py:attribute:: unknownArrayProperty @@ -1761,12 +1761,12 @@ IReturnsNumber (interface) .. py:attribute:: numberProp - :type: number *(readonly)* *(abstract)* + :type: :py:class:`@scope/jsii-calc-lib.Number`\ *(readonly)* *(abstract)* - .. py:method:: obtainNumber() -> number + .. py:method:: obtainNumber() -> @scope/jsii-calc-lib.IDoublable - :rtype: number + :rtype: :py:class:`@scope/jsii-calc-lib.IDoublable`\ :abstract: Yes diff --git a/packages/jsii/lib/assembler.ts b/packages/jsii/lib/assembler.ts index d46dc342ac..4d2ae4ea06 100644 --- a/packages/jsii/lib/assembler.ts +++ b/packages/jsii/lib/assembler.ts @@ -11,7 +11,6 @@ import literate = require('./literate'); import { ProjectInfo } from './project-info'; import utils = require('./utils'); import { Validator } from './validator'; -import { NamedTypeReference, isInterfaceType } from 'jsii-spec'; // tslint:disable:no-var-requires Modules without TypeScript definitions const sortJson = require('sort-json'); @@ -30,9 +29,11 @@ export class Assembler implements Emitter { /** * @param projectInfo information about the package being assembled * @param program the TypeScript program to be assembled from + * @param stdlib the directory where the TypeScript stdlib is rooted */ public constructor(public readonly projectInfo: ProjectInfo, - public readonly program: ts.Program) {} + public readonly program: ts.Program, + public readonly stdlib: string) {} private get _typeChecker(): ts.TypeChecker { return this.program.getTypeChecker(); @@ -152,7 +153,7 @@ export class Assembler implements Emitter { * that case anyway. 
*/ // tslint:disable-next-line:max-line-length - private _deferUntilTypesAvailable(fqn: string, baseTypes: NamedTypeReference[], referencingNode: ts.Node, cb: (...xs: spec.Type[]) => void) { + private _deferUntilTypesAvailable(fqn: string, baseTypes: spec.NamedTypeReference[], referencingNode: ts.Node, cb: (...xs: spec.Type[]) => void) { // We can do this one eagerly if (baseTypes.length === 0) { cb(); @@ -581,7 +582,7 @@ export class Assembler implements Emitter { jsiiType.datatype = true; } for (const base of bases) { - if (isInterfaceType(base) && !base.datatype) { + if (spec.isInterfaceType(base) && !base.datatype) { jsiiType.datatype = undefined; } } @@ -731,7 +732,7 @@ export class Assembler implements Emitter { type = this._typeChecker.getApparentType(type); } - const primitiveType = _tryMakePrimitiveType(); + const primitiveType = _tryMakePrimitiveType.call(this); if (primitiveType) { return primitiveType; } if (type.isUnion() && !_isEnumLike(type)) { @@ -809,7 +810,7 @@ export class Assembler implements Emitter { }; } - function _tryMakePrimitiveType(): spec.PrimitiveTypeReference | undefined { + function _tryMakePrimitiveType(this: Assembler): spec.PrimitiveTypeReference | undefined { if (!type.symbol) { // tslint:disable-next-line:no-bitwise if (type.flags & ts.TypeFlags.Object) { @@ -819,7 +820,7 @@ export class Assembler implements Emitter { if (type.flags & (ts.TypeFlags.Any | ts.TypeFlags.Unknown)) { return { primitive: spec.PrimitiveType.Any, optional: true }; } - } else { + } else if (type.symbol.valueDeclaration && isUnder(type.symbol.valueDeclaration.getSourceFile().fileName, this.stdlib)) { switch (type.symbol.name) { case 'Boolean': return { primitive: spec.PrimitiveType.Boolean }; @@ -833,6 +834,11 @@ export class Assembler implements Emitter { } // Not a primitive type! 
return undefined; + + function isUnder(file: string, dir: string): boolean { + const relative = path.relative(dir, file); + return !relative.startsWith(path.sep) && !relative.startsWith('..'); + } } async function _unionType(this: Assembler): Promise { diff --git a/packages/jsii/lib/compiler.ts b/packages/jsii/lib/compiler.ts index 7fc7528745..1c8e4b780c 100644 --- a/packages/jsii/lib/compiler.ts +++ b/packages/jsii/lib/compiler.ts @@ -67,13 +67,16 @@ export class Compiler implements Emitter { private async _buildOnce(files: string[]): Promise<ts.EmitResult> { await this._writeTypeScriptConfig(); const host = ts.createCompilerHost(COMPILER_OPTIONS); + if (!host.getDefaultLibLocation) { + throw new Error('No default library location was found on the TypeScript compiler host!'); + } host.getCurrentDirectory = () => this.options.projectInfo.projectRoot; const prog = ts.createProgram( files.concat(_pathOfLibraries(host)), COMPILER_OPTIONS, host ); - return await this._consumeProgram(prog); + return await this._consumeProgram(prog, host.getDefaultLibLocation()); } private async _startWatch(): Promise<never> { @@ -84,9 +87,12 @@ { ...COMPILER_OPTIONS, noEmitOnError: false }, { ...ts.sys, getCurrentDirectory() { return projectRoot; } } ); + if (!host.getDefaultLibLocation) { + throw new Error('No default library location was found on the TypeScript compiler host!'); + } const orig = host.afterProgramCreate; host.afterProgramCreate = async builderProgram => { - await this._consumeProgram(builderProgram.getProgram()); + await this._consumeProgram(builderProgram.getProgram(), host.getDefaultLibLocation!()); if (orig) { orig.call(host, builderProgram); } }; ts.createWatchProgram(host); @@ -94,13 +100,13 @@ } - private async _consumeProgram(program: ts.Program): Promise<ts.EmitResult> { + private async _consumeProgram(program: ts.Program, stdlib: string): Promise<ts.EmitResult> { const emit = program.emit(); if (emit.emitSkipped) { LOG.error('Compilation errors prevented the JSII assembly from being created'); return emit; } - const assembler = new Assembler(this.options.projectInfo, program); + const assembler = new Assembler(this.options.projectInfo, program, stdlib); const assmEmit = await assembler.emit(); if (assmEmit.emitSkipped) { LOG.error('Type model errors prevented the JSII assembly from being created');